Optimize ttlcache recycling (GC) speed

Author: GoEdgeLab
Date:   2023-10-12 16:03:52 +08:00
parent 36b1f84c20
commit fce75df033
3 changed files with 41 additions and 20 deletions


@@ -3,6 +3,7 @@ package ttlcache
import (
"github.com/TeaOSLab/EdgeNode/internal/utils"
"github.com/TeaOSLab/EdgeNode/internal/utils/fasttime"
"runtime"
)
var SharedInt64Cache = NewBigCache[int64]()
@@ -19,6 +20,7 @@ type Cache[T any] struct {
countPieces uint64
maxItems int
maxPiecesPerGC int
gcPieceIndex int
}
@@ -61,9 +63,16 @@ func NewCache[T any](opt ...OptionInterface) *Cache[T] {
}
}
var maxPiecesPerGC = 4
var numCPU = runtime.NumCPU() / 2
if numCPU > maxPiecesPerGC {
maxPiecesPerGC = numCPU
}
var cache = &Cache[T]{
countPieces: uint64(countPieces),
maxItems: maxItems,
maxPiecesPerGC: maxPiecesPerGC,
}
for i := 0; i < countPieces; i++ {
@@ -136,15 +145,15 @@ func (this *Cache[T]) Count() (count int) {
func (this *Cache[T]) GC() {
var index = this.gcPieceIndex
const maxPiecesPerGC = 4
for i := index; i < index+maxPiecesPerGC; i++ {
for i := index; i < index+this.maxPiecesPerGC; i++ {
if i >= int(this.countPieces) {
break
}
this.pieces[i].GC()
}
index += maxPiecesPerGC
index += this.maxPiecesPerGC
if index >= int(this.countPieces) {
index = 0
}
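
Putting the hunks above together, the cache now sizes its GC batch from the CPU count and sweeps its pieces in rotating windows instead of a fixed 4 per pass. Below is a sketch of the resulting code: the field and variable names come straight from the diff, but the Piece type, the constructor signature (the real NewCache takes options such as PiecesOption), and the final write-back of gcPieceIndex are simplified or assumed, since they fall outside the visible hunks.

package ttlcache

import "runtime"

// Piece is a stand-in for the real shard type; only GC() matters here.
type Piece[T any] struct{}

func (this *Piece[T]) GC() { /* release expired items in this shard */ }

type Cache[T any] struct {
	pieces         []*Piece[T]
	countPieces    uint64
	maxItems       int
	maxPiecesPerGC int // new: how many pieces a single GC pass may sweep
	gcPieceIndex   int // rotating cursor into pieces
}

// NewCache is simplified here; the maxPiecesPerGC calculation is the part
// added by this commit.
func NewCache[T any](countPieces int, maxItems int) *Cache[T] {
	// Sweep at least 4 pieces per pass, more on machines with many cores.
	var maxPiecesPerGC = 4
	var numCPU = runtime.NumCPU() / 2
	if numCPU > maxPiecesPerGC {
		maxPiecesPerGC = numCPU
	}

	var cache = &Cache[T]{
		countPieces:    uint64(countPieces),
		maxItems:       maxItems,
		maxPiecesPerGC: maxPiecesPerGC,
	}
	for i := 0; i < countPieces; i++ {
		cache.pieces = append(cache.pieces, &Piece[T]{})
	}
	return cache
}

// GC sweeps a window of maxPiecesPerGC pieces starting at gcPieceIndex and
// then advances the cursor, so successive passes cover all pieces round-robin.
func (this *Cache[T]) GC() {
	var index = this.gcPieceIndex
	for i := index; i < index+this.maxPiecesPerGC; i++ {
		if i >= int(this.countPieces) {
			break
		}
		this.pieces[i].GC()
	}

	index += this.maxPiecesPerGC
	if index >= int(this.countPieces) {
		index = 0
	}
	this.gcPieceIndex = index // write-back assumed; not visible in the hunk
}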


@@ -6,6 +6,7 @@ import (
"github.com/iwind/TeaGo/assert"
"github.com/iwind/TeaGo/rands"
"github.com/iwind/TeaGo/types"
timeutil "github.com/iwind/TeaGo/utils/time"
"runtime"
"strconv"
"sync/atomic"
@@ -46,12 +47,15 @@ func TestCache_Memory(t *testing.T) {
return
}
testutils.StartMemoryStats(t)
var cache = NewCache[int]()
testutils.StartMemoryStats(t, func() {
t.Log(cache.Count(), "items")
})
var count = 20_000_000
for i := 0; i < count; i++ {
cache.Write("a"+strconv.Itoa(i), 1, time.Now().Unix()+3600)
cache.Write("a"+strconv.Itoa(i), 1, time.Now().Unix()+int64(rands.Int(0, 300)))
}
t.Log(cache.Count())
@@ -67,7 +71,7 @@ func TestCache_Memory(t *testing.T) {
cache.Count()
time.Sleep(10 * time.Second)
time.Sleep(3600 * time.Second)
}
func TestCache_IncreaseInt64(t *testing.T) {
@@ -125,6 +129,10 @@ func TestCache_Read(t *testing.T) {
}
func TestCache_GC(t *testing.T) {
if !testutils.IsSingleTesting() {
return
}
var cache = NewCache[int](&PiecesOption{Count: 5})
cache.Write("a", 1, time.Now().Unix()+1)
cache.Write("b", 2, time.Now().Unix()+2)
@@ -159,11 +167,15 @@ func TestCache_GC(t *testing.T) {
}
func TestCache_GC2(t *testing.T) {
if !testutils.IsSingleTesting() {
return
}
runtime.GOMAXPROCS(1)
var cache1 = NewCache[int](NewPiecesOption(32))
for i := 0; i < 1_000_000; i++ {
cache1.Write(strconv.Itoa(i), i, time.Now().Unix()+int64(rands.Int(0, 10)))
var cache1 = NewCache[int](NewPiecesOption(256))
for i := 0; i < 10_000_000; i++ {
cache1.Write(strconv.Itoa(i), i, time.Now().Unix()+10)
}
var cache2 = NewCache[int](NewPiecesOption(5))
@@ -171,8 +183,8 @@ func TestCache_GC2(t *testing.T) {
cache2.Write(strconv.Itoa(i), i, time.Now().Unix()+int64(rands.Int(0, 10)))
}
for i := 0; i < 100; i++ {
t.Log(cache1.Count(), "items", cache2.Count(), "items")
for i := 0; i < 3600; i++ {
t.Log(timeutil.Format("H:i:s"), cache1.Count(), "items", cache2.Count(), "items")
time.Sleep(1 * time.Second)
}
}
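
For comparison with TestCache_GC2 above, here is a hand-driven sketch that calls the exported GC() directly instead of only sleeping and observing Count(). It is not part of this commit: the test name is made up, the sizes and TTLs are arbitrary, and it assumes being placed next to the existing tests in package ttlcache (which already import strconv, testing and time). Only NewCache, NewPiecesOption, Write, Count and GC are taken from the diff.

func TestCache_ManualGC(t *testing.T) {
	var cache = NewCache[int](NewPiecesOption(256))

	// Fill with entries that all expire within 10 seconds.
	for i := 0; i < 100_000; i++ {
		cache.Write(strconv.Itoa(i), i, time.Now().Unix()+10)
	}

	// Each GC() pass only sweeps up to maxPiecesPerGC of the 256 pieces,
	// so several passes are needed before the whole cache has been scanned.
	for i := 0; i < 60; i++ {
		cache.GC()
		t.Log(cache.Count(), "items")
		time.Sleep(1 * time.Second)
	}
}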


@@ -105,14 +105,14 @@ func (this *Piece[T]) GC() {
this.lastGCTime = currentTime - 3600
}
var min = this.lastGCTime
var max = currentTime
if min > max {
var minTime = this.lastGCTime
var maxTime = currentTime
if minTime > maxTime {
// if the recorded past time is greater than now, restart from this second
min = max
minTime = maxTime
}
for i := min; i <= max; i++ {
for i := minTime; i <= maxTime; i++ {
var itemMap = this.expiresList.GC(i)
if len(itemMap) > 0 {
this.gcItemMap(itemMap)
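
The third file only renames min/max to minTime/maxTime inside Piece.GC while keeping the same window logic: walk every second between the previous GC and now, clamping when the recorded time is ahead of the clock. The self-contained sketch below illustrates that window with a hypothetical per-second expiry index; the real package's expiresList, item and field types differ, and the guard around lastGCTime plus the final write-back are assumed, since they fall outside the hunk.

package main

import (
	"fmt"
	"time"
)

// piece is a hypothetical stand-in: items grouped by the Unix second at
// which they expire, instead of the package's own expiresList.
type piece struct {
	items      map[string]int
	expiresAt  map[int64][]string // second -> keys expiring at that second
	lastGCTime int64
}

// gc mirrors the time-window logic from the hunk above: walk every second
// between the previous GC and now, clamp if the recorded time is ahead of
// the clock, and drop whatever expired in that window.
func (p *piece) gc() {
	var currentTime = time.Now().Unix() // the real code may use its fasttime helper
	if p.lastGCTime == 0 {
		p.lastGCTime = currentTime - 3600 // first run: look back at most an hour
	}

	var minTime = p.lastGCTime
	var maxTime = currentTime
	if minTime > maxTime {
		// the recorded time is ahead of the clock, so restart from this second
		minTime = maxTime
	}

	for i := minTime; i <= maxTime; i++ {
		for _, key := range p.expiresAt[i] {
			delete(p.items, key)
		}
		delete(p.expiresAt, i)
	}

	p.lastGCTime = currentTime
}

func main() {
	var p = &piece{
		items:     map[string]int{"a": 1},
		expiresAt: map[int64][]string{time.Now().Unix(): {"a"}},
	}
	p.gc()
	fmt.Println(len(p.items), "items left") // 0 items left
}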