Files
EdgeNode/internal/utils/ttlcache/cache_test.go

330 lines
6.9 KiB
Go
Raw Normal View History

2020-11-21 21:43:03 +08:00
package ttlcache
import (
"github.com/TeaOSLab/EdgeNode/internal/utils/fasttime"
2024-03-28 17:17:34 +08:00
memutils "github.com/TeaOSLab/EdgeNode/internal/utils/mem"
2022-03-12 18:00:22 +08:00
"github.com/TeaOSLab/EdgeNode/internal/utils/testutils"
2022-04-09 18:28:22 +08:00
"github.com/iwind/TeaGo/assert"
"github.com/iwind/TeaGo/rands"
2022-04-09 18:28:22 +08:00
"github.com/iwind/TeaGo/types"
2023-10-12 16:03:52 +08:00
timeutil "github.com/iwind/TeaGo/utils/time"
"runtime"
2023-12-25 16:57:25 +08:00
"runtime/debug"
"strconv"
2024-05-08 11:10:56 +08:00
"sync"
2022-04-09 18:28:22 +08:00
"sync/atomic"
"testing"
"time"
)
func TestNewCache(t *testing.T) {
2023-10-05 08:28:16 +08:00
var cache = NewCache[int]()
2020-11-21 21:43:03 +08:00
cache.Write("a", 1, time.Now().Unix()+3600)
2023-04-25 17:38:59 +08:00
cache.Write("b", 2, time.Now().Unix()+1)
cache.Write("c", 1, time.Now().Unix()+3602)
2020-11-21 21:43:03 +08:00
cache.Write("d", 1, time.Now().Unix()+1)
for _, piece := range cache.pieces {
if len(piece.m) > 0 {
for k, item := range piece.m {
2024-05-08 11:10:56 +08:00
t.Log(k, "=>", item.Value, item.expiresAt)
}
}
}
2022-04-09 18:28:22 +08:00
t.Log("a:", cache.Read("a"))
2023-10-04 14:56:26 +08:00
if testutils.IsSingleTesting() {
time.Sleep(5 * time.Second)
}
2023-04-25 17:38:59 +08:00
for i := 0; i < len(cache.pieces); i++ {
cache.GC()
}
t.Log("b:", cache.Read("b"))
t.Log("d:", cache.Read("d"))
2022-04-09 18:28:22 +08:00
t.Log("left:", cache.Count(), "items")
}
func TestCache_Memory(t *testing.T) {
2023-10-02 19:48:11 +08:00
if !testutils.IsSingleTesting() {
return
}
2023-10-05 08:28:16 +08:00
var cache = NewCache[int]()
2023-12-25 16:57:25 +08:00
var isReady bool
2023-10-12 16:03:52 +08:00
testutils.StartMemoryStats(t, func() {
2023-12-25 16:57:25 +08:00
if !isReady {
return
}
2023-10-12 16:03:52 +08:00
t.Log(cache.Count(), "items")
})
2023-12-25 16:57:25 +08:00
var count = 1_000_000
2024-03-28 17:17:34 +08:00
if memutils.SystemMemoryGB() > 4 {
2023-12-25 16:57:25 +08:00
count = 20_000_000
}
2024-05-08 11:10:56 +08:00
var concurrent = runtime.NumCPU()
var wg = &sync.WaitGroup{}
wg.Add(concurrent)
var id int64
for i := 0; i < concurrent; i++ {
go func() {
defer wg.Done()
for {
var newId = atomic.AddInt64(&id, 1)
if newId > int64(count) {
return
}
cache.Write("a"+types.String(newId), 1, time.Now().Unix()+int64(rands.Int(1, 300)))
}
}()
}
2024-05-08 11:10:56 +08:00
wg.Wait()
2021-12-22 16:43:16 +08:00
2023-12-25 16:57:25 +08:00
func() {
var before = time.Now()
runtime.GC()
var costSeconds = time.Since(before).Seconds()
var stats = &debug.GCStats{}
debug.ReadGCStats(stats)
t.Log("GC pause:", stats.Pause[0].Seconds()*1000, "ms", "cost:", costSeconds*1000, "ms")
}()
isReady = true
2022-03-12 18:00:22 +08:00
t.Log(cache.Count())
2022-04-09 18:28:22 +08:00
time.Sleep(10 * time.Second)
2022-03-12 18:00:22 +08:00
for i := 0; i < count; i++ {
if i%2 == 0 {
cache.Delete("a" + strconv.Itoa(i))
}
}
t.Log(cache.Count())
2021-12-22 16:43:16 +08:00
2022-03-12 18:00:22 +08:00
cache.Count()
2023-10-12 16:03:52 +08:00
time.Sleep(3600 * time.Second)
}
2020-11-22 12:11:39 +08:00
func TestCache_IncreaseInt64(t *testing.T) {
2022-04-09 18:28:22 +08:00
var a = assert.NewAssertion(t)
2023-10-05 08:28:16 +08:00
var cache = NewCache[int64]()
2022-04-09 18:28:22 +08:00
var unixTime = time.Now().Unix()
2020-11-22 12:11:39 +08:00
{
cache.IncreaseInt64("a", 1, unixTime+3600, false)
2022-04-09 18:28:22 +08:00
var item = cache.Read("a")
t.Log(item)
2023-10-05 08:28:16 +08:00
a.IsTrue(item.Value == 1)
2024-05-08 11:10:56 +08:00
a.IsTrue(item.expiresAt == unixTime+3600)
2020-11-22 12:11:39 +08:00
}
{
cache.IncreaseInt64("a", 1, unixTime+3600+1, true)
2022-04-09 18:28:22 +08:00
var item = cache.Read("a")
t.Log(item)
2023-10-05 08:28:16 +08:00
a.IsTrue(item.Value == 2)
2024-05-08 11:10:56 +08:00
a.IsTrue(item.expiresAt == unixTime+3600+1)
2020-11-22 12:11:39 +08:00
}
{
cache.Write("b", 1, time.Now().Unix()+3600+2)
t.Log(cache.Read("b"))
}
{
cache.IncreaseInt64("b", 1, time.Now().Unix()+3600+3, false)
2020-11-22 12:11:39 +08:00
t.Log(cache.Read("b"))
}
}
func TestCache_Read(t *testing.T) {
2024-01-21 11:13:30 +08:00
if !testutils.IsSingleTesting() {
return
}
runtime.GOMAXPROCS(1)
2023-10-05 08:28:16 +08:00
var cache = NewCache[int](PiecesOption{Count: 32})
for i := 0; i < 10_000_000; i++ {
2020-11-21 21:43:03 +08:00
cache.Write("HELLO_WORLD_"+strconv.Itoa(i), i, time.Now().Unix()+int64(i%10240)+1)
}
2021-09-19 16:11:46 +08:00
time.Sleep(10 * time.Second)
2020-11-21 21:43:03 +08:00
total := 0
for _, piece := range cache.pieces {
2020-11-21 21:43:03 +08:00
//t.Log(len(piece.m), "keys")
total += len(piece.m)
}
2020-11-21 21:43:03 +08:00
t.Log(total, "total keys")
before := time.Now()
for i := 0; i < 10_240; i++ {
_ = cache.Read("HELLO_WORLD_" + strconv.Itoa(i))
}
t.Log(time.Since(before).Seconds()*1000, "ms")
}
func TestCache_GC(t *testing.T) {
2023-10-12 16:03:52 +08:00
if !testutils.IsSingleTesting() {
return
}
2023-10-05 08:28:16 +08:00
var cache = NewCache[int](&PiecesOption{Count: 5})
2020-11-21 21:43:03 +08:00
cache.Write("a", 1, time.Now().Unix()+1)
cache.Write("b", 2, time.Now().Unix()+2)
cache.Write("c", 3, time.Now().Unix()+3)
cache.Write("d", 4, time.Now().Unix()+4)
cache.Write("e", 5, time.Now().Unix()+10)
go func() {
for i := 0; i < 1000; i++ {
2020-11-21 21:43:03 +08:00
cache.Write("f", 1, time.Now().Unix()+1)
time.Sleep(10 * time.Millisecond)
}
}()
for i := 0; i < 20; i++ {
cache.GC()
t.Log("items:", cache.Count())
2022-04-09 18:28:22 +08:00
if cache.Count() == 0 {
break
}
time.Sleep(1 * time.Second)
}
t.Log("now:", time.Now().Unix())
for _, p := range cache.pieces {
2022-04-09 18:28:22 +08:00
t.Log("expire list:", p.expiresList.Count(), p.expiresList)
for k, v := range p.m {
2024-05-08 11:10:56 +08:00
t.Log(k, v.Value, v.expiresAt)
}
}
}
func TestCache_GC2(t *testing.T) {
2023-10-12 16:03:52 +08:00
if !testutils.IsSingleTesting() {
return
}
runtime.GOMAXPROCS(1)
2023-10-12 16:03:52 +08:00
var cache1 = NewCache[int](NewPiecesOption(256))
for i := 0; i < 10_000_000; i++ {
cache1.Write(strconv.Itoa(i), i, time.Now().Unix()+10)
}
2023-10-05 08:28:16 +08:00
var cache2 = NewCache[int](NewPiecesOption(5))
for i := 0; i < 1_000_000; i++ {
2024-04-21 08:48:33 +08:00
cache2.Write(strconv.Itoa(i), i, time.Now().Unix()+int64(rands.Int(0, 20)))
}
2023-10-12 16:03:52 +08:00
for i := 0; i < 3600; i++ {
t.Log(timeutil.Format("H:i:s"), cache1.Count(), "items", cache2.Count(), "items")
2024-04-21 08:48:33 +08:00
if cache1.Count() == 0 && cache2.Count() == 0 {
break
}
time.Sleep(1 * time.Second)
}
}
2022-04-09 10:02:09 +08:00
2022-04-09 18:44:51 +08:00
func TestCacheDestroy(t *testing.T) {
2023-10-05 08:28:16 +08:00
var cache = NewCache[int]()
2022-04-09 18:44:51 +08:00
t.Log("count:", SharedManager.Count())
cache.Destroy()
t.Log("count:", SharedManager.Count())
}
2024-05-08 11:10:56 +08:00
// TestCache_Clean ensures Clean can be called on a freshly-created, empty
// cache without issues.
func TestCache_Clean(t *testing.T) {
	NewCache[int]().Clean()
}
// TestCache_Destroy fills a cache with entries and verifies Destroy
// removes it from the shared manager afterwards.
func TestCache_Destroy(t *testing.T) {
	var cache = NewCache[int]()
	t.Log(SharedManager.Count())

	for i := 0; i < 1_000; i++ {
		cache.Write("a"+types.String(i), 1, fasttime.Now().Unix()+3600)
	}
	cache.Destroy()

	t.Log(SharedManager.Count())
}
2022-04-09 10:02:09 +08:00
func BenchmarkNewCache(b *testing.B) {
runtime.GOMAXPROCS(1)
2023-10-05 08:28:16 +08:00
var cache = NewCache[int](NewPiecesOption(128))
2022-04-09 10:02:09 +08:00
for i := 0; i < 2_000_000; i++ {
cache.Write(strconv.Itoa(i), i, time.Now().Unix()+int64(rands.Int(10, 100)))
}
b.Log("start reading ...")
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
cache.Read(strconv.Itoa(rands.Int(0, 999999)))
}
})
}
2022-04-09 18:28:22 +08:00
func BenchmarkCache_Add(b *testing.B) {
runtime.GOMAXPROCS(1)
2023-10-05 08:28:16 +08:00
var cache = NewCache[int]()
2022-04-09 18:28:22 +08:00
for i := 0; i < b.N; i++ {
cache.Write(strconv.Itoa(i), i, fasttime.Now().Unix()+int64(i%1024))
2022-04-09 18:28:22 +08:00
}
}
func BenchmarkCache_Add_Parallel(b *testing.B) {
runtime.GOMAXPROCS(1)
2023-10-05 08:28:16 +08:00
var cache = NewCache[int64]()
2022-04-09 18:28:22 +08:00
var i int64
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
var j = atomic.AddInt64(&i, 1)
cache.Write(types.String(j%1e6), j, fasttime.Now().Unix()+i%1024)
2022-04-09 18:28:22 +08:00
}
})
}
func BenchmarkNewCacheGC(b *testing.B) {
runtime.GOMAXPROCS(1)
2023-10-05 08:28:16 +08:00
var cache = NewCache[int](NewPiecesOption(1024))
2022-04-09 18:28:22 +08:00
for i := 0; i < 3_000_000; i++ {
cache.Write(strconv.Itoa(i), i, time.Now().Unix()+int64(rands.Int(0, 100)))
}
//b.Log(cache.pieces[0].Count())
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
cache.GC()
}
})
}
func BenchmarkNewCacheClean(b *testing.B) {
runtime.GOMAXPROCS(1)
2023-10-05 08:28:16 +08:00
var cache = NewCache[int](NewPiecesOption(128))
2022-04-09 18:28:22 +08:00
for i := 0; i < 3_000_000; i++ {
cache.Write(strconv.Itoa(i), i, time.Now().Unix()+int64(rands.Int(10, 100)))
}
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
cache.Clean()
}
})
}