diff --git a/Makefile b/Makefile
index 5b3f26b..fffa15f 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,5 @@
 t:
-	go test ./...
+	go test ./... -race -count=1
 
 f:
 	go fmt ./...
diff --git a/cache.go b/cache.go
index b3e6969..4b2f474 100644
--- a/cache.go
+++ b/cache.go
@@ -141,6 +141,9 @@ func (c *Cache) Replace(key string, value interface{}) bool {
 // Attempts to get the value from the cache and calles fetch on a miss (missing
 // or stale item). If fetch returns an error, no value is cached and the error
 // is returned back to the caller.
+// Note that Fetch merely calls the public Get and Set functions. If you want
+// a different Fetch behavior, such as thundering herd protection or returning
+// expired items, implement it in your application.
 func (c *Cache) Fetch(key string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error) {
 	item := c.Get(key)
 	if item != nil && !item.Expired() {
@@ -373,7 +376,13 @@ func (c *Cache) doPromote(item *Item) bool {
 func (c *Cache) gc() int {
 	dropped := 0
 	element := c.list.Back()
-	for i := 0; i < c.itemsToPrune; i++ {
+
+	itemsToPrune := int64(c.itemsToPrune)
+	if min := c.size - c.maxSize; min > itemsToPrune {
+		itemsToPrune = min
+	}
+
+	for i := int64(0); i < itemsToPrune; i++ {
 		if element == nil {
 			return dropped
 		}
diff --git a/cache_test.go b/cache_test.go
index 33c7f4d..2b0ee9b 100644
--- a/cache_test.go
+++ b/cache_test.go
@@ -18,6 +18,7 @@ func Test_Cache(t *testing.T) {
 
 func (_ CacheTests) DeletesAValue() {
 	cache := New(Configure())
+	defer cache.Stop()
 	Expect(cache.ItemCount()).To.Equal(0)
 
 	cache.Set("spice", "flow", time.Minute)
@@ -32,6 +33,7 @@ func (_ CacheTests) DeletesAValue() {
 
 func (_ CacheTests) DeletesAPrefix() {
 	cache := New(Configure())
+	defer cache.Stop()
 	Expect(cache.ItemCount()).To.Equal(0)
 
 	cache.Set("aaa", "1", time.Minute)
@@ -55,6 +57,7 @@ func (_ CacheTests) DeletesAPrefix() {
 
 func (_ CacheTests) DeletesAFunc() {
 	cache := New(Configure())
+	defer cache.Stop()
 	Expect(cache.ItemCount()).To.Equal(0)
 
 	cache.Set("a", 1, time.Minute)
diff --git a/go.mod b/go.mod
index eebeeb9..434fead 100644
--- a/go.mod
+++ b/go.mod
@@ -3,6 +3,6 @@ module github.com/karlseguin/ccache/v2
 go 1.13
 
 require (
-	github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003
+	github.com/karlseguin/expect v1.0.7
 	github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0
 )
diff --git a/go.sum b/go.sum
index f2d23bc..863759d 100644
--- a/go.sum
+++ b/go.sum
@@ -2,5 +2,7 @@ github.com/karlseguin/expect v1.0.1 h1:z4wy4npwwHSWKjGWH85WNJO42VQhovxTCZDSzhjo8
 github.com/karlseguin/expect v1.0.1/go.mod h1:zNBxMY8P21owkeogJELCLeHIt+voOSduHYTFUbwRAV8=
 github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003 h1:vJ0Snvo+SLMY72r5J4sEfkuE7AFbixEP2qRbEcum/wA=
 github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003/go.mod h1:zNBxMY8P21owkeogJELCLeHIt+voOSduHYTFUbwRAV8=
+github.com/karlseguin/expect v1.0.7 h1:OF4mqjblc450v8nKARBS5Q0AweBNR0A+O3VjjpxwBrg=
+github.com/karlseguin/expect v1.0.7/go.mod h1:lXdI8iGiQhmzpnnmU/EGA60vqKs8NbRNFnhhrJGoD5g=
 github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0 h1:3UeQBvD0TFrlVjOeLOBz+CPAI8dnbqNSVwUwRrkp7vQ=
 github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0/go.mod h1:IXCdmsXIht47RaVFLEdVnh1t+pgYtTAhQGj73kz+2DM=
diff --git a/layeredcache.go b/layeredcache.go
index a516c7a..8573753 100644
--- a/layeredcache.go
+++ b/layeredcache.go
@@ -131,6 +131,9 @@ func (c *LayeredCache) Replace(primary, secondary string, value interface{}) boo
 // Attempts to get the value from the cache and calles fetch on a miss.
 // If fetch returns an error, no value is cached and the error is returned back
 // to the caller.
+// Note that Fetch merely calls the public Get and Set functions. If you want
+// a different Fetch behavior, such as thundering herd protection or returning
+// expired items, implement it in your application.
 func (c *LayeredCache) Fetch(primary, secondary string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error) {
 	item := c.Get(primary, secondary)
 	if item != nil {
@@ -297,7 +300,13 @@ func (c *LayeredCache) doPromote(item *Item) bool {
 func (c *LayeredCache) gc() int {
 	element := c.list.Back()
 	dropped := 0
-	for i := 0; i < c.itemsToPrune; i++ {
+	itemsToPrune := int64(c.itemsToPrune)
+
+	if min := c.size - c.maxSize; min > itemsToPrune {
+		itemsToPrune = min
+	}
+
+	for i := int64(0); i < itemsToPrune; i++ {
 		if element == nil {
 			return dropped
 		}
diff --git a/layeredcache_test.go b/layeredcache_test.go
index a7492d5..feb41ba 100644
--- a/layeredcache_test.go
+++ b/layeredcache_test.go
@@ -235,6 +235,8 @@ func (_ LayeredCacheTests) ResizeOnTheFly() {
 	for i := 0; i < 5; i++ {
 		cache.Set(strconv.Itoa(i), "a", i, time.Minute)
 	}
+	cache.SyncUpdates()
+
 	cache.SetMaxSize(3)
 	cache.SyncUpdates()
 	Expect(cache.GetDropped()).To.Equal(2)
diff --git a/readme.md b/readme.md
index 8617f18..b45a487 100644
--- a/readme.md
+++ b/readme.md
@@ -86,6 +86,8 @@ item, err := cache.Fetch("user:4", time.Minute * 10, func() (interface{}, error)
 })
 ```
 
+`Fetch` doesn't do anything fancy: it merely uses the public `Get` and `Set` functions. If you want more advanced behavior, such as using a singleflight to protect against thundering herd, supporting a callback that accepts the key, or returning expired items, you should implement that in your application.
+
 ### Delete
 
 `Delete` expects the key to delete. It's ok to call `Delete` on a non-existent key:
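
The new doc comments and readme paragraph steer users toward application-level `Fetch` wrappers rather than adding options to ccache itself. As a rough illustration of that suggestion, here is a minimal sketch of thundering herd protection layered on the public `Get`/`Set` calls. The `HerdCache` type, the `FetchOnce` name, and the choice of `golang.org/x/sync/singleflight` are illustrative assumptions, not part of this patch or of ccache.

```go
package main

import (
	"fmt"
	"time"

	"github.com/karlseguin/ccache/v2"
	"golang.org/x/sync/singleflight"
)

// HerdCache wraps a ccache.Cache with per-key request coalescing.
type HerdCache struct {
	cache *ccache.Cache
	group singleflight.Group
}

// FetchOnce behaves like Cache.Fetch, but concurrent misses for the same key
// share a single call to fetch instead of hitting the backend in parallel.
func (h *HerdCache) FetchOnce(key string, duration time.Duration, fetch func() (interface{}, error)) (interface{}, error) {
	if item := h.cache.Get(key); item != nil && !item.Expired() {
		return item.Value(), nil
	}
	value, err, _ := h.group.Do(key, func() (interface{}, error) {
		// Re-check: another goroutine may have stored the key while we waited.
		if item := h.cache.Get(key); item != nil && !item.Expired() {
			return item.Value(), nil
		}
		value, err := fetch()
		if err != nil {
			return nil, err // like Fetch, cache nothing on error
		}
		h.cache.Set(key, value, duration)
		return value, nil
	})
	return value, err
}

func main() {
	h := &HerdCache{cache: ccache.New(ccache.Configure())}
	defer h.cache.Stop()

	value, err := h.FetchOnce("user:4", 10*time.Minute, func() (interface{}, error) {
		return "loaded once, even under concurrent misses", nil // stand-in for a slow lookup
	})
	fmt.Println(value, err)
}
```

Collapsing misses per key means a slow fetch runs once even when many goroutines ask for the same cold key at the same time, which is exactly the behavior the doc comment leaves to the application.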