Conditionally prune more than itemsToPrune items

It's possible, though unlikely, for c.size to exceed c.maxSize by more
than c.itemsToPrune. This is most likely to happen when SetMaxSize is
used to shrink the cache at runtime. The gc now always prunes enough
items to bring the size back down to c.maxSize.
Karl Seguin
2021-03-18 19:29:04 +08:00
parent f28a7755a1
commit df2d98315c
7 changed files with 24 additions and 6 deletions
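
In effect, each gc pass now prunes max(c.itemsToPrune, c.size - c.maxSize) items. A standalone sketch of that rule (the function and parameter names below are local to this example, not ccache's internal fields):

package main

import "fmt"

// pruneCount mirrors the calculation added in this commit.
func pruneCount(itemsToPrune, size, maxSize int64) int64 {
	n := itemsToPrune
	// When the cache has overshot maxSize by more than itemsToPrune
	// (e.g. after SetMaxSize shrank the limit), prune the whole overshoot.
	if overshoot := size - maxSize; overshoot > n {
		n = overshoot
	}
	return n
}

func main() {
	fmt.Println(pruneCount(500, 5000, 3))    // 4997: clears back down to maxSize
	fmt.Println(pruneCount(500, 5100, 5000)) // 500: normal case, small overshoot
}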

Makefile

@@ -1,5 +1,5 @@
 t:
-	go test ./...
+	go test ./... -race -count=1
 
 f:
 	go fmt ./...

cache.go

@@ -304,7 +304,13 @@ func (c *Cache) doPromote(item *Item) bool {
 func (c *Cache) gc() int {
 	dropped := 0
 	element := c.list.Back()
-	for i := 0; i < c.itemsToPrune; i++ {
+
+	itemsToPrune := int64(c.itemsToPrune)
+	if min := c.size - c.maxSize; min > itemsToPrune {
+		itemsToPrune = min
+	}
+
+	for i := int64(0); i < itemsToPrune; i++ {
 		if element == nil {
 			return dropped
 		}
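
From the caller's side, the change shows up through SetMaxSize and GetDropped. The sketch below uses ccache's public API; the sizes and sleep durations are arbitrary illustrative values, and the sleeps exist only because gc runs on the cache's background worker:

package main

import (
	"fmt"
	"strconv"
	"time"

	"github.com/karlseguin/ccache/v2"
)

func main() {
	// itemsToPrune is 10, but SetMaxSize will leave the cache far more than
	// 10 items over its limit.
	cache := ccache.New(ccache.Configure().MaxSize(1000).ItemsToPrune(10))
	defer cache.Stop()

	for i := 0; i < 1000; i++ {
		cache.Set(strconv.Itoa(i), i, time.Minute)
	}
	time.Sleep(20 * time.Millisecond) // let the worker register the writes

	cache.SetMaxSize(100)             // shrink well below the current size
	time.Sleep(20 * time.Millisecond) // gc runs on the background worker

	// Previously a single gc pass dropped at most itemsToPrune (10) items;
	// now it prunes down to the new maxSize, so roughly 900 drops are reported.
	fmt.Println(cache.GetDropped())
}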

cache_test.go

@@ -18,6 +18,7 @@ func Test_Cache(t *testing.T) {
 func (_ CacheTests) DeletesAValue() {
 	cache := New(Configure())
+	defer cache.Stop()
 	Expect(cache.ItemCount()).To.Equal(0)
 
 	cache.Set("spice", "flow", time.Minute)
@@ -32,6 +33,7 @@ func (_ CacheTests) DeletesAValue() {
 func (_ CacheTests) DeletesAPrefix() {
 	cache := New(Configure())
+	defer cache.Stop()
 	Expect(cache.ItemCount()).To.Equal(0)
 
 	cache.Set("aaa", "1", time.Minute)
@@ -55,6 +57,7 @@ func (_ CacheTests) DeletesAPrefix() {
 func (_ CacheTests) DeletesAFunc() {
 	cache := New(Configure())
+	defer cache.Stop()
 	Expect(cache.ItemCount()).To.Equal(0)
 	cache.Set("a", 1, time.Minute)

go.mod

@@ -3,6 +3,6 @@ module github.com/karlseguin/ccache/v2
 go 1.13
 
 require (
-	github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003
+	github.com/karlseguin/expect v1.0.7
 	github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0
 )

go.sum

@@ -2,5 +2,7 @@ github.com/karlseguin/expect v1.0.1 h1:z4wy4npwwHSWKjGWH85WNJO42VQhovxTCZDSzhjo8
 github.com/karlseguin/expect v1.0.1/go.mod h1:zNBxMY8P21owkeogJELCLeHIt+voOSduHYTFUbwRAV8=
 github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003 h1:vJ0Snvo+SLMY72r5J4sEfkuE7AFbixEP2qRbEcum/wA=
 github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003/go.mod h1:zNBxMY8P21owkeogJELCLeHIt+voOSduHYTFUbwRAV8=
+github.com/karlseguin/expect v1.0.7 h1:OF4mqjblc450v8nKARBS5Q0AweBNR0A+O3VjjpxwBrg=
+github.com/karlseguin/expect v1.0.7/go.mod h1:lXdI8iGiQhmzpnnmU/EGA60vqKs8NbRNFnhhrJGoD5g=
 github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0 h1:3UeQBvD0TFrlVjOeLOBz+CPAI8dnbqNSVwUwRrkp7vQ=
 github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0/go.mod h1:IXCdmsXIht47RaVFLEdVnh1t+pgYtTAhQGj73kz+2DM=

layeredcache.go

@@ -286,7 +286,13 @@ func (c *LayeredCache) doPromote(item *Item) bool {
 func (c *LayeredCache) gc() int {
 	element := c.list.Back()
 	dropped := 0
-	for i := 0; i < c.itemsToPrune; i++ {
+
+	itemsToPrune := int64(c.itemsToPrune)
+	if min := c.size - c.maxSize; min > itemsToPrune {
+		itemsToPrune = min
+	}
+
+	for i := int64(0); i < itemsToPrune; i++ {
 		if element == nil {
 			return dropped
 		}

layeredcache_test.go

@@ -235,8 +235,9 @@ func (_ LayeredCacheTests) ResizeOnTheFly() {
 	for i := 0; i < 5; i++ {
 		cache.Set(strconv.Itoa(i), "a", i, time.Minute)
 	}
+	time.Sleep(time.Millisecond * 20)
 	cache.SetMaxSize(3)
-	time.Sleep(time.Millisecond * 10)
+	time.Sleep(time.Millisecond * 20)
 	Expect(cache.GetDropped()).To.Equal(2)
 	Expect(cache.Get("0", "a")).To.Equal(nil)
 	Expect(cache.Get("1", "a")).To.Equal(nil)
@@ -245,7 +246,7 @@ func (_ LayeredCacheTests) ResizeOnTheFly() {
 	Expect(cache.Get("4", "a").Value()).To.Equal(4)
 	cache.Set("5", "a", 5, time.Minute)
-	time.Sleep(time.Millisecond * 5)
+	time.Sleep(time.Millisecond * 10)
 	Expect(cache.GetDropped()).To.Equal(1)
 	Expect(cache.Get("2", "a")).To.Equal(nil)
 	Expect(cache.Get("3", "a").Value()).To.Equal(3)