Merge branch 'upstream-mirror' into sync-updates

# Conflicts:
#	layeredcache_test.go
Author: Eli Bishop
Date:   2021-03-18 20:27:43 -07:00
8 changed files with 31 additions and 4 deletions

Makefile

@@ -1,5 +1,5 @@
t:
	go test ./...
	go test ./... -race -count=1
f:
	go fmt ./...

cache.go

@@ -141,6 +141,9 @@ func (c *Cache) Replace(key string, value interface{}) bool {
// Attempts to get the value from the cache and calls fetch on a miss (missing
// or stale item). If fetch returns an error, no value is cached and the error
// is returned back to the caller.
// Note that Fetch merely calls the public Get and Set functions. If you want
// a different Fetch behavior, such as thundering herd protection or returning
// expired items, implement it in your application.
func (c *Cache) Fetch(key string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error) {
	item := c.Get(key)
	if item != nil && !item.Expired() {
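The new comment makes the design explicit: Fetch is only a composition of the public Get and Set, so any variant behavior belongs in application code. As one illustration, a caller that prefers serving a stale item when a refetch fails could write something like the sketch below (the helper name fetchStale is invented here, not part of ccache):

```go
import (
	"time"

	"github.com/karlseguin/ccache/v2"
)

// fetchStale is a sketch of an application-side Fetch variant: return a fresh
// item when possible, refetch on expiry, and fall back to the stale item if
// the refetch fails rather than returning an error.
func fetchStale(c *ccache.Cache, key string, ttl time.Duration, fetch func() (interface{}, error)) (*ccache.Item, error) {
	item := c.Get(key)
	if item != nil && !item.Expired() {
		return item, nil
	}
	value, err := fetch()
	if err != nil {
		if item != nil {
			return item, nil // serve the expired item instead of failing
		}
		return nil, err
	}
	c.Set(key, value, ttl)
	return c.Get(key), nil
}
```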
@@ -373,7 +376,13 @@ func (c *Cache) doPromote(item *Item) bool {
func (c *Cache) gc() int {
	dropped := 0
	element := c.list.Back()
	for i := 0; i < c.itemsToPrune; i++ {
	itemsToPrune := int64(c.itemsToPrune)
	if min := c.size - c.maxSize; min > itemsToPrune {
		itemsToPrune = min
	}
	for i := int64(0); i < itemsToPrune; i++ {
		if element == nil {
			return dropped
		}
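With this change gc no longer prunes a fixed c.itemsToPrune entries: it prunes at least that many, but more when the cache is over its limit, with `min` in the diff being the minimum number of drops needed to get back under maxSize. A small sketch of just that count calculation (pruneCount is a hypothetical helper, not library code):

```go
// pruneCount mirrors the new logic: drop at least the configured number of
// items, but never fewer than needed to bring size back under maxSize.
func pruneCount(itemsToPrune, size, maxSize int64) int64 {
	count := itemsToPrune
	if over := size - maxSize; over > count {
		count = over
	}
	return count
}
```

For example, with itemsToPrune 500, size 5003 and maxSize 5000 it returns 500; with size 6000 it returns 1000.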

cache_test.go

@@ -18,6 +18,7 @@ func Test_Cache(t *testing.T) {
func (_ CacheTests) DeletesAValue() {
	cache := New(Configure())
	defer cache.Stop()
	Expect(cache.ItemCount()).To.Equal(0)
	cache.Set("spice", "flow", time.Minute)
@@ -32,6 +33,7 @@ func (_ CacheTests) DeletesAValue() {
func (_ CacheTests) DeletesAPrefix() {
	cache := New(Configure())
	defer cache.Stop()
	Expect(cache.ItemCount()).To.Equal(0)
	cache.Set("aaa", "1", time.Minute)
@@ -55,6 +57,7 @@ func (_ CacheTests) DeletesAPrefix() {
func (_ CacheTests) DeletesAFunc() {
	cache := New(Configure())
	defer cache.Stop()
	Expect(cache.ItemCount()).To.Equal(0)
	cache.Set("a", 1, time.Minute)

go.mod

@@ -3,6 +3,6 @@ module github.com/karlseguin/ccache/v2
go 1.13

require (
	github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003
	github.com/karlseguin/expect v1.0.7
	github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0
)

go.sum

@@ -2,5 +2,7 @@ github.com/karlseguin/expect v1.0.1 h1:z4wy4npwwHSWKjGWH85WNJO42VQhovxTCZDSzhjo8
github.com/karlseguin/expect v1.0.1/go.mod h1:zNBxMY8P21owkeogJELCLeHIt+voOSduHYTFUbwRAV8=
github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003 h1:vJ0Snvo+SLMY72r5J4sEfkuE7AFbixEP2qRbEcum/wA=
github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003/go.mod h1:zNBxMY8P21owkeogJELCLeHIt+voOSduHYTFUbwRAV8=
github.com/karlseguin/expect v1.0.7 h1:OF4mqjblc450v8nKARBS5Q0AweBNR0A+O3VjjpxwBrg=
github.com/karlseguin/expect v1.0.7/go.mod h1:lXdI8iGiQhmzpnnmU/EGA60vqKs8NbRNFnhhrJGoD5g=
github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0 h1:3UeQBvD0TFrlVjOeLOBz+CPAI8dnbqNSVwUwRrkp7vQ=
github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0/go.mod h1:IXCdmsXIht47RaVFLEdVnh1t+pgYtTAhQGj73kz+2DM=

layeredcache.go

@@ -131,6 +131,9 @@ func (c *LayeredCache) Replace(primary, secondary string, value interface{}) bool {
// Attempts to get the value from the cache and calls fetch on a miss.
// If fetch returns an error, no value is cached and the error is returned back
// to the caller.
// Note that Fetch merely calls the public Get and Set functions. If you want
// a different Fetch behavior, such as thundering herd protection or returning
// expired items, implement it in your application.
func (c *LayeredCache) Fetch(primary, secondary string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error) {
	item := c.Get(primary, secondary)
	if item != nil {
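The same advice applies to the layered cache, where Fetch is keyed by a primary and a secondary string. A hedged usage sketch in the README's style (the key names and loadProfile are invented for illustration):

```go
var layered = ccache.Layered(ccache.Configure())

item, err := layered.Fetch("user:4", "profile", time.Minute*10, func() (interface{}, error) {
	// runs only on a miss for the ("user:4", "profile") pair
	return loadProfile("user:4")
})
if err == nil {
	profile := item.Value()
	_ = profile
}
```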
@@ -297,7 +300,13 @@ func (c *LayeredCache) doPromote(item *Item) bool {
func (c *LayeredCache) gc() int {
	element := c.list.Back()
	dropped := 0
	for i := 0; i < c.itemsToPrune; i++ {
	itemsToPrune := int64(c.itemsToPrune)
	if min := c.size - c.maxSize; min > itemsToPrune {
		itemsToPrune = min
	}
	for i := int64(0); i < itemsToPrune; i++ {
		if element == nil {
			return dropped
		}

layeredcache_test.go

@@ -235,6 +235,8 @@ func (_ LayeredCacheTests) ResizeOnTheFly() {
	for i := 0; i < 5; i++ {
		cache.Set(strconv.Itoa(i), "a", i, time.Minute)
	}
	cache.SyncUpdates()
	cache.SetMaxSize(3)
	cache.SyncUpdates()
	Expect(cache.GetDropped()).To.Equal(2)
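The two SyncUpdates calls sit between the writes, the resize, and the assertion; since the cache applies updates asynchronously on its worker, syncing first makes GetDropped deterministic. A hedged sketch of the same pattern (the MaxSize/ItemsToPrune configuration here is assumed, not taken from the actual test):

```go
cache := ccache.Layered(ccache.Configure().MaxSize(5).ItemsToPrune(1))
defer cache.Stop()

for i := 0; i < 5; i++ {
	cache.Set(strconv.Itoa(i), "a", i, time.Minute)
}
cache.SyncUpdates()           // let the worker apply the five Sets first
cache.SetMaxSize(3)           // shrinking below the current size forces a prune
cache.SyncUpdates()           // wait for the resize and the prune to finish
dropped := cache.GetDropped() // the test above expects 2 here
```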

readme.md

@@ -86,6 +86,8 @@ item, err := cache.Fetch("user:4", time.Minute * 10, func() (interface{}, error)
})
```
`Fetch` doesn't do anything fancy: it merely uses the public `Get` and `Set` functions. If you want more advanced behavior, such as using a singleflight to protect against thundering herd, supporting a callback that accepts the key, or returning expired items, you should implement that in your application.
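As a concrete illustration of the thundering-herd case, concurrent misses for the same key can be collapsed with golang.org/x/sync/singleflight before the result is written back with `Set`. This is a sketch of one possible application-side wrapper, not part of ccache; the names `fetchOnce` and `group` are made up:

```go
import (
	"time"

	"github.com/karlseguin/ccache/v2"
	"golang.org/x/sync/singleflight"
)

var group singleflight.Group

// fetchOnce behaves like Fetch, but concurrent callers missing on the same
// key share a single invocation of load instead of each hitting the backend.
func fetchOnce(cache *ccache.Cache, key string, ttl time.Duration, load func() (interface{}, error)) (*ccache.Item, error) {
	if item := cache.Get(key); item != nil && !item.Expired() {
		return item, nil
	}
	value, err, _ := group.Do(key, load)
	if err != nil {
		return nil, err
	}
	cache.Set(key, value, ttl)
	return cache.Get(key), nil
}
```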
### Delete
`Delete` expects the key to delete. It's ok to call `Delete` on a non-existent key: