add ForEachFunc

Karl Seguin
2021-02-05 19:24:54 +08:00
parent 36ffada8b5
commit ae1872d700
7 changed files with 129 additions and 8 deletions

bucket.go

@@ -17,6 +17,18 @@ func (b *bucket) itemCount() int {
	return len(b.lookup)
}

func (b *bucket) forEachFunc(matches func(key string, item *Item) bool) bool {
	lookup := b.lookup
	b.RLock()
	defer b.RUnlock()
	for key, item := range lookup {
		if !matches(key, item) {
			return false
		}
	}
	return true
}

func (b *bucket) get(key string) *Item {
	b.RLock()
	defer b.RUnlock()

cache.go

@@ -76,6 +76,14 @@ func (c *Cache) DeleteFunc(matches func(key string, item *Item) bool) int {
	return count
}
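
// ForEachFunc iterates over every key/value pair in the cache and passes each
// to the matches function. Iteration stops as soon as matches returns false.
// Iteration order is not guaranteed.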
func (c *Cache) ForEachFunc(matches func(key string, item *Item) bool) {
	for _, b := range c.buckets {
		if !b.forEachFunc(matches) {
			break
		}
	}
}

// Get an item from the cache. Returns nil if the item wasn't found.
// This can return an expired item. Use item.Expired() to see if the item
// is expired and item.TTL() to see how long until the item expires (which

cache_test.go

@@ -1,6 +1,7 @@
package ccache

import (
	"sort"
	"strconv"
	"sync/atomic"
	"testing"
@@ -273,6 +274,34 @@ func (_ CacheTests) ResizeOnTheFly() {
Expect(cache.Get("6").Value()).To.Equal(6) Expect(cache.Get("6").Value()).To.Equal(6)
} }
func (_ CacheTests) ForEachFunc() {
cache := New(Configure().MaxSize(3).ItemsToPrune(1))
Expect(forEachKeys(cache)).To.Equal([]string{})
cache.Set("1", 1, time.Minute)
Expect(forEachKeys(cache)).To.Equal([]string{"1"})
cache.Set("2", 2, time.Minute)
time.Sleep(time.Millisecond * 10)
Expect(forEachKeys(cache)).To.Equal([]string{"1", "2"})
cache.Set("3", 3, time.Minute)
time.Sleep(time.Millisecond * 10)
Expect(forEachKeys(cache)).To.Equal([]string{"1", "2", "3"})
cache.Set("4", 4, time.Minute)
time.Sleep(time.Millisecond * 10)
Expect(forEachKeys(cache)).To.Equal([]string{"2", "3", "4"})
cache.Set("stop", 5, time.Minute)
time.Sleep(time.Millisecond * 10)
Expect(forEachKeys(cache)).Not.To.Contain("stop")
cache.Set("6", 6, time.Minute)
time.Sleep(time.Millisecond * 10)
Expect(forEachKeys(cache)).Not.To.Contain("stop")
}

type SizedItem struct {
	id int
	s  int64
@@ -293,3 +322,16 @@ func gcCache(cache *Cache) {
	cache.gc()
	cache.restart()
}

func forEachKeys(cache *Cache) []string {
	keys := make([]string, 0, 10)
	cache.ForEachFunc(func(key string, i *Item) bool {
		if key == "stop" {
			return false
		}
		keys = append(keys, key)
		return true
	})
	sort.Strings(keys)
	return keys
}

layeredbucket.go

@@ -102,6 +102,15 @@ func (b *layeredBucket) deleteAll(primary string, deletables chan *Item) bool {
	return true
}

func (b *layeredBucket) forEachFunc(primary string, matches func(key string, item *Item) bool) {
	b.RLock()
	bucket, exists := b.buckets[primary]
	b.RUnlock()
	if exists {
		bucket.forEachFunc(matches)
	}
}

func (b *layeredBucket) clear() {
	b.Lock()
	defer b.Unlock()

layeredcache.go

@@ -73,6 +73,10 @@ func (c *LayeredCache) Get(primary, secondary string) *Item {
	return item
}
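
// ForEachFunc iterates over every key/value pair stored under the given primary
// key, passing each to the matches function. Iteration stops as soon as matches
// returns false.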
func (c *LayeredCache) ForEachFunc(primary string, matches func(key string, item *Item) bool) {
	c.bucket(primary).forEachFunc(primary, matches)
}

// Get the secondary cache for a given primary key. This operation will
// never return nil. In the case where the primary key does not exist, a
// new, underlying, empty bucket will be created and returned.

layeredcache_test.go

@@ -1,6 +1,7 @@
package ccache

import (
	"sort"
	"strconv"
	"sync/atomic"
	"testing"
@@ -261,12 +262,6 @@ func (_ LayeredCacheTests) ResizeOnTheFly() {
Expect(cache.Get("6", "a").Value()).To.Equal(6) Expect(cache.Get("6", "a").Value()).To.Equal(6)
} }
func newLayered() *LayeredCache {
c := Layered(Configure())
c.Clear()
return c
}
func (_ LayeredCacheTests) RemovesOldestItemWhenFullBySizer() { func (_ LayeredCacheTests) RemovesOldestItemWhenFullBySizer() {
cache := Layered(Configure().MaxSize(9).ItemsToPrune(2)) cache := Layered(Configure().MaxSize(9).ItemsToPrune(2))
for i := 0; i < 7; i++ { for i := 0; i < 7; i++ {
@@ -329,6 +324,41 @@ func (_ LayeredCacheTests) ReplaceChangesSize() {
	checkLayeredSize(cache, 5)
}

func (_ LayeredCacheTests) EachFunc() {
	cache := Layered(Configure().MaxSize(3).ItemsToPrune(1))
	Expect(forEachKeysLayered(cache, "1")).To.Equal([]string{})

	cache.Set("1", "a", 1, time.Minute)
	Expect(forEachKeysLayered(cache, "1")).To.Equal([]string{"a"})

	cache.Set("1", "b", 2, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeysLayered(cache, "1")).To.Equal([]string{"a", "b"})

	cache.Set("1", "c", 3, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeysLayered(cache, "1")).To.Equal([]string{"a", "b", "c"})

	cache.Set("1", "d", 4, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeysLayered(cache, "1")).To.Equal([]string{"b", "c", "d"})

	// iteration is non-deterministic, all we know for sure is "stop" should not be in there
	cache.Set("1", "stop", 5, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeysLayered(cache, "1")).Not.To.Contain("stop")

	cache.Set("1", "e", 6, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeysLayered(cache, "1")).Not.To.Contain("stop")
}

func newLayered() *LayeredCache {
	c := Layered(Configure())
	c.Clear()
	return c
}

func checkLayeredSize(cache *LayeredCache, sz int64) {
	cache.Stop()
	Expect(cache.size).To.Equal(sz)
@@ -340,3 +370,16 @@ func gcLayeredCache(cache *LayeredCache) {
	cache.gc()
	cache.restart()
}

func forEachKeysLayered(cache *LayeredCache, primary string) []string {
	keys := make([]string, 0, 10)
	cache.ForEachFunc(primary, func(key string, i *Item) bool {
		if key == "stop" {
			return false
		}
		keys = append(keys, key)
		return true
	})
	sort.Strings(keys)
	return keys
}

readme.md

@@ -97,7 +97,10 @@ cache.Delete("user:4")
`DeletePrefix` deletes all keys matching the provided prefix. Returns the number of keys removed.

### DeleteFunc
`DeleteFunc` deletes all items for which the provided matches func evaluates to true. Returns the number of keys removed.
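
A minimal sketch, assuming the cache holds `*User` values (`User` is a hypothetical type used here only for illustration):

```go
deleted := cache.DeleteFunc(func(key string, item *ccache.Item) bool {
	user, ok := item.Value().(*User)
	return ok && user.Inactive // remove entries whose value matches the predicate
})
fmt.Printf("removed %d inactive users\n", deleted)
```
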
### ForEachFunc
`ForEachFunc` iterates through all keys and values in the cache and passes them to the provided function. Iteration stops if the function returns false. Iteration order is random.
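
A minimal sketch of collecting keys, assuming the same `cache` as above; returning `false` from the callback stops the iteration early:

```go
keys := make([]string, 0)
cache.ForEachFunc(func(key string, item *ccache.Item) bool {
	if item.Expired() {
		return true // skip expired entries, but keep iterating
	}
	keys = append(keys, key)
	return len(keys) < 100 // stop once enough keys have been collected
})
```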

### Clear
`Clear` clears the cache. If the cache's gc is running, `Clear` waits for it to finish.