Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: New WithMaxCost option for custom cache capacity management strategies #152

Merged
merged 36 commits into from
Dec 3, 2024
Merged
Show file tree
Hide file tree
Changes from 12 commits
Commits
Show all changes
36 commits
Select commit Hold shift + click to select a range
2d92605
new option to define the max memory the cache can use in bytes
dadrus Oct 22, 2024
6e92deb
cache.set updated to cope with memory limitations
dadrus Oct 22, 2024
666a987
options updated to implement the suggested new option
dadrus Nov 10, 2024
0a2236a
cache impl updated + tests
dadrus Nov 10, 2024
fa705e7
dependencies updated
dadrus Nov 10, 2024
dcc9743
imports organized
dadrus Nov 10, 2024
7d51be1
readme updated to document the new configuration option
dadrus Nov 10, 2024
fea314a
better explanation in the example
dadrus Nov 10, 2024
e7e24cc
readme line removed by accident restored
dadrus Nov 10, 2024
53b1934
function renamed (typo fixed)
dadrus Nov 10, 2024
7a9baa3
wording fixed
dadrus Nov 10, 2024
0b17592
useless sentence removed
dadrus Nov 10, 2024
d1bdbee
costs renamed to cost
dadrus Nov 14, 2024
a2f5915
cost used in tests simplified
dadrus Nov 14, 2024
8965baf
CostCalcFunc renamed to CostFunc
dadrus Nov 14, 2024
58dcb90
costsCalcFunc renamed to costFunc
dadrus Nov 14, 2024
23cfbf0
updateExpirations moved
dadrus Nov 14, 2024
08a47e8
totalCost renamed to maxCost
dadrus Nov 14, 2024
c594b8d
WithTotalCost option renamed to WithMaxCost
dadrus Nov 14, 2024
c4b0baa
example in README updated
dadrus Nov 14, 2024
2a04fee
signature of the WithMaxCost option changed
dadrus Nov 14, 2024
e595b68
Merge branch 'v3' into feat/cache_size_in_bytes
dadrus Nov 14, 2024
e36c05f
readme updated
dadrus Nov 14, 2024
ce9482f
Item impl updated to hold the current cost and the corresponding cost…
dadrus Nov 17, 2024
da1f07e
cache impl updated to make use of the new item properties
dadrus Nov 17, 2024
d275891
new item options
dadrus Nov 17, 2024
d20ea4d
tests for the new options
dadrus Nov 17, 2024
692baf0
new Item related tests
dadrus Nov 17, 2024
2308b72
cache tests fixed to make them compile
dadrus Nov 17, 2024
fd2c7ad
item option implementation updated to become a private interface
dadrus Nov 27, 2024
2a15bc1
cache set test updated
dadrus Nov 27, 2024
eebe1a8
made new item related functions private + eviction reason renamed
dadrus Nov 29, 2024
5cb43bf
moved initial item cost calculation to the constructor function
dadrus Nov 29, 2024
e70f90d
test functions renamed to reflect the private nature of functions und…
dadrus Nov 29, 2024
43fc033
version tracking fixed
dadrus Nov 29, 2024
41f3351
small readme update
dadrus Nov 29, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -141,3 +141,23 @@ func main() {
item := cache.Get("key from file")
}
```

To restrict the cache's capacity based on criteria beyond the number
of items it can hold, the `ttlcache.WithTotalCost` option allows for
implementing custom strategies. The following example demonstrates
how to limit the maximum memory usage of a cache to 5MB:
```go
func main() {
	cache := ttlcache.New[string, string](
		// Cap the cache at 5 MB (5 * 1024 * 1024 bytes), matching the
		// limit described in the surrounding documentation.
		ttlcache.WithTotalCost[string, string](5*1024*1024, func(key string, item string) uint64 {
			// 72 (bytes) represent the memory occupied by the *ttlcache.Item structure
			// used to store the new value.
			// 16 (bytes) represent the memory footprint of a string header in Go,
			// as determined by unsafe.Sizeof.
			// len returns int, so the sum must be converted to uint64 explicitly.
			return uint64(72 + 16 + len(key) + 16 + len(item))
		}),
	)

	cache.Set("first", "value1", ttlcache.DefaultTTL)
}
```
28 changes: 28 additions & 0 deletions cache.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ const (
EvictionReasonDeleted EvictionReason = iota + 1
EvictionReasonCapacityReached
EvictionReasonExpired
EvictionReasonTotalCostExceeded
dadrus marked this conversation as resolved.
Show resolved Hide resolved
)

// EvictionReason is used to specify why a certain item was
Expand All @@ -36,6 +37,7 @@ type Cache[K comparable, V any] struct {

timerCh chan time.Duration
}
costs uint64
dadrus marked this conversation as resolved.
Show resolved Hide resolved

metricsMu sync.RWMutex
metrics Metrics
Expand Down Expand Up @@ -137,7 +139,20 @@ func (c *Cache[K, V]) set(key K, value V, ttl time.Duration) *Item[K, V] {
if elem != nil {
// update/overwrite an existing item
item := elem.Value.(*Item[K, V])
oldValue := item.value
item.update(value, ttl)

if c.options.totalCost != 0 {
oldItemCosts := c.options.costsCalcFunc(key, oldValue)
newItemCosts := c.options.costsCalcFunc(key, value)

c.costs = c.costs - oldItemCosts + newItemCosts

for c.costs > c.options.totalCost {
c.evict(EvictionReasonTotalCostExceeded, c.items.lru.Back())
}
}

c.updateExpirations(false, elem)
dadrus marked this conversation as resolved.
Show resolved Hide resolved

return item
Expand All @@ -158,6 +173,14 @@ func (c *Cache[K, V]) set(key K, value V, ttl time.Duration) *Item[K, V] {
c.items.values[key] = elem
c.updateExpirations(true, elem)

if c.options.totalCost != 0 {
c.costs += c.options.costsCalcFunc(key, value)

for c.costs > c.options.totalCost {
c.evict(EvictionReasonTotalCostExceeded, c.items.lru.Back())
}
}

c.metricsMu.Lock()
c.metrics.Insertions++
c.metricsMu.Unlock()
Expand Down Expand Up @@ -258,6 +281,11 @@ func (c *Cache[K, V]) evict(reason EvictionReason, elems ...*list.Element) {
for i := range elems {
item := elems[i].Value.(*Item[K, V])
delete(c.items.values, item.key)

if c.options.totalCost != 0 {
c.costs -= c.options.costsCalcFunc(item.key, item.value)
}

c.items.lru.Remove(elems[i])
c.items.expQueue.remove(elems[i])

Expand Down
101 changes: 72 additions & 29 deletions cache_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ func Test_Cache_updateExpirations(t *testing.T) {
t.Run(cn, func(t *testing.T) {
t.Parallel()

cache := prepCache(time.Hour)
cache := prepCache(0, time.Hour)

if c.TimerChValue > 0 {
cache.items.timerCh <- c.TimerChValue
Expand Down Expand Up @@ -172,6 +172,7 @@ func Test_Cache_set(t *testing.T) {

cc := map[string]struct {
Capacity uint64
MaxCost uint64
Key string
TTL time.Duration
Metrics Metrics
Expand Down Expand Up @@ -244,6 +245,33 @@ func Test_Cache_set(t *testing.T) {
},
ExpectFns: true,
},
"Set with existing key and eviction caused by exhausted cost": {
MaxCost: 300,
Key: existingKey,
TTL: DefaultTTL,
Metrics: Metrics{
Insertions: 0,
Evictions: 1,
},
},
"Set with existing key and no eviction": {
MaxCost: 500,
Key: existingKey,
TTL: DefaultTTL,
Metrics: Metrics{
Insertions: 0,
Evictions: 0,
},
},
"Set with new key and eviction caused by exhausted cost": {
MaxCost: 400,
Key: newKey,
TTL: DefaultTTL,
Metrics: Metrics{
Insertions: 1,
Evictions: 1,
},
},
}

for cn, c := range cc {
Expand All @@ -260,7 +288,7 @@ func Test_Cache_set(t *testing.T) {
// calculated based on how addToCache sets ttl
existingKeyTTL := time.Hour + time.Minute

cache := prepCache(time.Hour, evictedKey, existingKey, "test3")
cache := prepCache(c.MaxCost, time.Hour, evictedKey, existingKey, "test3")
cache.options.capacity = c.Capacity
cache.options.ttl = time.Minute * 20
cache.events.insertion.fns[1] = func(item *Item[string, string]) {
Expand All @@ -269,16 +297,18 @@ func Test_Cache_set(t *testing.T) {
}
cache.events.insertion.fns[2] = cache.events.insertion.fns[1]
cache.events.eviction.fns[1] = func(r EvictionReason, item *Item[string, string]) {
assert.Equal(t, EvictionReasonCapacityReached, r)
if c.MaxCost != 0 {
assert.Equal(t, EvictionReasonTotalCostExceeded, r)
} else {
assert.Equal(t, EvictionReasonCapacityReached, r)
}

assert.Equal(t, evictedKey, item.key)
evictionFnsCalls++
}
cache.events.eviction.fns[2] = cache.events.eviction.fns[1]

total := 3
if c.Key == newKey && (c.Capacity == 0 || c.Capacity >= 4) {
total++
}
total := 3 - int(c.Metrics.Evictions) + int(c.Metrics.Insertions)

item := cache.set(c.Key, "value123", c.TTL)

Expand Down Expand Up @@ -390,7 +420,7 @@ func Test_Cache_get(t *testing.T) {
t.Run(cn, func(t *testing.T) {
t.Parallel()

cache := prepCache(time.Hour, existingKey, "test2", "test3")
cache := prepCache(0, time.Hour, existingKey, "test2", "test3")
addToCache(cache, time.Nanosecond, expiredKey)
time.Sleep(time.Millisecond) // force expiration

Expand Down Expand Up @@ -441,7 +471,7 @@ func Test_Cache_evict(t *testing.T) {
key4FnsCalls int
)

cache := prepCache(time.Hour, "1", "2", "3", "4")
cache := prepCache(0, time.Hour, "1", "2", "3", "4")
cache.events.eviction.fns[1] = func(r EvictionReason, item *Item[string, string]) {
assert.Equal(t, EvictionReasonDeleted, r)
switch item.key {
Expand Down Expand Up @@ -486,7 +516,7 @@ func Test_Cache_evict(t *testing.T) {
}

func Test_Cache_Set(t *testing.T) {
cache := prepCache(time.Hour, "test1", "test2", "test3")
cache := prepCache(0, time.Hour, "test1", "test2", "test3")
item := cache.Set("hello", "value123", time.Minute)
require.NotNil(t, item)
assert.Same(t, item, cache.items.values["hello"].Value)
Expand Down Expand Up @@ -599,7 +629,7 @@ func Test_Cache_Get(t *testing.T) {
t.Run(cn, func(t *testing.T) {
t.Parallel()

cache := prepCache(time.Minute, foundKey, "test2", "test3")
cache := prepCache(0, time.Minute, foundKey, "test2", "test3")
oldExpiresAt := cache.items.values[foundKey].Value.(*Item[string, string]).expiresAt
cache.options = c.DefaultOptions

Expand Down Expand Up @@ -632,7 +662,7 @@ func Test_Cache_Get(t *testing.T) {
func Test_Cache_Delete(t *testing.T) {
var fnsCalls int

cache := prepCache(time.Hour, "1", "2", "3", "4")
cache := prepCache(0, time.Hour, "1", "2", "3", "4")
cache.events.eviction.fns[1] = func(r EvictionReason, item *Item[string, string]) {
assert.Equal(t, EvictionReasonDeleted, r)
fnsCalls++
Expand All @@ -652,7 +682,7 @@ func Test_Cache_Delete(t *testing.T) {
}

func Test_Cache_Has(t *testing.T) {
cache := prepCache(time.Hour, "1")
cache := prepCache(0, time.Hour, "1")
addToCache(cache, time.Nanosecond, "2")

assert.True(t, cache.Has("1"))
Expand All @@ -661,7 +691,7 @@ func Test_Cache_Has(t *testing.T) {
}

func Test_Cache_GetOrSet(t *testing.T) {
cache := prepCache(time.Hour)
cache := prepCache(0, time.Hour)
item, retrieved := cache.GetOrSet("test", "1", WithTTL[string, string](time.Minute))
require.NotNil(t, item)
assert.Same(t, item, cache.items.values["test"].Value)
Expand All @@ -685,7 +715,7 @@ func Test_Cache_GetOrSet(t *testing.T) {
}

func Test_Cache_GetAndDelete(t *testing.T) {
cache := prepCache(time.Hour, "test1", "test2", "test3")
cache := prepCache(0, time.Hour, "test1", "test2", "test3")
listItem := cache.items.lru.Front()
require.NotNil(t, listItem)
assert.Same(t, listItem, cache.items.values["test3"])
Expand Down Expand Up @@ -721,7 +751,7 @@ func Test_Cache_DeleteAll(t *testing.T) {
key4FnsCalls int
)

cache := prepCache(time.Hour, "1", "2", "3", "4")
cache := prepCache(0, time.Hour, "1", "2", "3", "4")
cache.events.eviction.fns[1] = func(r EvictionReason, item *Item[string, string]) {
assert.Equal(t, EvictionReasonDeleted, r)
switch item.key {
Expand Down Expand Up @@ -751,7 +781,7 @@ func Test_Cache_DeleteExpired(t *testing.T) {
key2FnsCalls int
)

cache := prepCache(time.Hour)
cache := prepCache(0, time.Hour)
cache.events.eviction.fns[1] = func(r EvictionReason, item *Item[string, string]) {
assert.Equal(t, EvictionReasonExpired, r)
switch item.key {
Expand Down Expand Up @@ -792,7 +822,7 @@ func Test_Cache_DeleteExpired(t *testing.T) {
}

func Test_Cache_Touch(t *testing.T) {
cache := prepCache(time.Hour, "1", "2")
cache := prepCache(0, time.Hour, "1", "2")
oldExpiresAt := cache.items.values["1"].Value.(*Item[string, string]).expiresAt

cache.Touch("1")
Expand All @@ -803,7 +833,7 @@ func Test_Cache_Touch(t *testing.T) {
}

func Test_Cache_Len(t *testing.T) {
cache := prepCache(time.Hour)
cache := prepCache(0, time.Hour)
assert.Equal(t, 0, cache.Len())

addToCache(cache, time.Hour, "1")
Expand All @@ -820,13 +850,13 @@ func Test_Cache_Len(t *testing.T) {
}

func Test_Cache_Keys(t *testing.T) {
cache := prepCache(time.Hour, "1", "2", "3")
cache := prepCache(0, time.Hour, "1", "2", "3")
addToCache(cache, time.Nanosecond, "4")
assert.ElementsMatch(t, []string{"1", "2", "3"}, cache.Keys())
}

func Test_Cache_Items(t *testing.T) {
cache := prepCache(time.Hour, "1", "2", "3")
cache := prepCache(0, time.Hour, "1", "2", "3")
addToCache(cache, time.Nanosecond, "4")
items := cache.Items()
require.Len(t, items, 3)
Expand All @@ -840,7 +870,7 @@ func Test_Cache_Items(t *testing.T) {
}

func Test_Cache_Range(t *testing.T) {
c := prepCache(DefaultTTL, "1", "2", "3", "4", "5")
c := prepCache(0, DefaultTTL, "1", "2", "3", "4", "5")
addToCache(c, time.Nanosecond, "6")
var results []string

Expand All @@ -860,7 +890,7 @@ func Test_Cache_Range(t *testing.T) {
}

func Test_Cache_RangeBackwards(t *testing.T) {
c := prepCache(DefaultTTL)
c := prepCache(0, DefaultTTL)
addToCache(c, time.Nanosecond, "1")
addToCache(c, time.Hour, "2", "3", "4", "5")

Expand Down Expand Up @@ -890,7 +920,7 @@ func Test_Cache_Metrics(t *testing.T) {
}

func Test_Cache_Start(t *testing.T) {
cache := prepCache(0)
cache := prepCache(0, 0)
cache.stopCh = make(chan struct{})

addToCache(cache, time.Nanosecond, "1")
Expand Down Expand Up @@ -938,7 +968,7 @@ func Test_Cache_Stop(t *testing.T) {
func Test_Cache_OnInsertion(t *testing.T) {
checkCh := make(chan struct{})
resCh := make(chan struct{})
cache := prepCache(time.Hour)
cache := prepCache(0, time.Hour)
del1 := cache.OnInsertion(func(_ context.Context, _ *Item[string, string]) {
checkCh <- struct{}{}
})
Expand Down Expand Up @@ -1022,7 +1052,7 @@ func Test_Cache_OnInsertion(t *testing.T) {
func Test_Cache_OnEviction(t *testing.T) {
checkCh := make(chan struct{})
resCh := make(chan struct{})
cache := prepCache(time.Hour)
cache := prepCache(0, time.Hour)
del1 := cache.OnEviction(func(_ context.Context, _ EvictionReason, _ *Item[string, string]) {
checkCh <- struct{}{}
})
Expand Down Expand Up @@ -1181,7 +1211,7 @@ func Test_SuppressedLoader_Load(t *testing.T) {
item1, item2 *Item[string, string]
)

cache := prepCache(time.Hour)
cache := prepCache(0, time.Hour)

// nil result
wg.Add(2)
Expand Down Expand Up @@ -1228,9 +1258,17 @@ func Test_SuppressedLoader_Load(t *testing.T) {
assert.Equal(t, 1, loadCalls)
}

func prepCache(ttl time.Duration, keys ...string) *Cache[string, string] {
func prepCache(maxCost uint64, ttl time.Duration, keys ...string) *Cache[string, string] {
c := &Cache[string, string]{}
c.options.ttl = ttl
if maxCost != 0 {
c.options.totalCost = maxCost
c.options.costsCalcFunc = func(key string, item string) uint64 {
// 72 bytes are used by the Item struct
// 2 * 16 bytes are used by the used string headers (key and item)
return uint64(104 + len(key) + len(item))
dadrus marked this conversation as resolved.
Show resolved Hide resolved
}
}
c.items.values = make(map[string]*list.Element)
c.items.lru = list.New()
c.items.expQueue = newExpirationQueue[string, string]()
Expand All @@ -1245,14 +1283,19 @@ func prepCache(ttl time.Duration, keys ...string) *Cache[string, string] {

func addToCache(c *Cache[string, string], ttl time.Duration, keys ...string) {
for i, key := range keys {
value := fmt.Sprint("value of", key)
item := NewItem(
key,
fmt.Sprint("value of", key),
value,
ttl+time.Duration(i)*time.Minute,
false,
)
elem := c.items.lru.PushFront(item)
c.items.values[key] = elem
c.items.expQueue.push(elem)

if c.options.totalCost != 0 {
c.costs += c.options.costsCalcFunc(key, value)
}
}
}
Loading
Loading