diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java index 5bab0c123f..a5e06a6a9b 100644 --- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java +++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java @@ -380,46 +380,12 @@ void evictEntries() { Node<K, V> next = node.getNextInAccessOrder(); if (node.getWeight() != 0) { - evictEntry(node, RemovalCause.SIZE); + evictEntry(node, RemovalCause.SIZE, 0L); } node = next; } } - @GuardedBy("evictionLock") - void evictEntry(Node<K, V> node, RemovalCause cause) { - K key = node.getKey(); - V value = node.getValue(); - boolean[] removed = new boolean[1]; - RemovalCause actualCause = (key == null) || (value == null) ? RemovalCause.COLLECTED : cause; - - data.computeIfPresent(node.getKeyReference(), (k, n) -> { - if (n == node) { - writer.delete(key, value, actualCause); - removed[0] = true; - return null; - } - return n; - }); - - makeDead(node); - if (evicts() || expiresAfterAccess()) { - accessOrderDeque().remove(node); - } - if (expiresAfterWrite()) { - writeOrderDeque().remove(node); - } - - if (removed[0]) { - statsCounter().recordEviction(); - if (hasRemovalListener()) { - // Notify the listener only if the entry was evicted. This must be performed as the last - // step during eviction to safe guard against the executor rejecting the notification task.
- notifyRemoval(key, value, actualCause); - } - } - } - @GuardedBy("evictionLock") void expireEntries() { long now = expirationTicker().read(); @@ -430,7 +396,7 @@ void expireEntries() { if ((node == null) || (node.getAccessTime() > expirationTime)) { break; } - evictEntry(node, RemovalCause.EXPIRED); + evictEntry(node, RemovalCause.EXPIRED, now); } } if (expiresAfterWrite()) { @@ -440,7 +406,7 @@ void expireEntries() { if ((node == null) || (node.getWriteTime() > expirationTime)) { break; } - evictEntry(node, RemovalCause.EXPIRED); + evictEntry(node, RemovalCause.EXPIRED, now); } } } @@ -454,6 +420,69 @@ boolean hasExpired(Node<K, V> node, long now) { || (expiresAfterWrite() && (now - node.getWriteTime() >= expiresAfterWriteNanos())); } + /** + * Attempts to evict the entry based on the given removal cause. A removal due to expiration may + * be ignored if the entry was since updated and is no longer eligible for eviction. + * + * @param node the entry to evict + * @param cause the reason to evict + * @param now the current time, used only if expiring + */ + @GuardedBy("evictionLock") + void evictEntry(Node<K, V> node, RemovalCause cause, long now) { + K key = node.getKey(); + V value = node.getValue(); + boolean[] removed = new boolean[1]; + boolean[] resurrect = new boolean[1]; + RemovalCause actualCause = (key == null) || (value == null) ?
RemovalCause.COLLECTED : cause; + + data.computeIfPresent(node.getKeyReference(), (k, n) -> { + if (n != node) { + return n; + } + if (actualCause == RemovalCause.EXPIRED) { + boolean expired = false; + if (expiresAfterAccess()) { + long expirationTime = now - expiresAfterAccessNanos(); + expired |= n.getAccessTime() <= expirationTime; + } + if (expiresAfterWrite()) { + long expirationTime = now - expiresAfterWriteNanos(); + expired |= n.getWriteTime() <= expirationTime; + } + if (!expired) { + resurrect[0] = true; + return n; + } + } + writer.delete(key, value, actualCause); + removed[0] = true; + return null; + }); + + if (resurrect[0]) { + // The entry is no longer expired and will be reordered in the next maintenance cycle + return; + } + + makeDead(node); + if (evicts() || expiresAfterAccess()) { + accessOrderDeque().remove(node); + } + if (expiresAfterWrite()) { + writeOrderDeque().remove(node); + } + + if (removed[0]) { + statsCounter().recordEviction(); + if (hasRemovalListener()) { + // Notify the listener only if the entry was evicted. This must be performed as the last + // step during eviction to safe guard against the executor rejecting the notification task. + notifyRemoval(key, value, actualCause); + } + } + } + /** * Performs the post-processing work required after a read. 
 * + @@ -577,7 +606,7 @@ void drainKeyReferences() { while ((keyRef = keyReferenceQueue().poll()) != null) { Node<K, V> node = data.get(keyRef); if (node != null) { - evictEntry(node, RemovalCause.COLLECTED); + evictEntry(node, RemovalCause.COLLECTED, 0L); } } } @@ -594,7 +623,7 @@ void drainValueReferences() { InternalReference<V> ref = (InternalReference<V>) valueRef; Node<K, V> node = data.get(ref.getKeyReference()); if ((node != null) && (valueRef == node.getValueReference())) { - evictEntry(node, RemovalCause.COLLECTED); + evictEntry(node, RemovalCause.COLLECTED, 0L); } } } diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/issues/Issue30Test.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/issues/Issue30Test.java new file mode 100644 index 0000000000..efe672ae30 --- /dev/null +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/issues/Issue30Test.java @@ -0,0 +1,149 @@ +package com.github.benmanes.caffeine.cache.issues; + +import static com.github.benmanes.caffeine.testing.IsFutureValue.futureOf; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; + +import org.testng.SkipException; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import com.github.benmanes.caffeine.cache.AsyncLoadingCache; +import com.github.benmanes.caffeine.cache.CacheLoader; +import com.github.benmanes.caffeine.cache.Caffeine; + +/** + * Issue #30: Unexpected cache misses with expireAfterWrite using multiple keys. + *

+ * Prior to eviction, the cache must revalidate that the entry has expired. If the entry was updated + * but the maintenance thread reads a stale value, then the entry may be prematurely expired. The + * removal must detect that the entry was "resurrected" and cancel the expiration. + * + * @author yurgis2 + * @author ben.manes@gmail.com (Ben Manes) + */ +public final class Issue30Test { + private static final boolean DEBUG = false; + + private static final String A_KEY = "foo"; + private static final String A_ORIGINAL = "foo0"; + private static final String A_UPDATE_1 = "foo1"; + private static final String A_UPDATE_2 = "foo2"; + + private static final String B_KEY = "bar"; + private static final String B_ORIGINAL = "bar0"; + private static final String B_UPDATE_1 = "bar1"; + private static final String B_UPDATE_2 = "bar2"; + + private static final int TTL = 100; + private static final int EPSILON = 10; + + @DataProvider(name = "cache") + public Object[][] providesCache() { + ConcurrentMap<String, String> source = new ConcurrentHashMap<>(); + ConcurrentMap<String, Date> lastLoad = new ConcurrentHashMap<>(); + AsyncLoadingCache<String, String> cache = Caffeine.newBuilder() + .expireAfterWrite(TTL, TimeUnit.MILLISECONDS) + .buildAsync(new Loader(source, lastLoad)); + return new Object[][] {{ cache, source, lastLoad }}; + } + + @Test(dataProvider = "cache", invocationCount = 100, threadPoolSize = 10) + public void expiration(AsyncLoadingCache<String, String> cache, + ConcurrentMap<String, String> source, ConcurrentMap<String, Date> lastLoad) throws Exception { + if (System.getProperties().containsKey("CI")) { + throw new SkipException("This test cannot be run on a shared CI server due to timeouts"); + } + + initialValues(cache, source, lastLoad); + firstUpdate(cache, source); + secondUpdate(cache, source); + } + + private void initialValues(AsyncLoadingCache<String, String> cache, + ConcurrentMap<String, String> source, ConcurrentMap<String, Date> lastLoad) + throws InterruptedException, ExecutionException { + source.put(A_KEY, A_ORIGINAL); + source.put(B_KEY, B_ORIGINAL); + lastLoad.clear(); +
 assertThat("should serve initial value", cache.get(A_KEY), is(futureOf(A_ORIGINAL))); + assertThat("should serve initial value", cache.get(B_KEY), is(futureOf(B_ORIGINAL))); + } + + private void firstUpdate(AsyncLoadingCache<String, String> cache, + ConcurrentMap<String, String> source) throws InterruptedException, ExecutionException { + source.put(A_KEY, A_UPDATE_1); + source.put(B_KEY, B_UPDATE_1); + + assertThat("should serve cached initial value", cache.get(A_KEY), is(futureOf(A_ORIGINAL))); + assertThat("should serve cached initial value", cache.get(B_KEY), is(futureOf(B_ORIGINAL))); + + Thread.sleep(EPSILON); // sleep for less than expiration + assertThat("still serve cached initial value", cache.get(A_KEY), is(futureOf(A_ORIGINAL))); + assertThat("still serve cached initial value", cache.get(B_KEY), is(futureOf(B_ORIGINAL))); + + Thread.sleep(TTL + EPSILON); // sleep until expiration + assertThat("now serve first updated value", cache.get(A_KEY), is(futureOf(A_UPDATE_1))); + assertThat("now serve first updated value", cache.get(B_KEY), is(futureOf(B_UPDATE_1))); + } + + private void secondUpdate(AsyncLoadingCache<String, String> cache, + ConcurrentMap<String, String> source) throws Exception { + source.put(A_KEY, A_UPDATE_2); + source.put(B_KEY, B_UPDATE_2); + + assertThat("serve cached first updated value", cache.get(A_KEY), is(futureOf(A_UPDATE_1))); + assertThat("serve cached first updated value", cache.get(B_KEY), is(futureOf(B_UPDATE_1))); + + Thread.sleep(EPSILON); // sleep for less than expiration + assertThat("serve cached first updated value", cache.get(A_KEY), is(futureOf(A_UPDATE_1))); + assertThat("serve cached first updated value", cache.get(B_KEY), is(futureOf(B_UPDATE_1))); + } + + static final class Loader implements CacheLoader<String, String> { + final ConcurrentMap<String, String> source; + final ConcurrentMap<String, Date> lastLoad; + + Loader(ConcurrentMap<String, String> source, ConcurrentMap<String, Date> lastLoad) { + this.source = source; + this.lastLoad = lastLoad; + } + + @Override + public String load(String key) { + throw new IllegalStateException(); + } + +
 @Override + public CompletableFuture<String> asyncLoad(String key, Executor executor) { + reportCacheMiss(key); + return CompletableFuture.completedFuture(source.get(key)); + } + + private void reportCacheMiss(String key) { + Date now = new Date(); + Date last = lastLoad.get(key); + lastLoad.put(key, now); + + if (DEBUG) { + String time = new SimpleDateFormat("hh:MM:ss.SSS").format(new Date()); + if (last == null) { + System.out.println(key + ": first load @ " + time); + } else { + long duration = (now.getTime() - last.getTime()); + System.out.println(key + ": " + duration + "ms after last load @ " + time); + } + } + } + } +}