diff --git a/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/local/AddRemovalListener.java b/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/local/AddRemovalListener.java
index bf9c3b2b71..000e6ed9de 100644
--- a/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/local/AddRemovalListener.java
+++ b/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/local/AddRemovalListener.java
@@ -42,7 +42,7 @@ protected void execute() {
         .returns(REMOVAL_LISTENER)
         .build());
     context.cache.addMethod(MethodSpec.methodBuilder("hasRemovalListener")
-        .addModifiers(protectedFinalModifiers)
+        .addModifiers(publicFinalModifiers)
         .addStatement("return true")
         .returns(boolean.class)
         .build());
diff --git a/caffeine/src/jmh/java/com/github/benmanes/caffeine/cache/CacheType.java b/caffeine/src/jmh/java/com/github/benmanes/caffeine/cache/CacheType.java
index 7b9005c7c3..724aa7019c 100644
--- a/caffeine/src/jmh/java/com/github/benmanes/caffeine/cache/CacheType.java
+++ b/caffeine/src/jmh/java/com/github/benmanes/caffeine/cache/CacheType.java
@@ -18,8 +18,6 @@
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.jackrabbit.oak.cache.CacheLIRS;
-import org.cache2k.impl.ClockProPlusCache;
-import org.cache2k.impl.LruCache;
 import org.cliffc.high_scale_lib.NonBlockingHashMap;
 import org.infinispan.commons.equivalence.AnyEquivalence;
 import org.infinispan.commons.util.concurrent.jdk8backported.BoundedEquivalentConcurrentHashMapV8;
@@ -70,14 +68,9 @@ public enum CacheType {
 
   /* ---------------- Bounded -------------- */
 
-  Cache2k_ClockProPlus {
+  Cache2k {
     @Override public <K, V> BasicCache<K, V> create(int maximumSize) {
-      return new Cache2k<>(ClockProPlusCache.class, maximumSize);
-    }
-  },
-  Cache2k_Lru {
-    @Override public <K, V> BasicCache<K, V> create(int maximumSize) {
-      return new Cache2k<>(LruCache.class, maximumSize);
+      return new Cache2k<>(maximumSize);
     }
   },
   Caffeine {
diff --git a/caffeine/src/jmh/java/com/github/benmanes/caffeine/cache/impl/Cache2k.java b/caffeine/src/jmh/java/com/github/benmanes/caffeine/cache/impl/Cache2k.java
index a04fe4f6ee..05838108f5 100644
--- a/caffeine/src/jmh/java/com/github/benmanes/caffeine/cache/impl/Cache2k.java
+++ b/caffeine/src/jmh/java/com/github/benmanes/caffeine/cache/impl/Cache2k.java
@@ -16,7 +16,7 @@
 package com.github.benmanes.caffeine.cache.impl;
 
 import org.cache2k.Cache;
-import org.cache2k.CacheBuilder;
+import org.cache2k.Cache2kBuilder;
 
 import com.github.benmanes.caffeine.cache.BasicCache;
 
@@ -26,10 +26,9 @@ public final class Cache2k<K, V> implements BasicCache<K, V> {
 
   private final Cache<K, V> cache;
 
-  @SuppressWarnings({"unchecked", "deprecation"})
-  public Cache2k(Class<?> implementation, int maximumSize) {
-    cache = (Cache<K, V>) CacheBuilder.newCache(Object.class, Object.class)
-        .implementation(implementation)
+  @SuppressWarnings("unchecked")
+  public Cache2k(int maximumSize) {
+    cache = (Cache<K, V>) Cache2kBuilder.forUnknownTypes()
         .entryCapacity(maximumSize)
         .eternal(true)
         .build();
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
index 3b31dfdc50..691e1c27f9 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
@@ -16,6 +16,9 @@
 package com.github.benmanes.caffeine.cache;
 
 import static com.github.benmanes.caffeine.cache.Caffeine.requireState;
+import static com.github.benmanes.caffeine.cache.Node.EDEN;
+import static com.github.benmanes.caffeine.cache.Node.PROBATION;
+import static com.github.benmanes.caffeine.cache.Node.PROTECTED;
 import static java.util.Objects.requireNonNull;
 
 import java.io.InvalidObjectException;
@@ -266,13 +269,13 @@ public RemovalListener<K, V> removalListener() {
     return null;
   }
 
-  /** Returns whether this cache notifies when an entry is removed. */
-  protected boolean hasRemovalListener() {
+  @Override
+  public boolean hasRemovalListener() {
     return false;
   }
 
-  /** Asynchronously sends a removal notification to the listener. */
-  void notifyRemoval(@Nullable K key, @Nullable V value, RemovalCause cause) {
+  @Override
+  public void notifyRemoval(@Nullable K key, @Nullable V value, RemovalCause cause) {
     requireState(hasRemovalListener(), "Notification should be guarded with a check");
     Runnable task = () -> {
       try {
@@ -355,6 +358,11 @@ protected void setRefreshAfterWriteNanos(long refreshAfterWriteNanos) {
     throw new UnsupportedOperationException();
   }
 
+  @Override
+  public boolean hasWriteTime() {
+    return expiresAfterWrite() || refreshAfterWrite();
+  }
+
   @Override
   public Ticker expirationTicker() {
     return Ticker.disabledTicker();
@@ -524,6 +532,7 @@ int evictFromEden() {
    */
   @GuardedBy("evictionLock")
   void evictFromMain(int candidates) {
+    int victimQueue = PROBATION;
     Node<K, V> victim = accessOrderProbationDeque().peekFirst();
     Node<K, V> candidate = accessOrderProbationDeque().peekLast();
     while (weightedSize() > maximum()) {
@@ -532,14 +541,20 @@ void evictFromMain(int candidates) {
         candidate = null;
       }
 
-      // Start evicting from the protected queue
+      // Try evicting from the protected and eden queues
       if ((candidate == null) && (victim == null)) {
-        victim = accessOrderProtectedDeque().peekFirst();
+        if (victimQueue == PROBATION) {
+          victim = accessOrderProtectedDeque().peekFirst();
+          victimQueue = PROTECTED;
+          continue;
+        } else if (victimQueue == PROTECTED) {
+          victim = accessOrderEdenDeque().peekFirst();
+          victimQueue = EDEN;
+          continue;
+        }
 
         // The pending operations will adjust the size to reflect the correct weight
-        if (victim == null) {
-          break;
-        }
+        break;
       }
 
       // Skip over entries with zero weight
@@ -728,7 +743,11 @@ void evictEntry(Node<K, V> node, RemovalCause cause, long now) {
         return n;
       }
     } else if (actualCause == RemovalCause.SIZE) {
-      if (node.getWeight() == 0) {
+      int weight;
+      synchronized (node) {
+        weight = node.getWeight();
+      }
+      if (weight == 0) {
         resurrect[0] = true;
         return n;
       }
@@ -805,45 +824,63 @@ void refreshIfNeeded(Node<K, V> node, long now) {
     if (!refreshAfterWrite()) {
       return;
     }
+    K key;
+    V oldValue;
     long oldWriteTime = node.getWriteTime();
     long refreshWriteTime = isAsync ? Long.MAX_VALUE : now;
     if (((now - oldWriteTime) > refreshAfterWriteNanos())
+        && ((key = node.getKey()) != null) && ((oldValue = node.getValue()) != null)
         && node.casWriteTime(oldWriteTime, refreshWriteTime)) {
       try {
-        executor().execute(() -> {
-          K key = node.getKey();
-          if ((key != null) && node.isAlive()) {
-            BiFunction<K, V, V> refreshFunction = (k, v) -> {
-              if (node.getWriteTime() != refreshWriteTime) {
-                return v;
-              }
-              try {
-                if (isAsync) {
-                  @SuppressWarnings("unchecked")
-                  V oldValue = ((CompletableFuture<V>) v).join();
-                  CompletableFuture<V> future =
-                      cacheLoader.asyncReload(key, oldValue, Runnable::run);
-                  if (future.join() == null) {
-                    return null;
-                  }
-                  @SuppressWarnings("unchecked")
-                  V castFuture = (V) future;
-                  return castFuture;
-                }
-                return cacheLoader.reload(k, v);
-              } catch (Exception e) {
-                node.setWriteTime(oldWriteTime);
-                return LocalCache.throwUnchecked(e);
-              }
-            };
-            try {
-              computeIfPresent(key, refreshFunction);
-            } catch (Throwable t) {
-              logger.log(Level.WARNING, "Exception thrown during refresh", t);
+        CompletableFuture<V> refreshFuture;
+        if (isAsync) {
+          @SuppressWarnings("unchecked")
+          CompletableFuture<V> future = (CompletableFuture<V>) oldValue;
+          if (Async.isReady(future)) {
+            refreshFuture = future.thenCompose(value ->
+                cacheLoader.asyncReload(key, value, executor));
+          } else {
+            // no-op if load is pending
+            node.casWriteTime(refreshWriteTime, oldWriteTime);
+            return;
+          }
+        } else {
+          refreshFuture = cacheLoader.asyncReload(key, oldValue, executor);
+        }
+        refreshFuture.whenComplete((newValue, error) -> {
+          long loadTime = statsTicker().read() - now;
+          if (error != null) {
+            logger.log(Level.WARNING, "Exception thrown during refresh", error);
+            node.casWriteTime(refreshWriteTime, oldWriteTime);
+            statsCounter().recordLoadFailure(loadTime);
+            return;
+          }
+
+          @SuppressWarnings("unchecked")
+          V value = (isAsync && (newValue != null)) ? (V) refreshFuture : newValue;
+
+          boolean[] discard = new boolean[1];
+          compute(key, (k, currentValue) -> {
+            if (currentValue == null) {
+              return value;
+            } else if ((currentValue == oldValue) && (node.getWriteTime() == refreshWriteTime)) {
+              return value;
             }
+            discard[0] = true;
+            return currentValue;
+          }, /* recordMiss */ false, /* recordLoad */ false);
+
+          if (discard[0] && hasRemovalListener()) {
+            notifyRemoval(key, value, RemovalCause.REPLACED);
+          }
+          if (newValue == null) {
+            statsCounter().recordLoadFailure(loadTime);
+          } else {
+            statsCounter().recordLoadSuccess(loadTime);
           }
         });
       } catch (Throwable t) {
+        node.casWriteTime(refreshWriteTime, oldWriteTime);
         logger.log(Level.SEVERE, "Exception thrown when submitting refresh task", t);
       }
     }
@@ -1372,6 +1409,18 @@ public V getIfPresent(Object key, boolean recordStats) {
     return node.getValue();
   }
 
+  @Override
+  public V getIfPresentQuietly(Object key, long[/* 1 */] writeTime) {
+    V value;
+    Node<K, V> node = data.get(nodeFactory.newLookupKey(key));
+    if ((node == null) || ((value = node.getValue()) == null)
+        || hasExpired(node, expirationTicker().read())) {
+      return null;
+    }
+    writeTime[0] = node.getWriteTime();
+    return value;
+  }
+
   @Override
   public Map<K, V> getAllPresent(Iterable<?> keys) {
     int misses = 0;
@@ -1401,24 +1450,24 @@ public Map<K, V> getAllPresent(Iterable<?> keys) {
   public V put(K key, V value) {
     int weight = weigher.weigh(key, value);
     return (weight > 0)
-        ? putFast(key, value, weight, true, false)
-        : putSlow(key, value, weight, true, false);
+        ? putFast(key, value, weight, /* notifyWriter */ true, /* onlyIfAbsent */ false)
+        : putSlow(key, value, weight, /* notifyWriter */ true, /* onlyIfAbsent */ false);
   }
 
   @Override
   public V put(K key, V value, boolean notifyWriter) {
     int weight = weigher.weigh(key, value);
     return (weight > 0)
-        ? putFast(key, value, weight, notifyWriter, false)
-        : putSlow(key, value, weight, notifyWriter, false);
+        ? putFast(key, value, weight, notifyWriter, /* onlyIfAbsent */ false)
+        : putSlow(key, value, weight, notifyWriter, /* onlyIfAbsent */ false);
   }
 
   @Override
   public V putIfAbsent(K key, V value) {
     int weight = weigher.weigh(key, value);
     return (weight > 0)
-        ? putFast(key, value, weight, true, true)
-        : putSlow(key, value, weight, true, true);
+        ? putFast(key, value, weight, /* notifyWriter */ true, /* onlyIfAbsent */ true)
+        : putSlow(key, value, weight, /* notifyWriter */ true, /* onlyIfAbsent */ true);
   }
 
   /**
@@ -1823,7 +1872,7 @@ public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
 
   @Override
   public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction,
-      boolean isAsync) {
+      boolean recordStats, boolean recordLoad) {
     requireNonNull(key);
     requireNonNull(mappingFunction);
     long now = expirationTicker().read();
@@ -1837,13 +1886,16 @@
         return value;
       }
     }
+    if (recordStats) {
+      mappingFunction = statsAware(mappingFunction, recordLoad);
+    }
     Object keyRef = nodeFactory.newReferenceKey(key, keyReferenceQueue());
-    return doComputeIfAbsent(key, keyRef, mappingFunction, isAsync, now);
+    return doComputeIfAbsent(key, keyRef, mappingFunction, now);
   }
 
   /** Returns the current value from a computeIfAbsent invocation. */
-  V doComputeIfAbsent(K key, Object keyRef, Function<? super K, ? extends V> mappingFunction,
-      boolean isAsync, long now) {
+  V doComputeIfAbsent(K key, Object keyRef,
+      Function<? super K, ? extends V> mappingFunction, long now) {
     @SuppressWarnings("unchecked")
     V[] oldValue = (V[]) new Object[1];
     @SuppressWarnings("unchecked")
@@ -1857,7 +1909,7 @@ V doComputeIfAbsent(K key, Object keyRef, Function<? super K, ? extends V> mappingFunction,
     RemovalCause[] cause = new RemovalCause[1];
     Node<K, V> node = data.compute(keyRef, (k, n) -> {
       if (n == null) {
-        newValue[0] = statsAware(mappingFunction, isAsync).apply(key);
+        newValue[0] = mappingFunction.apply(key);
         if (newValue[0] == null) {
           return null;
         }
@@ -1881,7 +1933,7 @@
       }
 
       writer.delete(nodeKey[0], oldValue[0], cause[0]);
-      newValue[0] = statsAware(mappingFunction, isAsync).apply(key);
+      newValue[0] = mappingFunction.apply(key);
       if (newValue[0] == null) {
         removed[0] = n;
         n.retire();
@@ -1938,13 +1990,13 @@ public V computeIfPresent(K key,
 
     boolean computeIfAbsent = false;
     BiFunction<? super K, ? super V, ? extends V> statsAwareRemappingFunction =
-        statsAware(remappingFunction, false, false);
+        statsAware(remappingFunction, /* recordMiss */ false, /* recordLoad */ true);
     return remap(key, lookupKey, statsAwareRemappingFunction, now, computeIfAbsent);
   }
 
   @Override
   public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction,
-      boolean recordMiss, boolean isAsync) {
+      boolean recordMiss, boolean recordLoad) {
     requireNonNull(key);
     requireNonNull(remappingFunction);
 
@@ -1952,7 +2004,7 @@ public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction,
     boolean computeIfAbsent = true;
     Object keyRef = nodeFactory.newReferenceKey(key, keyReferenceQueue());
     BiFunction<? super K, ? super V, ? extends V> statsAwareRemappingFunction =
-        statsAware(remappingFunction, recordMiss, isAsync);
+        statsAware(remappingFunction, recordMiss, recordLoad);
     return remap(key, keyRef, statsAwareRemappingFunction, now, computeIfAbsent);
   }
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java
index c6eadd1c8b..a8dcd69ef0 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java
@@ -114,6 +114,11 @@ public CompletableFuture<V> get(@Nonnull K key,
   @Override
   public CompletableFuture<V> get(K key,
       BiFunction<? super K, Executor, CompletableFuture<V>> mappingFunction) {
+    return get(key, mappingFunction, true);
+  }
+
+  CompletableFuture<V> get(K key,
+      BiFunction<? super K, Executor, CompletableFuture<V>> mappingFunction, boolean recordStats) {
     long startTime = cache.statsTicker().read();
     @SuppressWarnings({"unchecked", "rawtypes"})
     CompletableFuture<V>[] result = new CompletableFuture[1];
@@ -123,7 +128,7 @@ public CompletableFuture<V> get(K key,
         cache.statsCounter().recordLoadFailure(cache.statsTicker().read() - startTime);
       }
       return result[0];
-    }, true);
+    }, recordStats, /* recordLoad */ false);
     if (result[0] != null) {
       AtomicBoolean completed = new AtomicBoolean();
       result[0].whenComplete((value, error) -> {
@@ -203,7 +208,7 @@ private CompletableFuture<Map<K, V>> composeResult(Map<K, CompletableFuture<V>> futures) {
     }
     @SuppressWarnings("rawtypes")
     CompletableFuture[] array = futures.values().toArray(new CompletableFuture[0]);
-    return CompletableFuture.allOf(array).thenApply(ignored -> {
+    return CompletableFuture.allOf(array).thenApplyAsync(ignored -> {
       Map<K, V> result = new HashMap<>(futures.size());
       futures.forEach((key, future) -> {
         V value = future.getNow(null);
@@ -212,7 +217,7 @@ private CompletableFuture<Map<K, V>> composeResult(Map<K, CompletableFuture<V>> futures) {
         }
       });
       return Collections.unmodifiableMap(result);
-    });
+    }, cache.executor());
   }
 
   @Override
@@ -446,39 +451,56 @@ public void cleanUp() {
   public void refresh(K key) {
     requireNonNull(key);
 
-    BiFunction<K, CompletableFuture<V>, CompletableFuture<V>> refreshFunction =
-        (k, oldValueFuture) -> {
-          try {
-            V oldValue = Async.getWhenSuccessful(oldValueFuture);
-            if (loader instanceof CacheLoader<?, ?>) {
-              CacheLoader<K, V> cacheLoader = (CacheLoader<K, V>) loader;
-              V newValue = (oldValue == null)
-                  ? cacheLoader.load(key)
-                  : cacheLoader.reload(key, oldValue);
-              return (newValue == null) ? null : CompletableFuture.completedFuture(newValue);
-            } else {
-              // Hint that the async task should be run on this async task's thread
-              CompletableFuture<V> newValueFuture = (oldValue == null)
-                  ? loader.asyncLoad(key, Runnable::run)
-                  : loader.asyncReload(key, oldValue, Runnable::run);
-              V newValue = newValueFuture.get();
-              return (newValue == null) ? null : newValueFuture;
+    long[] writeTime = new long[1];
+    CompletableFuture<V> oldValueFuture = cache.getIfPresentQuietly(key, writeTime);
+    if ((oldValueFuture == null)
+        || (oldValueFuture.isDone() && oldValueFuture.isCompletedExceptionally())) {
+      LocalAsyncLoadingCache.this.get(key, loader::asyncLoad, false);
+      return;
+    } else if (!oldValueFuture.isDone()) {
+      // no-op if load is pending
+      return;
+    }
+
+    oldValueFuture.thenAccept(oldValue -> {
+      long now = cache.statsTicker().read();
+      CompletableFuture<V> refreshFuture = (oldValue == null)
+          ? loader.asyncLoad(key, cache.executor())
+          : loader.asyncReload(key, oldValue, cache.executor());
+      refreshFuture.whenComplete((newValue, error) -> {
+        long loadTime = cache.statsTicker().read() - now;
+        if (error != null) {
+          cache.statsCounter().recordLoadFailure(loadTime);
+          logger.log(Level.WARNING, "Exception thrown during refresh", error);
+          return;
+        }
+
+        boolean[] discard = new boolean[1];
+        cache.compute(key, (k, currentValue) -> {
+          if (currentValue == null) {
+            return (newValue == null) ? null : refreshFuture;
+          } else if (currentValue == oldValueFuture) {
+            long expectedWriteTime = writeTime[0];
+            if (cache.hasWriteTime()) {
+              cache.getIfPresentQuietly(key, writeTime);
+            }
+            if (writeTime[0] == expectedWriteTime) {
+              return (newValue == null) ? null : refreshFuture;
             }
-          } catch (InterruptedException e) {
-            Thread.currentThread().interrupt();
-            return LocalCache.throwUnchecked(e);
-          } catch (ExecutionException e) {
-            return LocalCache.throwUnchecked(e.getCause());
-          } catch (Exception e) {
-            return LocalCache.throwUnchecked(e);
           }
-        };
-    cache.executor().execute(() -> {
-      try {
-        cache.compute(key, refreshFunction, false, false);
-      } catch (Throwable t) {
-        logger.log(Level.WARNING, "Exception thrown during refresh", t);
-      }
+          discard[0] = true;
+          return currentValue;
+        }, /* recordMiss */ false, /* recordLoad */ false);
+
+        if (discard[0] && cache.hasRemovalListener()) {
+          cache.notifyRemoval(key, refreshFuture, RemovalCause.REPLACED);
+        }
+        if (newValue == null) {
+          cache.statsCounter().recordLoadFailure(loadTime);
+        } else {
+          cache.statsCounter().recordLoadSuccess(loadTime);
+        }
+      });
     });
   }
@@ -633,7 +655,7 @@ public V compute(K key,
       }
       delegate.statsCounter().recordLoadSuccess(loadTime);
       return CompletableFuture.completedFuture(newValue);
-    }, false, true);
+    }, /* recordMiss */ false, /* recordLoad */ false);
     return Async.getWhenSuccessful(valueFuture);
   }
 
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java
index 1a6c1a1668..63fc25fe51 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java
@@ -42,14 +42,23 @@ interface LocalCache<K, V> extends ConcurrentMap<K, V> {
   @Nonnull
   StatsCounter statsCounter();
 
+  /** Returns whether this cache notifies when an entry is removed. */
+  boolean hasRemovalListener();
+
   /** Returns the {@link RemovalListener} used by this cache or null if not used. */
   @Nullable
   RemovalListener<K, V> removalListener();
 
+  /** Asynchronously sends a removal notification to the listener. */
+  void notifyRemoval(@Nullable K key, @Nullable V value, RemovalCause cause);
+
   /** Returns the {@link Executor} used by this cache. */
   @Nonnull
   Executor executor();
 
+  /** Returns whether the cache captures the write time of the entry. */
+  boolean hasWriteTime();
+
   /** Returns the {@link Ticker} used by this cache for expiration. */
   @Nonnull
   Ticker expirationTicker();
@@ -69,6 +78,13 @@ interface LocalCache<K, V> extends ConcurrentMap<K, V> {
   @Nullable
   V getIfPresent(@Nonnull Object key, boolean recordStats);
 
+  /**
+   * See {@link Cache#getIfPresent(Object)}. This method differs by not recording the access with
+   * the statistics nor the eviction policy, and populates the write time if known.
+   */
+  @Nullable
+  V getIfPresentQuietly(@Nonnull Object key, @Nonnull long[/* 1 */] writeTime);
+
   /** See {@link Cache#getAllPresent}. */
   @Nonnull
   Map<K, V> getAllPresent(@Nonnull Iterable<?> keys);
 
@@ -77,31 +93,32 @@ interface LocalCache<K, V> extends ConcurrentMap<K, V> {
    * See {@link Cache#put(Object, Object)}. This method differs by allowing the operation to not
    * notify the writer when an entry was inserted or updated.
    */
-  V put(K key, V value, boolean notifyWriter);
+  @Nullable
+  V put(@Nonnull K key, @Nonnull V value, boolean notifyWriter);
 
   @Override
   default V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
-    return compute(key, remappingFunction, false, false);
+    return compute(key, remappingFunction, /* recordMiss */ false, /* recordLoad */ true);
   }
 
   /**
    * See {@link ConcurrentMap#compute}. This method differs by accepting parameters indicating
-   * whether to record a miss statistic based on the success of this operation, and further
-   * qualified by whether the operation was called by an asynchronous cache.
+   * whether to record miss and load statistics based on the success of this operation.
    */
   V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction,
-      boolean recordMiss, boolean isAsync);
+      boolean recordMiss, boolean recordLoad);
 
   @Override
   default V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) {
-    return computeIfAbsent(key, mappingFunction, false);
+    return computeIfAbsent(key, mappingFunction, /* recordMiss */ true, /* recordLoad */ true);
   }
 
   /**
    * See {@link ConcurrentMap#computeIfAbsent}. This method differs by accepting parameters
    * indicating whether the operation was called by an asynchronous cache.
    */
-  V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction, boolean isAsync);
+  V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction,
+      boolean recordMiss, boolean recordLoad);
 
   /** See {@link Cache#invalidateAll(Iterable)}. */
   default void invalidateAll(Iterable<?> keys) {
@@ -115,7 +132,7 @@ default void invalidateAll(Iterable<?> keys) {
 
   /** Decorates the remapping function to record statistics if enabled. */
   default Function<? super K, ? extends V> statsAware(
-      Function<? super K, ? extends V> mappingFunction, boolean isAsync) {
+      Function<? super K, ? extends V> mappingFunction, boolean recordLoad) {
     if (!isRecordingStats()) {
       return mappingFunction;
     }
@@ -130,7 +147,7 @@ default void invalidateAll(Iterable<?> keys) {
         throw e;
       }
       long loadTime = statsTicker().read() - startTime;
-      if (!isAsync) {
+      if (recordLoad) {
         if (value == null) {
           statsCounter().recordLoadFailure(loadTime);
         } else {
@@ -144,13 +161,13 @@ default void invalidateAll(Iterable<?> keys) {
   /** Decorates the remapping function to record statistics if enabled. */
   default BiFunction<? super K, ? super V, ? extends V> statsAware(
       BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
-    return statsAware(remappingFunction, true, false);
+    return statsAware(remappingFunction, /* recordMiss */ true, /* recordLoad */ true);
   }
 
   /** Decorates the remapping function to record statistics if enabled. */
   default BiFunction<? super K, ? super V, ? extends V> statsAware(
       BiFunction<? super K, ? super V, ? extends V> remappingFunction,
-      boolean recordMiss, boolean isAsync) {
+      boolean recordMiss, boolean recordLoad) {
     if (!isRecordingStats()) {
       return remappingFunction;
     }
@@ -167,7 +184,7 @@ default void invalidateAll(Iterable<?> keys) {
         throw e;
       }
       long loadTime = statsTicker().read() - startTime;
-      if (!isAsync) {
+      if (recordLoad) {
         if (result == null) {
           statsCounter().recordLoadFailure(loadTime);
         } else {
@@ -177,9 +194,4 @@ default void invalidateAll(Iterable<?> keys) {
       return result;
     };
   }
-
-  @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"})
-  static <T extends Throwable, V> V throwUnchecked(Throwable t) throws T {
-    throw (T) t;
-  }
 }
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java
index 51362e73ff..a25326dbe6 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java
@@ -25,8 +25,8 @@
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CompletionException;
-import java.util.function.BiFunction;
 import java.util.function.Function;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -44,6 +44,7 @@ interface LocalLoadingCache<C extends LocalCache<K, V>, K, V>
   /** Returns the {@link CacheLoader} used by this cache. */
   CacheLoader<? super K, V> cacheLoader();
 
+  /** Returns the {@link CacheLoader} as a mapping function. */
   Function<K, V> mappingFunction();
 
   /** Returns whether the cache loader supports bulk loading. */
@@ -154,23 +155,45 @@ default void bulkLoad(Set<K> keysToLoad, Map<K, V> result) {
   @Override
   default void refresh(K key) {
     requireNonNull(key);
-    cache().executor().execute(() -> {
-      BiFunction<? super K, ? super V, ? extends V> refreshFunction = (k, oldValue) -> {
-        try {
-          return (oldValue == null)
-              ? cacheLoader().load(key)
-              : cacheLoader().reload(key, oldValue);
-        } catch (InterruptedException e) {
-          Thread.currentThread().interrupt();
-          return LocalCache.throwUnchecked(e);
-        } catch (Exception e) {
-          return LocalCache.throwUnchecked(e);
+
+    long[] writeTime = new long[1];
+    long startTime = cache().statsTicker().read();
+    V oldValue = cache().getIfPresentQuietly(key, writeTime);
+    CompletableFuture<V> refreshFuture = (oldValue == null)
+        ? cacheLoader().asyncLoad(key, cache().executor())
+        : cacheLoader().asyncReload(key, oldValue, cache().executor());
+    refreshFuture.whenComplete((newValue, error) -> {
+      long loadTime = cache().statsTicker().read() - startTime;
+      if (error != null) {
+        logger.log(Level.WARNING, "Exception thrown during refresh", error);
+        cache().statsCounter().recordLoadFailure(loadTime);
+        return;
+      }
+
+      boolean[] discard = new boolean[1];
+      cache().compute(key, (k, currentValue) -> {
+        if (currentValue == null) {
+          return newValue;
+        } else if (currentValue == oldValue) {
+          long expectedWriteTime = writeTime[0];
+          if (cache().hasWriteTime()) {
+            cache().getIfPresentQuietly(key, writeTime);
+          }
+          if (writeTime[0] == expectedWriteTime) {
+            return newValue;
+          }
         }
-      };
-      try {
-        cache().compute(key, refreshFunction, false, false);
-      } catch (Throwable t) {
-        logger.log(Level.WARNING, "Exception thrown during refresh", t);
+        discard[0] = true;
+        return currentValue;
+      }, /* recordMiss */ false, /* recordLoad */ false);
+
+      if (discard[0] && cache().hasRemovalListener()) {
+        cache().notifyRemoval(key, newValue, RemovalCause.REPLACED);
+      }
+      if (newValue == null) {
+        cache().statsCounter().recordLoadFailure(loadTime);
+      } else {
+        cache().statsCounter().recordLoadSuccess(loadTime);
       }
     });
   }
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/RemovalCause.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/RemovalCause.java
index 7a0cdadad7..fb83379ca2 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/RemovalCause.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/RemovalCause.java
@@ -57,6 +57,7 @@ public enum RemovalCause {
    * <ul>
    *   <li>{@link Cache#put}</li>
    *   <li>{@link Cache#putAll}</li>
+   *   <li>{@link LoadingCache#getAll}</li>
    *   <li>{@link LoadingCache#refresh}</li>
    *   <li>{@link java.util.Map#put}</li>
    *   <li>{@link java.util.Map#putAll}</li>
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java
index 773b6e663c..649e2572c0 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java
@@ -73,6 +73,11 @@ final class UnboundedLocalCache<K, V> implements LocalCache<K, V> {
     this.ticker = builder.getTicker();
   }
 
+  @Override
+  public boolean hasWriteTime() {
+    return false;
+  }
+
   /* ---------------- Cache -------------- */
 
   @Override
@@ -89,6 +94,11 @@ public V getIfPresent(Object key, boolean recordStats) {
     return value;
   }
 
+  @Override
+  public V getIfPresentQuietly(Object key, long[/* 1 */] writeTime) {
+    return data.get(key);
+  }
+
   @Override
   public long estimatedSize() {
     return data.mappingCount();
@@ -123,12 +133,8 @@ public StatsCounter statsCounter() {
     return statsCounter;
   }
 
-  void notifyRemoval(@Nullable K key, @Nullable V value, RemovalCause cause) {
-    requireNonNull(removalListener, "Notification should be guarded with a check");
-    executor.execute(() -> removalListener.onRemoval(key, value, cause));
-  }
-
-  boolean hasRemovalListener() {
+  @Override
+  public boolean hasRemovalListener() {
     return (removalListener != null);
   }
 
@@ -137,6 +143,12 @@ public RemovalListener<K, V> removalListener() {
     return removalListener;
   }
 
+  @Override
+  public void notifyRemoval(@Nullable K key, @Nullable V value, RemovalCause cause) {
+    requireNonNull(removalListener, "Notification should be guarded with a check");
+    executor.execute(() -> removalListener.onRemoval(key, value, cause));
+  }
+
   @Override
   public boolean isRecordingStats() {
     return isRecordingStats;
@@ -198,13 +210,15 @@ public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
 
   @Override
   public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction,
-      boolean isAsync) {
+      boolean recordStats, boolean recordLoad) {
     requireNonNull(mappingFunction);
 
     // optimistic fast path due to computeIfAbsent always locking
     V value = data.get(key);
     if (value != null) {
-      statsCounter.recordHits(1);
+      if (recordStats) {
+        statsCounter.recordHits(1);
+      }
       return value;
     }
 
@@ -212,9 +226,11 @@ public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction,
     value = data.computeIfAbsent(key, k -> {
       // Do not communicate to CacheWriter on a load
       missed[0] = true;
-      return statsAware(mappingFunction, isAsync).apply(key);
+      return recordStats
+          ? statsAware(mappingFunction, recordLoad).apply(key)
+          : mappingFunction.apply(key);
     });
-    if (!missed[0]) {
+    if (!missed[0] && recordStats) {
       statsCounter.recordHits(1);
     }
     return value;
@@ -235,7 +251,9 @@ public V computeIfPresent(K key,
     V[] oldValue = (V[]) new Object[1];
     RemovalCause[] cause = new RemovalCause[1];
     V nv = data.computeIfPresent(key, (K k, V value) -> {
-      V newValue = statsAware(remappingFunction, false, false).apply(k, value);
+      BiFunction<? super K, ? super V, ? extends V> function =
+          statsAware(remappingFunction, /* recordMiss */ false, /* recordLoad */ true);
+      V newValue = function.apply(k, value);
       cause[0] = (newValue == null) ? RemovalCause.EXPLICIT : RemovalCause.REPLACED;
       if (hasRemovalListener() && (newValue != value)) {
@@ -252,9 +270,9 @@ public V computeIfPresent(K key,
 
   @Override
   public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction,
-      boolean recordMiss, boolean isAsync) {
+      boolean recordMiss, boolean recordLoad) {
     requireNonNull(remappingFunction);
-    return remap(key, statsAware(remappingFunction, recordMiss, isAsync));
+    return remap(key, statsAware(remappingFunction, recordMiss, recordLoad));
   }
 
   @Override
@@ -337,9 +355,10 @@ public V get(Object key) {
 
   @Override
   public V put(K key, V value) {
-    return put(key, value, true);
+    return put(key, value, /* notifyWriter */ true);
   }
 
+  @Override
   public V put(K key, V value, boolean notifyWriter) {
     requireNonNull(value);
 
@@ -347,13 +366,17 @@ public V put(K key, V value, boolean notifyWriter) {
     // ensures that the removal notification is processed after the removal has completed
     @SuppressWarnings({"unchecked", "rawtypes"})
     V oldValue[] = (V[]) new Object[1];
-    data.compute(key, (k, v) -> {
-      if (notifyWriter && (value != v)) {
-        writer.write(key, value);
-      }
-      oldValue[0] = v;
-      return value;
-    });
+    if ((writer == CacheWriter.disabledWriter()) || !notifyWriter) {
+      oldValue[0] = data.put(key, value);
+    } else {
+      data.compute(key, (k, v) -> {
+        if (value != v) {
+          writer.write(key, value);
+        }
+        oldValue[0] = v;
+        return value;
+      });
+    }
 
     if (hasRemovalListener() && (oldValue[0] != null) && (oldValue[0] != value)) {
       notifyRemoval(key, oldValue[0], RemovalCause.REPLACED);
@@ -391,11 +414,15 @@ public V remove(Object key) {
 
     @SuppressWarnings({"unchecked", "rawtypes"})
     V oldValue[] = (V[]) new Object[1];
-    data.computeIfPresent(castKey, (k, v) -> {
-      writer.delete(castKey, v, RemovalCause.EXPLICIT);
-      oldValue[0] = v;
-      return null;
-    });
+    if (writer == CacheWriter.disabledWriter()) {
+      oldValue[0] = data.remove(key);
+    } else {
+      data.computeIfPresent(castKey, (k, v) -> {
+        writer.delete(castKey, v, RemovalCause.EXPLICIT);
+        oldValue[0] = v;
+        return null;
+      });
+    }
 
     if (hasRemovalListener() && (oldValue[0] != null)) {
       notifyRemoval(castKey, oldValue[0], RemovalCause.EXPLICIT);
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/stats/CacheStats.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/stats/CacheStats.java
index ecce77d2d7..24d1b46abf 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/stats/CacheStats.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/stats/CacheStats.java
@@ -119,10 +119,11 @@ public CacheStats(@Nonnegative long hitCount, @Nonnegative long missCount,
   }
 
   /**
-   * Returns a statistics instance where cache events have been recorded.
+   * Returns a statistics instance where no cache events have been recorded.
    *
    * @return an empty statistics instance
    */
+  @Nonnull
   public static CacheStats empty() {
     return EMPTY_STATS;
   }
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncLoadingCacheTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncLoadingCacheTest.java
index 3b63f27801..e79e107b20 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncLoadingCacheTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncLoadingCacheTest.java
@@ -64,7 +64,6 @@
 import com.github.benmanes.caffeine.cache.testing.CheckNoWriter;
 import com.github.benmanes.caffeine.testing.Awaits;
 import com.google.common.collect.ImmutableList;
-import com.google.common.util.concurrent.MoreExecutors;
 
 /**
  * The test cases for the {@link AsyncLoadingCache} interface that simulate the most generic usages.
@@ -133,7 +132,7 @@ public void getFunc_nullKeyAndLoader(AsyncLoadingCache<Integer, Integer> cache,
 
   @CheckNoWriter
   @Test(dataProvider = "caches")
-  @CacheSpec(loader = Loader.NULL, executor = CacheExecutor.DIRECT)
+  @CacheSpec(loader = Loader.NULL)
   public void getFunc_absent_null(AsyncLoadingCache<Integer, Integer> cache,
       CacheContext context) {
     Integer key = context.absentKey();
@@ -148,8 +147,8 @@ public void getFunc_absent_null(AsyncLoadingCache<Integer, Integer> cache,
 
   @CheckNoWriter
   @Test(dataProvider = "caches")
-  @CacheSpec(loader = Loader.NULL,
-      executor = CacheExecutor.SINGLE, executorFailure = ExecutorFailure.IGNORED)
+  @CacheSpec(loader = Loader.NULL, executor = CacheExecutor.THREADED,
+      executorFailure = ExecutorFailure.IGNORED)
   public void getFunc_absent_null_async(AsyncLoadingCache<Integer, Integer> cache,
       CacheContext context) {
     Integer key = context.absentKey();
@@ -163,18 +162,17 @@ public void getFunc_absent_null_async(AsyncLoadingCache<Integer, Integer> cache,
     ready.set(true);
     Awaits.await().untilTrue(done);
 
-    Awaits.await().until(() -> cache.getIfPresent(key) == null);
-    Awaits.await().until(() -> context, both(hasMissCount(2)).and(hasHitCount(0)));
+    Awaits.await().until(() -> !cache.synchronous().asMap().containsKey(context.absentKey()));
+    Awaits.await().until(() -> context, both(hasMissCount(1)).and(hasHitCount(0)));
     Awaits.await().until(() -> context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
-    MoreExecutors.shutdownAndAwaitTermination(context.executor(), 1, TimeUnit.MINUTES);
 
     assertThat(valueFuture.isDone(), is(true));
     assertThat(cache.synchronous().asMap(), not(hasKey(key)));
   }
 
+  @CacheSpec
   @CheckNoWriter
   @Test(dataProvider = "caches")
-  @CacheSpec(executor = CacheExecutor.DIRECT)
   public void getFunc_absent_failure(AsyncLoadingCache<Integer, Integer> cache,
       CacheContext context) {
     CompletableFuture<Integer> valueFuture = cache.get(context.absentKey(),
@@ -189,7 +187,7 @@ public void getFunc_absent_failure(AsyncLoadingCache<Integer, Integer> cache,
 
   @CheckNoWriter
   @Test(dataProvider = "caches")
-  @CacheSpec(executor = CacheExecutor.SINGLE, executorFailure = ExecutorFailure.IGNORED)
+  @CacheSpec(executor = CacheExecutor.THREADED, executorFailure = ExecutorFailure.IGNORED)
   public void getFunc_absent_failure_async(AsyncLoadingCache<Integer, Integer> cache,
       CacheContext context) {
     AtomicBoolean ready = new AtomicBoolean();
@@ -202,10 +200,9 @@ public void getFunc_absent_failure_async(AsyncLoadingCache<Integer, Integer> cache,
     ready.set(true);
     Awaits.await().untilTrue(done);
 
-    Awaits.await().until(() -> cache.getIfPresent(context.absentKey()) == null);
-    Awaits.await().until(() -> context, both(hasMissCount(2)).and(hasHitCount(0)));
+    Awaits.await().until(() -> !cache.synchronous().asMap().containsKey(context.absentKey()));
+    Awaits.await().until(() -> context, both(hasMissCount(1)).and(hasHitCount(0)));
     Awaits.await().until(() -> context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
-    MoreExecutors.shutdownAndAwaitTermination(context.executor(), 1, TimeUnit.MINUTES);
 
     assertThat(valueFuture.isCompletedExceptionally(), is(true));
     assertThat(cache.getIfPresent(context.absentKey()), is(nullValue()));
@@ -213,7 +210,7 @@ public void getFunc_absent_failure_async(AsyncLoadingCache<Integer, Integer> cache,
 
   @CheckNoWriter
   @Test(dataProvider = "caches")
-  @CacheSpec(loader = Loader.NULL, executor = CacheExecutor.SINGLE)
+  @CacheSpec(loader = Loader.NULL, executor = CacheExecutor.THREADED)
   public void getFunc_absent_cancelled(AsyncLoadingCache<Integer, Integer> cache,
       CacheContext context) {
     AtomicBoolean done = new AtomicBoolean();
@@ -225,10 +222,8 @@ public void getFunc_absent_cancelled(AsyncLoadingCache<Integer, Integer> cache,
     valueFuture.cancel(true);
 
     Awaits.await().untilTrue(done);
-    MoreExecutors.shutdownAndAwaitTermination(context.executor(), 1, TimeUnit.MINUTES);
-
-    assertThat(context, both(hasMissCount(1)).and(hasHitCount(0)));
-    assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
+    await().until(() -> context, both(hasMissCount(1)).and(hasHitCount(0)));
+    await().until(() -> context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
 
     assertThat(valueFuture.isDone(), is(true));
     assertThat(cache.getIfPresent(context.absentKey()), is(nullValue()));
@@ -420,8 +415,8 @@ public void get_absent_failure(AsyncLoadingCache<Integer, Integer> cache, CacheContext context) {
 
   @CheckNoWriter
   @Test(dataProvider = "caches")
-  @CacheSpec(loader = Loader.EXCEPTIONAL,
-      executor = CacheExecutor.SINGLE, executorFailure = ExecutorFailure.IGNORED)
+  @CacheSpec(loader = Loader.EXCEPTIONAL, executor = CacheExecutor.THREADED,
+      executorFailure = ExecutorFailure.IGNORED)
   public void get_absent_failure_async(AsyncLoadingCache<Integer, Integer> cache,
       CacheContext context) throws InterruptedException {
     AtomicBoolean done = new AtomicBoolean();
@@ -430,10 +425,9 @@ public void get_absent_failure_async(AsyncLoadingCache<Integer, Integer> cache,
     valueFuture.whenComplete((r, e) -> done.set(true));
 
     Awaits.await().untilTrue(done);
-    Awaits.await().until(() -> cache.getIfPresent(context.absentKey()) == null);
-    Awaits.await().until(() -> context, both(hasMissCount(2)).and(hasHitCount(0)));
+    Awaits.await().until(() -> !cache.synchronous().asMap().containsKey(context.absentKey()));
+    Awaits.await().until(() -> context, both(hasMissCount(1)).and(hasHitCount(0)));
     Awaits.await().until(() -> context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(1)));
-    MoreExecutors.shutdownAndAwaitTermination(context.executor(), 1, TimeUnit.MINUTES);
 
     assertThat(valueFuture.isCompletedExceptionally(), is(true));
     assertThat(cache.getIfPresent(key), is(nullValue()));
@@ -691,7 +685,7 @@ public void put_replace(AsyncLoadingCache<Integer, Integer> cache, CacheContext context) {
 
   @Test(dataProvider = "caches")
   @CacheSpec(implementation = Implementation.Caffeine, population = Population.EMPTY,
-      executor = CacheExecutor.SINGLE, compute = Compute.ASYNC, values = ReferenceType.STRONG)
+      executor = CacheExecutor.THREADED, compute = Compute.ASYNC, values = ReferenceType.STRONG)
   public void refresh(Caffeine<Integer, Integer> builder, CacheContext context) {
     AtomicBoolean done = new AtomicBoolean();
     AsyncLoadingCache<Integer, Integer> cache = builder.buildAsync(key -> {
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/EvictionTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/EvictionTest.java
index cf6f922129..805fc4f4ec 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/EvictionTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/EvictionTest.java
@@ -18,17 +18,18 @@
 import static com.github.benmanes.caffeine.cache.testing.CacheWriterVerifier.verifyWriter;
 import static com.github.benmanes.caffeine.cache.testing.HasRemovalNotifications.hasRemovalNotifications;
 import static com.github.benmanes.caffeine.cache.testing.HasStats.hasEvictionCount;
+import static com.github.benmanes.caffeine.testing.Awaits.await;
 import static com.github.benmanes.caffeine.testing.IsEmptyMap.emptyMap;
 import static java.util.Arrays.asList;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.either;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.mockito.Mockito.verify;
 
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -148,13 +149,13 @@ public void evict_weighted(Cache<Integer, List<Integer>> cache,
     cache.put(1, value1);
     cache.put(2, value2);
     cache.put(3, value3);
-    assertThat(cache.estimatedSize(), is(4L));
+    await().until(cache::estimatedSize, is(4L));
     assertThat(eviction.weightedSize().getAsLong(), is(10L));
 
     // [0 | 1, 2, 3] -> [0, 4 | 2, 3]
     cache.put(4, value4);
+    await().until(cache::estimatedSize, is(4L));
     assertThat(cache.asMap().containsKey(1), is(false));
-    assertThat(cache.estimatedSize(), is(4L));
     assertThat(eviction.weightedSize().getAsLong(), is(8L));
     verifyWriter(context, (verifier, ignored) -> {
       verify(writer).delete(1, value1, RemovalCause.SIZE);
@@ -163,7 +164,7 @@ public void evict_weighted(Cache<Integer, List<Integer>> cache,
 
     // [0, 4 | 2, 3] remains (5 exceeds window and has the same usage history, so evicted)
     cache.put(5, value5);
-    assertThat(cache.estimatedSize(), is(4L));
+    await().until(cache::estimatedSize, is(4L));
     assertThat(eviction.weightedSize().getAsLong(), is(8L));
     verifyWriter(context, (verifier, ignored) -> {
       verify(writer).delete(5, value5, RemovalCause.SIZE);
@@ -598,17 +599,12 @@ public void coldest_partial(CacheContext context, Eviction<Integer, Integer> eviction) {
       removalListener = { Listener.DEFAULT, Listener.REJECTING })
   public void coldest_order(CacheContext context, Eviction<Integer, Integer> eviction) {
     int size = context.original().size();
-    int main = (int) (BoundedLocalCache.PERCENT_MAIN * context.maximumSize());
-    int eden = size - main;
-    if (eden < context.weigher().weigh(null, null)) {
-      main += eden;
-    }
-
     Iterable<Integer> keys = Iterables.concat(
-        Iterables.skip(context.original().keySet(), main),
-        Iterables.limit(ImmutableList.copyOf(context.original().keySet()), main));
+        Iterables.skip(context.original().keySet(), size - 1),
+        Iterables.limit(ImmutableList.copyOf(context.original().keySet()), size - 1));
     Set<Integer> coldest = eviction.coldest(Integer.MAX_VALUE).keySet();
-    assertThat(coldest, contains(Iterables.toArray(keys, Integer.class)));
+    assertThat(coldest, either(contains(Iterables.toArray(keys, Integer.class)))
+        .or(contains(Iterables.toArray(context.original().keySet(), Integer.class))));
   }
 
   @Test(dataProvider = "caches")
@@ -654,9 +650,15 @@ public void hottest_partial(CacheContext context, Eviction<Integer, Integer> eviction) {
       initialCapacity = InitialCapacity.EXCESSIVE, maximumSize = Maximum.FULL,
       removalListener = { Listener.DEFAULT, Listener.REJECTING })
   public void hottest_order(CacheContext context, Eviction<Integer, Integer> eviction) {
-    Map<Integer, Integer> hottest = eviction.hottest(Integer.MAX_VALUE);
-    Set<Integer> keys = new LinkedHashSet<>(ImmutableList.copyOf(hottest.keySet()).reverse());
-    assertThat(keys, contains(context.original().keySet().toArray(new Integer[0])));
+    Set<Integer> hottest = eviction.hottest(Integer.MAX_VALUE).keySet();
+    List<Integer> coldest = ImmutableList.copyOf(hottest).reverse();
+
+    int size = context.original().size();
+    Iterable<Integer> keys = Iterables.concat(
+        Iterables.skip(context.original().keySet(), size - 1),
+        Iterables.limit(ImmutableList.copyOf(context.original().keySet()), size - 1));
+    assertThat(coldest, either(contains(Iterables.toArray(keys, Integer.class)))
+        .or(contains(Iterables.toArray(context.original().keySet(), Integer.class))));
   }
 
   @Test(dataProvider = "caches")
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/IsValidBoundedLocalCache.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/IsValidBoundedLocalCache.java
index 93a0cfce19..0841733016 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/IsValidBoundedLocalCache.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/IsValidBoundedLocalCache.java
@@ -85,8 +85,10 @@ private void checkReadBuffer(BoundedLocalCache<K, V> cache) {
   private void checkCache(BoundedLocalCache<K, V> cache, DescriptionBuilder desc) {
     desc.expectThat("Inconsistent size", cache.data.size(), is(cache.size()));
     if (cache.evicts()) {
-      desc.expectThat("overflow", cache.maximum(),
-          is(greaterThanOrEqualTo(cache.adjustedWeightedSize())));
+      cache.evictionLock.lock();
+      long weightedSize = cache.weightedSize();
+      cache.evictionLock.unlock();
+      desc.expectThat("overflow", cache.maximum(), is(greaterThanOrEqualTo(weightedSize)));
     }
 
     boolean locked = (cache.evictionLock instanceof NonReentrantLock)
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java
index 921f965d14..9c8d18b8c9 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java
@@ -20,8 +20,11 @@
 import static com.github.benmanes.caffeine.cache.testing.HasStats.hasLoadFailureCount;
 import static com.github.benmanes.caffeine.cache.testing.HasStats.hasLoadSuccessCount;
 import static com.github.benmanes.caffeine.cache.testing.HasStats.hasMissCount;
+import static com.github.benmanes.caffeine.testing.Awaits.await;
+import static java.util.stream.Collectors.toList;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.both;
+import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
@@ -30,10 +33,12 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CompletionException;
 import java.util.concurrent.ExecutionException;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.junit.Assert;
 import org.testng.annotations.Listeners;
@@ -43,12 +48,14 @@
 import com.github.benmanes.caffeine.cache.testing.CacheProvider;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheExecutor;
+import com.github.benmanes.caffeine.cache.testing.CacheSpec.Compute;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Loader;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
 import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
 import com.github.benmanes.caffeine.cache.testing.CheckNoWriter;
+import com.github.benmanes.caffeine.cache.testing.RemovalNotification;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 
@@ -249,7 +256,7 @@ public void refresh_null(LoadingCache<Integer, Integer> cache, CacheContext context) {
 
   @CheckNoWriter
   @Test(dataProvider = "caches")
-  @CacheSpec(implementation = Implementation.Caffeine,
+  @CacheSpec(implementation = Implementation.Caffeine, compute = Compute.SYNC,
       executor = CacheExecutor.DIRECT, loader = Loader.NULL,
       population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
   public void refresh_remove(LoadingCache<Integer, Integer> cache, CacheContext context) {
@@ -282,8 +289,7 @@ public void refresh_absent_null(LoadingCache<Integer, Integer> cache, CacheContext context) {
 
   @CheckNoWriter
   @Test(dataProvider = "caches")
-  @CacheSpec(executor = CacheExecutor.DIRECT,
-      removalListener = { Listener.DEFAULT, Listener.REJECTING })
+  @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
   public void refresh_absent(LoadingCache<Integer, Integer> cache, CacheContext context) {
     cache.refresh(context.absentKey());
     assertThat(cache.estimatedSize(), is(1 + context.initialSize()));
@@ -296,8 +302,26 @@ public void refresh_absent(LoadingCache<Integer, Integer> cache, CacheContext context) {
 
   @CheckNoWriter
   @Test(dataProvider = "caches")
-  @CacheSpec(executor = CacheExecutor.DIRECT,
-      population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
+  @CacheSpec(implementation = Implementation.Caffeine, loader = Loader.NULL,
+      population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
+  public void refresh_present_null(LoadingCache<Integer, Integer> cache, CacheContext context) {
+    for (Integer key : context.firstMiddleLastKeys()) {
+      cache.refresh(key);
+    }
+    int count = context.firstMiddleLastKeys().size();
+    assertThat(context, both(hasMissCount(0)).and(hasHitCount(0)));
+    assertThat(context, both(hasLoadSuccessCount(0)).and(hasLoadFailureCount(count)));
+
+    for (Integer key : context.firstMiddleLastKeys()) {
+      assertThat(cache.getIfPresent(key), is(nullValue()));
+    }
+    assertThat(cache.estimatedSize(), is(context.initialSize() - count));
+    assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.EXPLICIT));
+  }
+
+  @CheckNoWriter
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
   public void refresh_present_sameValue(
       LoadingCache<Integer, Integer> cache, CacheContext context) {
     for (Integer key : context.firstMiddleLastKeys()) {
@@ -316,8 +340,8 @@ public void refresh_present_sameValue(
 
   @CheckNoWriter
   @Test(dataProvider = "caches")
-  @CacheSpec(executor = CacheExecutor.DIRECT, loader = Loader.IDENTITY,
-      population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
+  @CacheSpec(loader = Loader.IDENTITY,
+      population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
   public void refresh_present_differentValue(
       LoadingCache<Integer, Integer> cache, CacheContext context) {
     for (Integer key : context.firstMiddleLastKeys()) {
@@ -332,6 +356,58 @@ public void refresh_present_differentValue(
     assertThat(context, both(hasLoadSuccessCount(count)).and(hasLoadFailureCount(0)));
   }
 
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.EMPTY,
+      executor = CacheExecutor.THREADED, removalListener = Listener.CONSUMING)
+  public void refresh_conflict(CacheContext context) {
+    AtomicBoolean refresh = new AtomicBoolean();
+    Integer key = context.absentKey();
+    Integer original = 1;
+    Integer updated = 2;
+    Integer refreshed = 3;
+    LoadingCache<Integer, Integer> cache = context.build(k -> {
+      await().untilTrue(refresh);
+      return refreshed;
+    });
+
+    cache.put(key, original);
+    cache.refresh(key);
+    assertThat(cache.asMap().put(key, updated), is(original));
+
+    refresh.set(true);
+    await().until(() -> context.consumedNotifications().size(), is(2));
+    List<Integer> removed = context.consumedNotifications().stream()
+        .map(RemovalNotification::getValue).collect(toList());
+
+    assertThat(cache.getIfPresent(key), is(updated));
+    assertThat(removed, containsInAnyOrder(original, refreshed));
+    assertThat(cache, hasRemovalNotifications(context, 2, RemovalCause.REPLACED));
+    assertThat(context, both(hasLoadSuccessCount(1)).and(hasLoadFailureCount(0)));
+  }
+
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.EMPTY, executor = CacheExecutor.THREADED,
+      removalListener = Listener.CONSUMING)
+  public void refresh_invalidate(CacheContext context) {
+    AtomicBoolean refresh = new AtomicBoolean();
+    Integer key = context.absentKey();
+    Integer original = 1;
+    Integer refreshed = 2;
+    LoadingCache<Integer, Integer> cache = context.build(k -> {
+      await().untilTrue(refresh);
+      return refreshed;
+    });
+
+    cache.put(key, original);
+    cache.refresh(key);
+    cache.invalidate(key);
+
+    refresh.set(true);
+    await().until(() -> cache.getIfPresent(key), is(refreshed));
+    await().until(() -> cache, hasRemovalNotifications(context, 1, RemovalCause.EXPLICIT));
+    await().until(() -> context, both(hasLoadSuccessCount(1)).and(hasLoadFailureCount(0)));
+  }
+
   /* ---------------- CacheLoader -------------- */
 
   @Test(expectedExceptions = UnsupportedOperationException.class)
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/MultiThreadedTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/MultiThreadedTest.java
index 72c60dc275..5101d4b8d6 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/MultiThreadedTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/MultiThreadedTest.java
@@ -55,9 +55,9 @@ public final class MultiThreadedTest {
   @Test(dataProvider = "caches")
   @CacheSpec(maximumSize = Maximum.DISABLED, stats = Stats.DISABLED,
       population = Population.EMPTY, expireAfterAccess = Expire.DISABLED,
-      expireAfterWrite = Expire.DISABLED, refreshAfterWrite = Expire.DISABLED,
-      removalListener = Listener.DEFAULT, keys = ReferenceType.STRONG,
-      values = ReferenceType.STRONG, writer = Writer.DISABLED)
+      expireAfterWrite = Expire.DISABLED, removalListener = Listener.DEFAULT,
+      refreshAfterWrite = { Expire.DISABLED, Expire.ONE_MILLISECOND },
+      keys = ReferenceType.STRONG, values = ReferenceType.STRONG, writer = Writer.DISABLED)
   public void concurrent_unbounded(LoadingCache<Integer, Integer> cache, CacheContext context) {
     Threads.runTest(cache, operations);
   }
@@ -65,9 +65,9 @@ public void concurrent_unbounded(LoadingCache<Integer, Integer> cache, CacheContext context) {
   @Test(dataProvider = "caches")
   @CacheSpec(maximumSize = Maximum.FULL, weigher = {CacheWeigher.DEFAULT, CacheWeigher.RANDOM},
       stats = Stats.DISABLED, population = Population.EMPTY, expireAfterAccess = Expire.FOREVER,
-      expireAfterWrite = Expire.FOREVER, refreshAfterWrite = Expire.DISABLED,
-      removalListener = Listener.DEFAULT, keys = ReferenceType.STRONG,
-      values = ReferenceType.STRONG, writer = Writer.DISABLED)
+      removalListener = Listener.DEFAULT, expireAfterWrite = Expire.FOREVER,
+      refreshAfterWrite = { Expire.DISABLED, Expire.ONE_MILLISECOND },
+      keys = ReferenceType.STRONG, values = ReferenceType.STRONG, writer = Writer.DISABLED)
   public void concurrent_bounded(LoadingCache<Integer, Integer> cache, CacheContext context) {
     Threads.runTest(cache, operations);
   }
@@ -75,9 +75,9 @@ public void concurrent_bounded(LoadingCache<Integer, Integer> cache, CacheContext context) {
   @Test(dataProvider = "caches")
   @CacheSpec(maximumSize = Maximum.DISABLED, stats = Stats.DISABLED,
       population = Population.EMPTY, expireAfterAccess = Expire.DISABLED,
-      expireAfterWrite = Expire.DISABLED, refreshAfterWrite = Expire.DISABLED,
-      removalListener = Listener.DEFAULT, keys = ReferenceType.STRONG,
-      values = ReferenceType.STRONG, writer = Writer.DISABLED)
+      expireAfterWrite = Expire.DISABLED, removalListener = Listener.DEFAULT,
+      refreshAfterWrite = { Expire.DISABLED, Expire.ONE_MILLISECOND },
+      keys = ReferenceType.STRONG, values = ReferenceType.STRONG, writer = Writer.DISABLED)
   public void async_concurrent_unbounded(
       AsyncLoadingCache<Integer, Integer> cache, CacheContext context) {
     Threads.runTest(cache, asyncOperations);
@@ -86,9 +86,9 @@ public void async_concurrent_unbounded(
   @Test(dataProvider = "caches")
   @CacheSpec(maximumSize = Maximum.FULL, weigher = {CacheWeigher.DEFAULT, CacheWeigher.RANDOM},
       stats = Stats.DISABLED, population = Population.EMPTY, expireAfterAccess = Expire.FOREVER,
-      expireAfterWrite = Expire.FOREVER, refreshAfterWrite = Expire.DISABLED,
-      removalListener = Listener.DEFAULT, keys = ReferenceType.STRONG,
-      values = ReferenceType.STRONG, writer = Writer.DISABLED)
+      expireAfterWrite = Expire.FOREVER, removalListener = Listener.DEFAULT,
+      refreshAfterWrite = { Expire.DISABLED, Expire.ONE_MILLISECOND },
+      keys = ReferenceType.STRONG, values = ReferenceType.STRONG, writer = Writer.DISABLED)
   public void async_concurrent_bounded(
       AsyncLoadingCache<Integer, Integer> cache, CacheContext context) {
     Threads.runTest(cache, asyncOperations);
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java
index be27f04e08..d405fff90d 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java
@@ -16,11 +16,16 @@
 package com.github.benmanes.caffeine.cache;
 
 import static com.github.benmanes.caffeine.cache.testing.HasRemovalNotifications.hasRemovalNotifications;
+import static com.github.benmanes.caffeine.cache.testing.HasStats.hasLoadFailureCount;
+import static com.github.benmanes.caffeine.cache.testing.HasStats.hasLoadSuccessCount;
 import static com.github.benmanes.caffeine.testing.Awaits.await;
 import static com.github.benmanes.caffeine.testing.IsEmptyMap.emptyMap;
 import static com.github.benmanes.caffeine.testing.IsFutureValue.futureOf;
+import static java.util.stream.Collectors.toList;
 import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.both;
 import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
@@ -54,6 +59,7 @@
 import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
 import com.github.benmanes.caffeine.cache.testing.CheckNoWriter;
 import com.github.benmanes.caffeine.cache.testing.RefreshAfterWrite;
com.github.benmanes.caffeine.cache.testing.RemovalNotification; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -66,6 +72,8 @@ @Test(dataProviderClass = CacheProvider.class) public final class RefreshAfterWriteTest { + /* ---------------- getIfPresent -------------- */ + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE, loader = Loader.NEGATIVE, @@ -94,6 +102,8 @@ public void getIfPresent(AsyncLoadingCache cache, CacheContext assertThat(cache, hasRemovalNotifications(context, 1, RemovalCause.REPLACED)); } + /* ---------------- getAllPresent -------------- */ + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE, @@ -109,11 +119,13 @@ public void getAllPresent(LoadingCache cache, CacheContext con assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.REPLACED)); } + /* ---------------- getFunc -------------- */ + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE, population = { Population.PARTIAL, Population.FULL }) - public void get_mappingFun(LoadingCache cache, CacheContext context) { + public void getFunc(LoadingCache cache, CacheContext context) { Function mappingFunction = context.original()::get; context.ticker().advance(30, TimeUnit.SECONDS); cache.get(context.firstKey(), mappingFunction); @@ -128,7 +140,7 @@ public void get_mappingFun(LoadingCache cache, CacheContext co @Test(dataProvider = "caches") @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE, population = { Population.PARTIAL, Population.FULL }) - public void get_mappingFun(AsyncLoadingCache cache, CacheContext context) { + public void getFunc(AsyncLoadingCache cache, CacheContext context) { Function mappingFunction = context.original()::get; context.ticker().advance(30, TimeUnit.SECONDS); cache.get(context.firstKey(), mappingFunction); @@ -139,6 +151,8 @@ public void get_mappingFun(AsyncLoadingCache cache, CacheConte assertThat(cache, hasRemovalNotifications(context, 1, RemovalCause.REPLACED)); } + /* ---------------- get -------------- */ + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE, @@ -169,7 +183,7 @@ public void get(AsyncLoadingCache cache, CacheContext context) @Test(dataProvider = "caches") @CacheSpec(implementation = Implementation.Caffeine, population = Population.EMPTY, - refreshAfterWrite = Expire.ONE_MINUTE, executor = CacheExecutor.SINGLE, + refreshAfterWrite = Expire.ONE_MINUTE, executor = CacheExecutor.THREADED, compute = Compute.ASYNC, values = ReferenceType.STRONG) public void get_sameFuture(Caffeine builder, CacheContext context) { AtomicBoolean done = new AtomicBoolean(); @@ -199,6 +213,8 @@ public void get_null(AsyncLoadingCache cache, CacheContext con await().until(() -> cache.synchronous().getIfPresent(key), is(nullValue())); } + /* ---------------- getAll -------------- */ + @CheckNoWriter @Test(dataProvider = "caches") @CacheSpec(refreshAfterWrite = Expire.ONE_MINUTE, loader = Loader.IDENTITY, @@ -239,6 +255,66 @@ public void getAll(AsyncLoadingCache cache, CacheContext conte assertThat(cache, hasRemovalNotifications(context, 1, RemovalCause.REPLACED)); } + /* ---------------- put -------------- */ + + @Test(dataProvider = "caches") + @CacheSpec(population = Population.EMPTY, refreshAfterWrite = Expire.ONE_MINUTE, + executor = CacheExecutor.THREADED, removalListener = Listener.CONSUMING) + public void put(CacheContext context) { + AtomicBoolean refresh = 
new AtomicBoolean(); + Integer key = context.absentKey(); + Integer original = 1; + Integer updated = 2; + Integer refreshed = 3; + LoadingCache cache = context.build(k -> { + await().untilTrue(refresh); + return refreshed; + }); + + cache.put(key, original); + context.ticker().advance(2, TimeUnit.MINUTES); + assertThat(cache.getIfPresent(key), is(original)); + + assertThat(cache.asMap().put(key, updated), is(original)); + refresh.set(true); + + await().until(() -> context.consumedNotifications().size(), is(2)); + List removed = context.consumedNotifications().stream() + .map(RemovalNotification::getValue).collect(toList()); + + assertThat(cache.getIfPresent(key), is(updated)); + assertThat(removed, containsInAnyOrder(original, refreshed)); + assertThat(cache, hasRemovalNotifications(context, 2, RemovalCause.REPLACED)); + assertThat(context, both(hasLoadSuccessCount(1)).and(hasLoadFailureCount(0))); + } + + /* ---------------- invalidate -------------- */ + + @Test(dataProvider = "caches") + @CacheSpec(population = Population.EMPTY, refreshAfterWrite = Expire.ONE_MINUTE, + executor = CacheExecutor.THREADED, removalListener = Listener.CONSUMING) + public void invalidate(CacheContext context) { + AtomicBoolean refresh = new AtomicBoolean(); + Integer key = context.absentKey(); + Integer original = 1; + Integer refreshed = 2; + LoadingCache cache = context.build(k -> { + await().untilTrue(refresh); + return refreshed; + }); + + cache.put(key, original); + context.ticker().advance(2, TimeUnit.MINUTES); + assertThat(cache.getIfPresent(key), is(original)); + + cache.invalidate(key); + refresh.set(true); + + await().until(() -> cache.getIfPresent(key), is(refreshed)); + assertThat(cache, hasRemovalNotifications(context, 1, RemovalCause.EXPLICIT)); + assertThat(context, both(hasLoadSuccessCount(1)).and(hasLoadFailureCount(0))); + } + /* ---------------- Policy -------------- */ @Test(dataProvider = "caches") diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java index 0a39e5e3c7..a375a6417d 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java @@ -32,8 +32,10 @@ import com.github.benmanes.caffeine.cache.AsyncLoadingCache; import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.CacheLoader; import com.github.benmanes.caffeine.cache.CacheWriter; import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.LoadingCache; import com.github.benmanes.caffeine.cache.RemovalListener; import com.github.benmanes.caffeine.cache.stats.CacheStats; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Advance; @@ -50,8 +52,11 @@ import com.github.benmanes.caffeine.cache.testing.CacheSpec.ReferenceType; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Stats; import com.github.benmanes.caffeine.cache.testing.CacheSpec.Writer; +import com.github.benmanes.caffeine.cache.testing.GuavaCacheFromContext.GuavaLoadingCache; +import com.github.benmanes.caffeine.cache.testing.GuavaCacheFromContext.SingleLoader; import com.github.benmanes.caffeine.cache.testing.RemovalListeners.ConsumingRemovalListener; import com.google.common.base.MoreObjects; +import com.google.common.cache.CacheBuilder; import com.google.common.collect.ImmutableSet; /** @@ -86,7 +91,8 @@ public final class 
CacheContext { final boolean isAsyncLoading; Cache cache; - Caffeine builder; + Caffeine caffeine; + CacheBuilder guava; AsyncLoadingCache asyncCache; @Nullable Integer firstKey; @@ -361,6 +367,20 @@ public FakeTicker ticker() { return ticker; } + public LoadingCache build(CacheLoader loader) { + LoadingCache cache = null; + if (isCaffeine()) { + cache = isAsync() ? caffeine.buildAsync(loader).synchronous() : caffeine.build(loader); + } else { + cache = new GuavaLoadingCache<>(guava.build( + com.google.common.cache.CacheLoader.asyncReloading( + new SingleLoader<>(loader), executor.delegate())), + ticker(), isRecordingStats()); + } + this.cache = cache; + return cache; + } + public Implementation implementation() { return implementation; } diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheProvider.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheProvider.java index 1d5116de7d..f2613ab581 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheProvider.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheProvider.java @@ -97,7 +97,7 @@ private static Iterator asTestCases(Method testMethod, if (clazz.isAssignableFrom(CacheContext.class)) { params[i] = context; } else if (clazz.isAssignableFrom(Caffeine.class)) { - params[i] = context.builder; + params[i] = context.caffeine; } else if (clazz.isAssignableFrom(cache.getClass())) { params[i] = cache; } else if (clazz.isAssignableFrom(AsyncLoadingCache.class)) { diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheSpec.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheSpec.java index 7c84d022c5..0ab55ee1fa 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheSpec.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheSpec.java @@ -29,6 +29,7 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; @@ -252,6 +253,8 @@ enum Expire { DISABLED(Long.MIN_VALUE), /** A configuration where entries are evicted immediately. */ IMMEDIATELY(0L), + /** A configuration where entries are evicted almost immediately. */ + ONE_MILLISECOND(TimeUnit.MILLISECONDS.toNanos(1L)), /** A configuration that holds a single entry. */ ONE_MINUTE(TimeUnit.MINUTES.toNanos(1L)), /** A configuration that holds the {@link Population#FULL} count. */ @@ -535,6 +538,9 @@ enum ExecutorFailure { EXPECTED, DISALLOWED, IGNORED } + ExecutorService cachedExecutorService = Executors.newCachedThreadPool( + new ThreadFactoryBuilder().setDaemon(true).build()); + /** The executors that the cache can be configured with. 
*/ enum CacheExecutor implements Supplier { DEFAULT { // fork-join common pool @@ -549,11 +555,9 @@ enum CacheExecutor implements Supplier { return new TrackingExecutor(MoreExecutors.newDirectExecutorService()); } }, - SINGLE { + THREADED { @Override public TrackingExecutor get() { - // Isolated to the test execution - may be shutdown by test to assert completion - return new TrackingExecutor(Executors.newSingleThreadExecutor( - new ThreadFactoryBuilder().setDaemon(true).build())); + return new TrackingExecutor(cachedExecutorService); } }, REJECTING { diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheValidationListener.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheValidationListener.java index 28ef41e788..08f0e3d769 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheValidationListener.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheValidationListener.java @@ -56,21 +56,30 @@ public void beforeInvocation(IInvokedMethod method, ITestResult testResult) {} public void afterInvocation(IInvokedMethod method, ITestResult testResult) { try { if (testResult.isSuccess()) { + boolean foundCache = false; CacheContext context = null; for (Object param : testResult.getParameters()) { if (param instanceof Cache) { + foundCache = true; assertThat((Cache) param, is(validCache())); } else if (param instanceof AsyncLoadingCache) { + foundCache = true; assertThat((AsyncLoadingCache) param, is(validAsyncCache())); } else if (param instanceof Map) { + foundCache = true; assertThat((Map) param, is(validAsMap())); } else if (param instanceof CacheContext) { context = (CacheContext) param; } } - checkWriter(testResult, context); - checkNoStats(testResult, context); - checkExecutor(testResult, context); + if (context != null) { + if (!foundCache) { + assertThat(context.cache, is(validCache())); + } + checkWriter(testResult, context); + checkNoStats(testResult, context); + checkExecutor(testResult, context); + } } else { testResult.setThrowable(new AssertionError(getTestName(method), testResult.getThrowable())); } diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java index 09bc6b64c9..c8c3c9c45e 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java @@ -39,7 +39,7 @@ private CaffeineCacheFromContext() {} public static Cache newCaffeineCache(CacheContext context) { Caffeine builder = Caffeine.newBuilder(); - context.builder = builder; + context.caffeine = builder; if (context.initialCapacity != InitialCapacity.DEFAULT) { builder.initialCapacity(context.initialCapacity.size()); diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/GuavaCacheFromContext.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/GuavaCacheFromContext.java index f839412a83..623880c789 100644 --- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/GuavaCacheFromContext.java +++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/GuavaCacheFromContext.java @@ -68,8 +68,9 @@ public static Cache newGuavaCache(CacheContext context) { checkState(!context.isAsync(), "Guava caches are synchronous only"); CacheBuilder builder = 
CacheBuilder.newBuilder(); - builder.concurrencyLevel(1); + context.guava = builder; + builder.concurrencyLevel(1); if (context.initialCapacity != InitialCapacity.DEFAULT) { builder.initialCapacity(context.initialCapacity.size()); } diff --git a/config/pmd/rulesSets.xml b/config/pmd/rulesSets.xml index e63f102581..62e771a111 100644 --- a/config/pmd/rulesSets.xml +++ b/config/pmd/rulesSets.xml @@ -69,6 +69,7 @@ + diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle index b86484dba8..ddb440f4df 100644 --- a/gradle/dependencies.gradle +++ b/gradle/dependencies.gradle @@ -46,13 +46,13 @@ ext { jcache_tck: '1.0.1', jctools: '1.2', junit: '4.12', - mockito: '2.0.48-beta', - pax_exam: '4.8.0', + mockito: '2.0.52-beta', + pax_exam: '4.9.0', testng: '6.9.11', truth: '0.24', ] benchmark_versions = [ - cache2k: '0.24-BETA', + cache2k: '0.25-BETA', concurrentlinkedhashmap: '1.4.2', ehcache2: '2.10.2.1.7', ehcache3: '3.0.0', diff --git a/simulator/src/main/java/com/github/benmanes/caffeine/cache/simulator/policy/product/Cache2kPolicy.java b/simulator/src/main/java/com/github/benmanes/caffeine/cache/simulator/policy/product/Cache2kPolicy.java index 5d6e81e2d4..13e6bcde31 100644 --- a/simulator/src/main/java/com/github/benmanes/caffeine/cache/simulator/policy/product/Cache2kPolicy.java +++ b/simulator/src/main/java/com/github/benmanes/caffeine/cache/simulator/policy/product/Cache2kPolicy.java @@ -22,12 +22,7 @@ import java.util.logging.Logger; import org.cache2k.Cache; -import org.cache2k.CacheBuilder; -import org.cache2k.impl.ArcCache; -import org.cache2k.impl.ClockCache; -import org.cache2k.impl.ClockProPlusCache; -import org.cache2k.impl.LruCache; -import org.cache2k.impl.RandomCache; +import org.cache2k.Cache2kBuilder; import com.github.benmanes.caffeine.cache.simulator.BasicSettings; import com.github.benmanes.caffeine.cache.simulator.policy.Policy; @@ -55,9 +50,8 @@ public Cache2kPolicy(Config config) { System.setErr(new PrintStream(ByteStreams.nullOutputStream())); try { this.policyStats = new PolicyStats("product.Cache2k"); - Cache2kSettings settings = new Cache2kSettings(config); - cache = CacheBuilder.newCache(Object.class, Object.class) - .implementation(settings.implementation()) + BasicSettings settings = new BasicSettings(config); + cache = Cache2kBuilder.of(Object.class, Object.class) .entryCapacity(settings.maximumSize()) .eternal(true) .build(); @@ -91,27 +85,4 @@ public void record(long key) { public PolicyStats stats() { return policyStats; } - - static final class Cache2kSettings extends BasicSettings { - public Cache2kSettings(Config config) { - super(config); - } - public Class implementation() { - String policy = config().getString("cache2k.policy").toLowerCase(); - switch (policy) { - case "arc": - return ArcCache.class; - case "clock": - return ClockCache.class; - case "clockpro": - return ClockProPlusCache.class; - case "lru": - return LruCache.class; - case "random": - return RandomCache.class; - default: - throw new IllegalArgumentException("Unknown policy type: " + policy); - } - } - } } diff --git a/simulator/src/main/resources/reference.conf b/simulator/src/main/resources/reference.conf index 6b095ff7df..78925a737e 100644 --- a/simulator/src/main/resources/reference.conf +++ b/simulator/src/main/resources/reference.conf @@ -215,11 +215,6 @@ caffeine.simulator { percent-fast-path = "0.0" # "0.05" is reasonable } - cache2k { - # Policies: Arc, Clock, ClockPro, Lru, Random - policy = "clockpro" - } - ehcache2 { # Policies: Lru, Lfu, Fifo, Clock policy = "lru"