consistent behavior for null lookups into unmodifiable maps (fixes #864)

While a null lookup is not allowed for the cache operations (e.g. asMap.get), if
an operation returned a map then its behavior might not be consistent. Originally all of
these cases used Collections' emptyMap() or unmodifiableMap(m), but a few cases
switched to Map's of() or copyOf() alternatives. The former allows for null
queries whereas the latter does not, and this could vary depending on whether the
result was populated. For consistency, and to retain the original behavior, this
is restored to the Collections methods. Further, the empty map is now wrapped
as unmodifiable because Map.clear()'s behavior would otherwise be inconsistent.
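
As a rough illustration of the difference described above, here is a hypothetical, self-contained sketch (not code from this commit; the class name and printed messages are invented). The Collections-based views tolerate null query arguments, while on current JDKs the maps returned by Map.of() and Map.copyOf() reject them:

import java.util.Collections;
import java.util.Map;

public class NullLookupSketch {
  public static void main(String[] args) {
    // Collections-based views delegate null lookups to the backing map,
    // so an absent (or null) key simply reports a miss.
    Map<String, Integer> legacy = Collections.unmodifiableMap(Collections.emptyMap());
    System.out.println(legacy.get(null));          // null
    System.out.println(legacy.containsKey(null));  // false

    // The Map.of() / Map.copyOf() implementations reject null query arguments.
    Map<String, Integer> factory = Map.of();
    try {
      factory.containsKey(null);
    } catch (NullPointerException expected) {
      System.out.println("Map.of() rejects null lookups");
    }

    // Mutator consistency: a bare Collections.emptyMap() ignores clear() silently,
    // while the unmodifiable wrapper (like any populated result) throws.
    Collections.emptyMap().clear();                // no exception
    try {
      legacy.clear();
    } catch (UnsupportedOperationException expected) {
      System.out.println("the unmodifiable wrapper rejects clear()");
    }
  }
}

This is why the hunks below replace Map.of() and Map.copyOf(castedRefreshes) with Collections.unmodifiableMap(Collections.emptyMap()) and Collections.unmodifiableMap(new HashMap<>(castedRefreshes)), and why the new *_nullLookup tests assert that get(null), containsKey(null), and containsValue(null) return rather than throw.
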
ben-manes committed Feb 2, 2023
1 parent 6fc12bb commit 3b36377
Showing 11 changed files with 141 additions and 25 deletions.
@@ -49,6 +49,7 @@
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
@@ -1661,8 +1662,7 @@ void rescheduleCleanUpIfIncomplete() {
}

// If a scheduler was configured then the maintenance can be deferred onto the custom executor
// to be run in the near future. This is only used if there is no scheduled set, else the next
// run depends on other activity to trigger it.
// and run in the near future. Otherwise, it will be handled due to other cache activity.
var pacer = pacer();
if ((pacer != null) && !pacer.isScheduled() && evictionLock.tryLock()) {
try {
@@ -3985,7 +3985,7 @@ static final class BoundedPolicy<K, V> implements Policy<K, V> {
@Override public Map<K, CompletableFuture<V>> refreshes() {
var refreshes = cache.refreshes;
if ((refreshes == null) || refreshes.isEmpty()) {
return Map.of();
return Collections.unmodifiableMap(Collections.emptyMap());
} else if (cache.collectKeys()) {
var inFlight = new IdentityHashMap<K, CompletableFuture<V>>(refreshes.size());
for (var entry : refreshes.entrySet()) {
@@ -4001,7 +4001,7 @@ static final class BoundedPolicy<K, V> implements Policy<K, V> {
}
@SuppressWarnings("unchecked")
var castedRefreshes = (Map<K, CompletableFuture<V>>) (Object) refreshes;
return Map.copyOf(castedRefreshes);
return Collections.unmodifiableMap(new HashMap<>(castedRefreshes));
}
@Override public Optional<Eviction<K, V>> eviction() {
return cache.evicts()
@@ -160,7 +160,7 @@ default CompletableFuture<Map<K, V>> getAll(Iterable<? extends K> keys,
*/
static <K, V> CompletableFuture<Map<K, V>> composeResult(Map<K, CompletableFuture<V>> futures) {
if (futures.isEmpty()) {
return CompletableFuture.completedFuture(Map.of());
return CompletableFuture.completedFuture(Collections.unmodifiableMap(Collections.emptyMap()));
}
@SuppressWarnings("rawtypes")
CompletableFuture<?>[] array = futures.values().toArray(new CompletableFuture[0]);
@@ -32,6 +32,7 @@
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -1068,12 +1069,12 @@ static final class UnboundedPolicy<K, V> implements Policy<K, V> {
}
@Override public Map<K, CompletableFuture<V>> refreshes() {
var refreshes = cache.refreshes;
if (refreshes == null) {
return Map.of();
if ((refreshes == null) || refreshes.isEmpty()) {
return Collections.unmodifiableMap(Collections.emptyMap());
}
@SuppressWarnings("unchecked")
var castedRefreshes = (Map<K, CompletableFuture<V>>) (Object) refreshes;
return Map.copyOf(castedRefreshes);
return Collections.unmodifiableMap(new HashMap<>(castedRefreshes));
}
@Override public Optional<Eviction<K, V>> eviction() {
return Optional.empty();
@@ -22,6 +22,7 @@
import static com.github.benmanes.caffeine.testing.Awaits.await;
import static com.github.benmanes.caffeine.testing.CollectionSubject.assertThat;
import static com.github.benmanes.caffeine.testing.FutureSubject.assertThat;
import static com.github.benmanes.caffeine.testing.IntSubject.assertThat;
import static com.github.benmanes.caffeine.testing.MapSubject.assertThat;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.truth.Truth.assertThat;
@@ -64,6 +65,7 @@
import com.github.benmanes.caffeine.testing.Int;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.primitives.Ints;

/**
@@ -373,6 +375,16 @@ public void getAllFunction_iterable_empty(AsyncCache<Int, Int> cache, CacheConte
assertThat(result).isExhaustivelyEmpty();
}

@Test(dataProvider = "caches")
@CacheSpec(removalListener = { Listener.DISABLED, Listener.REJECTING })
public void getAllFunction_nullLookup(AsyncCache<Int, Int> cache, CacheContext context) {
var result = cache.getAll(context.firstMiddleLastKeys(),
keys -> Maps.asMap(keys, Int::negate)).join();
assertThat(result.containsValue(null)).isFalse();
assertThat(result.containsKey(null)).isFalse();
assertThat(result.get(null)).isNull();
}

@CacheSpec
@Test(dataProvider = "caches")
public void getAllFunction_immutable_keys(AsyncCache<Int, Int> cache, CacheContext context) {
@@ -387,7 +399,7 @@ public void getAllFunction_immutable_keys(AsyncCache<Int, Int> cache, CacheConte
@CacheSpec
@Test(dataProvider = "caches", expectedExceptions = UnsupportedOperationException.class)
public void getAllFunction_immutable_result(AsyncCache<Int, Int> cache, CacheContext context) {
var result = cache.getAll(context.absentKeys(),
var result = cache.getAll(context.firstMiddleLastKeys(),
keys -> keys.stream().collect(toImmutableMap(identity(), identity()))).join();
result.clear();
}
@@ -592,6 +604,16 @@ public void getAllBifunction_iterable_empty(AsyncCache<Int, Int> cache, CacheCon
assertThat(result).isExhaustivelyEmpty();
}

@Test(dataProvider = "caches")
@CacheSpec(removalListener = { Listener.DISABLED, Listener.REJECTING })
public void getAllBiFunction_nullLookup(AsyncCache<Int, Int> cache, CacheContext context) {
var result = cache.getAll(context.firstMiddleLastKeys(), (keys, executor) ->
CompletableFuture.completedFuture(Maps.asMap(keys, Int::negate))).join();
assertThat(result.containsValue(null)).isFalse();
assertThat(result.containsKey(null)).isFalse();
assertThat(result.get(null)).isNull();
}

@CacheSpec
@Test(dataProvider = "caches", expectedExceptions = UnsupportedOperationException.class)
public void getAllBifunction_immutable_keys(AsyncCache<Int, Int> cache, CacheContext context) {
@@ -604,7 +626,7 @@ public void getAllBifunction_immutable_keys(AsyncCache<Int, Int> cache, CacheCon
@CacheSpec
@Test(dataProvider = "caches", expectedExceptions = UnsupportedOperationException.class)
public void getAllBifunction_immutable_result(AsyncCache<Int, Int> cache, CacheContext context) {
var result = cache.getAll(context.absentKeys(), (keys, executor) -> {
var result = cache.getAll(context.firstMiddleLastKeys(), (keys, executor) -> {
return CompletableFuture.completedFuture(
keys.stream().collect(toImmutableMap(identity(), identity())));
}).join();
@@ -21,6 +21,7 @@
import static com.github.benmanes.caffeine.testing.Awaits.await;
import static com.github.benmanes.caffeine.testing.CollectionSubject.assertThat;
import static com.github.benmanes.caffeine.testing.FutureSubject.assertThat;
import static com.github.benmanes.caffeine.testing.IntSubject.assertThat;
import static com.github.benmanes.caffeine.testing.MapSubject.assertThat;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.truth.Truth.assertThat;
@@ -204,7 +205,16 @@ public void getAll_immutable_keys_asyncLoader(
@CacheSpec
@Test(dataProvider = "caches", expectedExceptions = UnsupportedOperationException.class)
public void getAll_immutable_result(AsyncLoadingCache<Int, Int> cache, CacheContext context) {
cache.getAll(context.absentKeys()).join().clear();
cache.getAll(context.firstMiddleLastKeys()).join().clear();
}

@CacheSpec
@Test(dataProvider = "caches")
public void getAll_nullLookup(AsyncLoadingCache<Int, Int> cache, CacheContext context) {
var result = cache.getAll(context.firstMiddleLastKeys()).join();
assertThat(result.containsValue(null)).isFalse();
assertThat(result.containsKey(null)).isFalse();
assertThat(result.get(null)).isNull();
}

@CacheSpec(loader = Loader.BULK_NULL)
@@ -334,7 +344,7 @@ public void getAll_duplicates(AsyncLoadingCache<Int, Int> cache, CacheContext co
@CacheSpec(loader = { Loader.NEGATIVE, Loader.BULK_NEGATIVE },
population = { Population.SINGLETON, Population.PARTIAL, Population.FULL },
removalListener = { Listener.DISABLED, Listener.REJECTING })
public void getAllPresent_ordered_absent(
public void getAll_present_ordered_absent(
AsyncLoadingCache<Int, Int> cache, CacheContext context) {
var keys = new ArrayList<>(context.absentKeys());
Collections.shuffle(keys);
@@ -347,7 +357,7 @@ public void getAllPresent_ordered_absent(
@CacheSpec(loader = { Loader.NEGATIVE, Loader.BULK_NEGATIVE },
population = { Population.SINGLETON, Population.PARTIAL },
removalListener = { Listener.DISABLED, Listener.REJECTING })
public void getAllPresent_ordered_partial(
public void getAll_present_ordered_partial(
AsyncLoadingCache<Int, Int> cache, CacheContext context) {
var keys = new ArrayList<>(context.original().keySet());
keys.addAll(context.absentKeys());
@@ -361,7 +371,7 @@ public void getAllPresent_ordered_partial(
@CacheSpec(loader = { Loader.EXCEPTIONAL, Loader.BULK_NEGATIVE_EXCEEDS },
population = { Population.SINGLETON, Population.PARTIAL, Population.FULL },
removalListener = { Listener.DISABLED, Listener.REJECTING })
public void getAllPresent_ordered_present(
public void getAll_present_ordered_present(
AsyncLoadingCache<Int, Int> cache, CacheContext context) {
var keys = new ArrayList<>(context.original().keySet());
Collections.shuffle(keys);
@@ -373,7 +383,7 @@ public void getAllPresent_ordered_present(
@Test(dataProvider = "caches")
@CacheSpec(loader = Loader.BULK_NEGATIVE_EXCEEDS,
removalListener = { Listener.DISABLED, Listener.REJECTING })
public void getAllPresent_ordered_exceeds(
public void getAll_present_ordered_exceeds(
AsyncLoadingCache<Int, Int> cache, CacheContext context) {
var keys = new ArrayList<>(context.original().keySet());
keys.addAll(context.absentKeys());
@@ -233,7 +233,16 @@ public void getAllPresent_absent(Cache<Int, Int> cache, CacheContext context) {
@CacheSpec
@Test(dataProvider = "caches", expectedExceptions = UnsupportedOperationException.class)
public void getAllPresent_immutable(Cache<Int, Int> cache, CacheContext context) {
cache.getAllPresent(context.absentKeys()).clear();
cache.getAllPresent(context.firstMiddleLastKeys()).clear();
}

@Test(dataProvider = "caches")
@CacheSpec(removalListener = { Listener.DISABLED, Listener.REJECTING })
public void getAllPresent_nullLookup(Cache<Int, Int> cache, CacheContext context) {
var result = cache.getAllPresent(context.firstMiddleLastKeys());
assertThat(result.containsValue(null)).isFalse();
assertThat(result.containsKey(null)).isFalse();
assertThat(result.get(null)).isNull();
}

@Test(dataProvider = "caches")
@@ -364,10 +373,19 @@ public void getAll_immutable_keys(Cache<Int, Int> cache, CacheContext context) {
@CacheSpec
@Test(dataProvider = "caches", expectedExceptions = UnsupportedOperationException.class)
public void getAll_immutable_result(Cache<Int, Int> cache, CacheContext context) {
var result = cache.getAll(context.absentKeys(), bulkMappingFunction());
var result = cache.getAll(context.firstMiddleLastKeys(), bulkMappingFunction());
result.clear();
}

@CacheSpec
@Test(dataProvider = "caches")
public void getAll_nullLookup(Cache<Int, Int> cache, CacheContext context) {
var result = cache.getAll(context.firstMiddleLastKeys(), bulkMappingFunction());
assertThat(result.containsValue(null)).isFalse();
assertThat(result.containsKey(null)).isFalse();
assertThat(result.get(null)).isNull();
}

@CacheSpec
@Test(dataProvider = "caches", expectedExceptions = IllegalStateException.class)
public void getAll_absent_throwsException(Cache<Int, Int> cache, CacheContext context) {
@@ -965,6 +983,14 @@ public void refreshes_unmodifiable(Cache<Int, Int> cache, CacheContext context)
cache.policy().refreshes().clear();
}

@CacheSpec
@Test(dataProvider = "caches")
public void refreshes_nullLookup(Cache<Int, Int> cache, CacheContext context) {
assertThat(cache.policy().refreshes().containsValue(null)).isFalse();
assertThat(cache.policy().refreshes().containsKey(null)).isFalse();
assertThat(cache.policy().refreshes().get(null)).isNull();
}

/* --------------- Policy: CacheEntry --------------- */

@Test(expectedExceptions = UnsupportedOperationException.class)
@@ -197,7 +197,16 @@ public void getAll_immutable_keys(LoadingCache<Int, Int> cache, CacheContext con
@CheckNoEvictions
@Test(dataProvider = "caches", expectedExceptions = UnsupportedOperationException.class)
public void getAll_immutable_result(LoadingCache<Int, Int> cache, CacheContext context) {
cache.getAll(context.absentKeys()).clear();
cache.getAll(context.firstMiddleLastKeys()).clear();
}

@CacheSpec
@Test(dataProvider = "caches")
public void getAll_nullLookup(LoadingCache<Int, Int> cache, CacheContext context) {
var result = cache.getAll(context.firstMiddleLastKeys());
assertThat(result.containsValue(null)).isFalse();
assertThat(result.containsKey(null)).isFalse();
assertThat(result.get(null)).isNull();
}

@CheckNoEvictions
@@ -967,6 +976,23 @@ public void refreshAll_nullKey(LoadingCache<Int, Int> cache, CacheContext contex
cache.refreshAll(Collections.singletonList(null));
}

@CacheSpec
@CheckNoEvictions
@Test(dataProvider = "caches")
public void refreshAll_nullLookup(LoadingCache<Int, Int> cache, CacheContext context) {
var result = cache.refreshAll(context.firstMiddleLastKeys()).join();
assertThat(result.containsValue(null)).isFalse();
assertThat(result.containsKey(null)).isFalse();
assertThat(result.get(null)).isNull();
}

@CacheSpec
@CheckNoEvictions
@Test(dataProvider = "caches", expectedExceptions = UnsupportedOperationException.class)
public void refreshAll_immutable(LoadingCache<Int, Int> cache, CacheContext context) {
cache.refreshAll(context.firstMiddleLastKeys()).join().clear();
}

@CheckNoEvictions
@Test(dataProvider = "caches")
@CacheSpec(removalListener = { Listener.DISABLED, Listener.REJECTING })
@@ -1170,4 +1196,17 @@ public void refreshes(LoadingCache<Int, Int> cache, CacheContext context) {
future2.cancel(true);
assertThat(cache.policy().refreshes()).isExhaustivelyEmpty();
}

@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, loader = Loader.ASYNC_INCOMPLETE)
public void refreshes_nullLookup(LoadingCache<Int, Int> cache, CacheContext context) {
cache.refreshAll(context.absentKeys());
assertThat(cache.policy().refreshes().get(null)).isNull();
assertThat(cache.policy().refreshes().containsKey(null)).isFalse();
assertThat(cache.policy().refreshes().containsValue(null)).isFalse();

for (var future : cache.policy().refreshes().values()) {
future.cancel(true);
}
}
}
@@ -857,6 +857,21 @@ public void refreshes(LoadingCache<Int, Int> cache, CacheContext context) {
assertThat(cache).containsEntry(context.firstKey(), Int.MAX_VALUE);
}

@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, loader = Loader.ASYNC_INCOMPLETE,
refreshAfterWrite = Expire.ONE_MINUTE, population = Population.FULL)
public void refreshes_nullLookup(LoadingCache<Int, Int> cache, CacheContext context) {
context.ticker().advance(2, TimeUnit.MINUTES);
cache.getIfPresent(context.firstKey());
var future = cache.policy().refreshes().get(context.firstKey());

assertThat(cache.policy().refreshes().get(null)).isNull();
assertThat(cache.policy().refreshes().containsKey(null)).isFalse();
assertThat(cache.policy().refreshes().containsValue(null)).isFalse();

future.cancel(true);
}

/* --------------- Policy: refreshAfterWrite --------------- */

@Test(dataProvider = "caches")
@@ -224,7 +224,9 @@ public Int lastKey() {
}

public ImmutableSet<Int> firstMiddleLastKeys() {
return ImmutableSet.of(firstKey(), middleKey(), lastKey());
return (firstKey == null)
? ImmutableSet.of()
: ImmutableSet.of(firstKey(), middleKey(), lastKey());
}

public void cleanUp() {
@@ -470,7 +470,7 @@ final class GuavaPolicy implements Policy<K, V> {
return new GuavaCacheEntry<>(key, value, snapshotAt);
}
@Override public Map<K, CompletableFuture<V>> refreshes() {
return Map.of();
return Collections.unmodifiableMap(Collections.emptyMap());
}
@Override public Optional<Eviction<K, V>> eviction() {
return Optional.empty();
@@ -568,7 +568,8 @@ public CompletableFuture<Map<K, V>> refreshAll(Iterable<? extends K> keys) {

CompletableFuture<Map<K, V>> composeResult(Map<K, CompletableFuture<V>> futures) {
if (futures.isEmpty()) {
return CompletableFuture.completedFuture(Map.of());
return CompletableFuture.completedFuture(
Collections.unmodifiableMap(Collections.emptyMap()));
}
@SuppressWarnings("rawtypes")
CompletableFuture<?>[] array = futures.values().toArray(new CompletableFuture[0]);