From 003be234f334c5c1d85709ae7d06611fcccf64e3 Mon Sep 17 00:00:00 2001
From: Ben Manes <ben.manes@gmail.com>
Date: Sun, 13 Mar 2022 20:33:14 -0700
Subject: [PATCH] Removed unnecessary top-level cache fields

Previously the cache extended AbstractMap for its convenient putAll,
equals, hashCode, and toString implementations. This came at the cost
of carrying its unused keySet and values fields. Those Object methods
also iterated over the entrySet, which returns a copy of each entry to
avoid instability due to concurrent mutations. This change switches to
internal iteration, which avoids that unnecessary garbage creation.
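
For context, the inherited pattern looked roughly like the following
(a sketch of the AbstractMap-style approach being replaced, not code
taken from this patch); each iteration step allocates a defensive
Map.Entry copy, which is the garbage this change eliminates:

    // Sketch: hashCode() via the inherited entrySet() iteration.
    // Every next() on the cache's entry iterator creates a fresh
    // entry snapshot to stay stable under concurrent mutation.
    int hash = 0;
    for (Map.Entry<K, V> entry : entrySet()) {
      hash += entry.hashCode();
    }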

Equality is a complex subject, so the implementation's internal docs
now clarify the semantics in more detail. Because the cache may hold
entries that are pending removal (expiration, reference collection),
usages are expected to call Cache.cleanUp() first.
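
A minimal usage sketch (the expectedMap name is made up for
illustration):

    // Run pending maintenance first so entries awaiting removal do
    // not skew size() or the comparison result.
    cache.cleanUp();
    boolean equal = cache.asMap().equals(expectedMap);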

Removed the adapter around the async cache loader, as it was not
needed, dropping two instance fields.

Simplified some serialization code by removing redundancies.
---
 .../cache/LocalCacheFactoryGenerator.java     |   8 +-
 .../caffeine/cache/Specifications.java        |   8 +-
 .../caffeine/cache/local/AddConstructor.java  |  17 +-
 .../caffeine/cache/BoundedLocalCache.java     | 204 ++++++++++++------
 .../caffeine/cache/LocalAsyncCache.java       | 155 ++++++++-----
 .../cache/LocalAsyncLoadingCache.java         |   2 +-
 .../benmanes/caffeine/cache/LocalCache.java   |   7 +
 .../caffeine/cache/LocalLoadingCache.java     |   4 +-
 .../caffeine/cache/UnboundedLocalCache.java   |  20 +-
 .../benmanes/caffeine/cache/Weigher.java      |   3 +-
 .../benmanes/caffeine/cache/AsMapTest.java    |  58 +++--
 .../caffeine/cache/AsyncAsMapTest.java        |  37 ++--
 .../caffeine/cache/ExpirationTest.java        | 123 +++++++++--
 .../caffeine/cache/ReferenceTest.java         |  72 ++++++-
 config/spotbugs/exclude.xml                   |   4 +
 15 files changed, 536 insertions(+), 186 deletions(-)

diff --git a/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/LocalCacheFactoryGenerator.java b/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/LocalCacheFactoryGenerator.java
index bf35fd556c..2d89066b44 100644
--- a/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/LocalCacheFactoryGenerator.java
+++ b/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/LocalCacheFactoryGenerator.java
@@ -18,8 +18,8 @@
 import static com.github.benmanes.caffeine.cache.Specifications.BOUNDED_LOCAL_CACHE;
 import static com.github.benmanes.caffeine.cache.Specifications.BUILDER;
 import static com.github.benmanes.caffeine.cache.Specifications.BUILDER_PARAM;
-import static com.github.benmanes.caffeine.cache.Specifications.CACHE_LOADER;
-import static com.github.benmanes.caffeine.cache.Specifications.CACHE_LOADER_PARAM;
+import static com.github.benmanes.caffeine.cache.Specifications.ASYNC_CACHE_LOADER;
+import static com.github.benmanes.caffeine.cache.Specifications.ASYNC_CACHE_LOADER_PARAM;
 import static com.github.benmanes.caffeine.cache.Specifications.kTypeVar;
 import static com.github.benmanes.caffeine.cache.Specifications.vTypeVar;
 import static java.nio.charset.StandardCharsets.UTF_8;
@@ -90,7 +90,7 @@ public final class LocalCacheFactoryGenerator {
       .build();
   static final FieldSpec FACTORY = FieldSpec.builder(MethodType.class, "FACTORY")
       .initializer("$T.methodType($T.class, $T.class, $T.class, $T.class)",
-          MethodType.class, void.class, BUILDER, CACHE_LOADER.rawType, TypeName.BOOLEAN)
+          MethodType.class, void.class, BUILDER, ASYNC_CACHE_LOADER.rawType, TypeName.BOOLEAN)
       .addModifiers(Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL)
       .build();
 
@@ -136,7 +136,7 @@ private void addFactoryMethods() {
         .addModifiers(Modifier.STATIC)
         .addCode(LocalCacheSelectorCode.get())
         .addParameter(BUILDER_PARAM)
-        .addParameter(CACHE_LOADER_PARAM.toBuilder().addAnnotation(Nullable.class).build())
+        .addParameter(ASYNC_CACHE_LOADER_PARAM.toBuilder().addAnnotation(Nullable.class).build())
         .addParameter(boolean.class, "async")
         .addJavadoc("Returns a cache optimized for this configuration.\n")
         .build());
diff --git a/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/Specifications.java b/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/Specifications.java
index 2ed919b9a4..378fc8fc85 100644
--- a/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/Specifications.java
+++ b/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/Specifications.java
@@ -76,10 +76,10 @@ public final class Specifications {
       ClassName.get(PACKAGE_NAME, "BoundedLocalCache"), kTypeVar, vTypeVar);
   public static final TypeName NODE = ParameterizedTypeName.get(nodeType, kTypeVar, vTypeVar);
 
-  public static final ParameterizedTypeName CACHE_LOADER = ParameterizedTypeName.get(
-      ClassName.get(PACKAGE_NAME, "CacheLoader"), TypeVariableName.get("? super K"), vTypeVar);
-  public static final ParameterSpec CACHE_LOADER_PARAM =
-      ParameterSpec.builder(CACHE_LOADER, "cacheLoader").build();
+  public static final ParameterizedTypeName ASYNC_CACHE_LOADER = ParameterizedTypeName.get(
+      ClassName.get(PACKAGE_NAME, "AsyncCacheLoader"), TypeVariableName.get("? super K"), vTypeVar);
+  public static final ParameterSpec ASYNC_CACHE_LOADER_PARAM =
+      ParameterSpec.builder(ASYNC_CACHE_LOADER, "cacheLoader").build();
 
   public static final TypeName REMOVAL_LISTENER = ParameterizedTypeName.get(
       ClassName.get(PACKAGE_NAME, "RemovalListener"), kTypeVar, vTypeVar);
diff --git a/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/local/AddConstructor.java b/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/local/AddConstructor.java
index 0d6ed88882..5001ac4812 100644
--- a/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/local/AddConstructor.java
+++ b/caffeine/src/javaPoet/java/com/github/benmanes/caffeine/cache/local/AddConstructor.java
@@ -15,9 +15,9 @@
  */
 package com.github.benmanes.caffeine.cache.local;
 
+import static com.github.benmanes.caffeine.cache.Specifications.ASYNC_CACHE_LOADER_PARAM;
 import static com.github.benmanes.caffeine.cache.Specifications.BOUNDED_LOCAL_CACHE;
 import static com.github.benmanes.caffeine.cache.Specifications.BUILDER_PARAM;
-import static com.github.benmanes.caffeine.cache.Specifications.CACHE_LOADER_PARAM;
 
 /**
  * Adds the constructor to the cache.
@@ -33,17 +33,16 @@ protected boolean applies() {
 
   @Override
   protected void execute() {
-    String cacheLoader;
+    context.constructor
+        .addParameter(BUILDER_PARAM)
+        .addParameter(ASYNC_CACHE_LOADER_PARAM)
+        .addParameter(boolean.class, "async");
     if (context.superClass.equals(BOUNDED_LOCAL_CACHE)) {
-      cacheLoader = "(CacheLoader<K, V>) cacheLoader";
       context.suppressedWarnings.add("unchecked");
+      context.constructor.addStatement(
+          "super(builder, (AsyncCacheLoader<K, V>) cacheLoader, async)");
     } else {
-      cacheLoader = "cacheLoader";
+      context.constructor.addStatement("super(builder, cacheLoader, async)");
     }
-    context.constructor
-        .addParameter(BUILDER_PARAM)
-        .addParameter(CACHE_LOADER_PARAM)
-        .addParameter(boolean.class, "async")
-        .addStatement("super(builder, $L, async)", cacheLoader);
   }
 }
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
index 2c6fa0a478..62e3d64e05 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
@@ -29,6 +29,7 @@
 import static java.util.Spliterator.IMMUTABLE;
 import static java.util.Spliterator.NONNULL;
 import static java.util.Spliterator.ORDERED;
+import static java.util.function.Function.identity;
 import static java.util.stream.Collectors.toMap;
 
 import java.io.InvalidObjectException;
@@ -43,7 +44,6 @@
 import java.lang.ref.WeakReference;
 import java.time.Duration;
 import java.util.AbstractCollection;
-import java.util.AbstractMap;
 import java.util.AbstractSet;
 import java.util.Collection;
 import java.util.Collections;
@@ -99,7 +99,7 @@
  * @param <K> the type of keys maintained by this cache
  * @param <V> the type of mapped values
  */
-abstract class BoundedLocalCache<K, V> extends BLCHeader.DrainStatusRef<K, V>
+abstract class BoundedLocalCache<K, V> extends BLCHeader.DrainStatusRef
     implements LocalCache<K, V> {
 
   /*
@@ -224,7 +224,7 @@ abstract class BoundedLocalCache<K, V> extends BLCHeader.DrainStatusRef<K, V>
   static final VarHandle REFRESHES;
 
   final @Nullable RemovalListener<K, V> evictionListener;
-  final @Nullable CacheLoader<K, V> cacheLoader;
+  final @Nullable AsyncCacheLoader<K, V> cacheLoader;
 
   final MpscGrowableArrayQueue<Runnable> writeBuffer;
   final ConcurrentHashMap<Object, Node<K, V>> data;
@@ -246,7 +246,7 @@ abstract class BoundedLocalCache<K, V> extends BLCHeader.DrainStatusRef<K, V>
 
   /** Creates an instance based on the builder's configuration. */
   protected BoundedLocalCache(Caffeine<K, V> builder,
-      @Nullable CacheLoader<K, V> cacheLoader, boolean isAsync) {
+      @Nullable AsyncCacheLoader<K, V> cacheLoader, boolean isAsync) {
     this.isAsync = isAsync;
     this.cacheLoader = cacheLoader;
     executor = builder.getExecutor();
@@ -2069,6 +2069,17 @@ public boolean containsValue(Object value) {
     return value;
   }
 
+  @Override
+  public @Nullable V getIfPresentQuietly(Object key) {
+    V value;
+    Node<K, V> node = data.get(nodeFactory.newLookupKey(key));
+    if ((node == null) || ((value = node.getValue()) == null)
+        || hasExpired(node, expirationTicker().read())) {
+      return null;
+    }
+    return value;
+  }
+
   @Override
   public @Nullable V getIfPresentQuietly(K key, long[/* 1 */] writeTime) {
     V value;
@@ -2116,6 +2127,11 @@ public Map<K, V> getAllPresent(Iterable<? extends K> keys) {
     return Collections.unmodifiableMap(castedResult);
   }
 
+  @Override
+  public void putAll(Map<? extends K, ? extends V> map) {
+    map.forEach(this::put);
+  }
+
   @Override
   public @Nullable V put(K key, V value) {
     return put(key, value, expiry(), /* onlyIfAbsent */ false);
@@ -2778,6 +2794,98 @@ public Set<Entry<K, V>> entrySet() {
     return (es == null) ? (entrySet = new EntrySetView<>(this)) : es;
   }
 
+  /**
+   * Object equality requires reflexive, symmetric, transitive, and consistent properties. Of these,
+   * symmetry and consistency require further clarification for how they are upheld.
+   * <p>
+   * The <i>consistency</i> property between invocations requires that the results are the same if
+   * there are no modifications to the information used. Therefore, usages should expect that this
+   * operation may return misleading results if either map or the data held by them is modified
+   * during the execution of this method. This characteristic allows for comparing the map sizes and
+   * assuming stable mappings, as done by {@link AbstractMap}-based maps.
+   * <p>
+   * The <i>symmetric</i> property requires that the result is the same for all implementations of
+   * {@link Map#equals(Object)}. That contract is defined in terms of the stable mappings provided
+   * by {@link #entrySet()}, meaning that the {@link #size()} optimization forces the count to be
+   * consistent with the mappings when used for an equality check.
+   * <p>
+   * The cache's {@link #size()} method may include entries that have expired or have been reference
+   * collected, but have not yet been removed from the backing map. An iteration over the map may
+   * trigger the removal of these dead entries when skipped over during traversal. To honor both
+   * consistency and symmetry, usages should call {@link #cleanUp()} prior to the comparison. This
+   * is not done implicitly by {@link #size()} as many usages assume it to be instantaneous and
+   * lock-free.
+   */
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) {
+      return true;
+    } else if (!(o instanceof Map)) {
+      return false;
+    }
+
+    var map = (Map<?, ?>) o;
+    if (size() != map.size()) {
+      return false;
+    }
+
+    long now = expirationTicker().read();
+    for (var node : data.values()) {
+      K key = node.getKey();
+      V value = node.getValue();
+      if ((key == null) || (value == null)
+          || !node.isAlive() || hasExpired(node, now)) {
+        scheduleDrainBuffers();
+      } else {
+        var val = map.get(key);
+        if ((val == null) || ((val != value) && !val.equals(value))) {
+          return false;
+        }
+      }
+    }
+    return true;
+  }
+
+  @Override
+  @SuppressWarnings("NullAway")
+  public int hashCode() {
+    int hash = 0;
+    long now = expirationTicker().read();
+    for (var node : data.values()) {
+      K key = node.getKey();
+      V value = node.getValue();
+      if ((key == null) || (value == null)
+          || !node.isAlive() || hasExpired(node, now)) {
+        scheduleDrainBuffers();
+      } else {
+        hash += key.hashCode() ^ value.hashCode();
+      }
+    }
+    return hash;
+  }
+
+  @Override
+  public String toString() {
+    var result = new StringBuilder().append('{');
+    long now = expirationTicker().read();
+    for (var node : data.values()) {
+      K key = node.getKey();
+      V value = node.getValue();
+      if ((key == null) || (value == null)
+          || !node.isAlive() || hasExpired(node, now)) {
+        scheduleDrainBuffers();
+      } else {
+        if (result.length() != 1) {
+          result.append(',').append(' ');
+        }
+        result.append((key == this) ? "(this Map)" : key);
+        result.append('=');
+        result.append((value == this) ? "(this Map)" : value);
+      }
+    }
+    return result.append('}').toString();
+  }
+
   /**
    * Returns the computed result from the ordered traversal of the cache entries.
    *
@@ -3313,7 +3421,7 @@ public boolean hasNext() {
           value = next.getValue();
           key = next.getKey();
 
-          boolean evictable = cache.hasExpired(next, now) || (key == null) || (value == null);
+          boolean evictable = (key == null) || (value == null) || cache.hasExpired(next, now);
           if (evictable || !next.isAlive()) {
             if (evictable) {
               cache.scheduleDrainBuffers();
@@ -3360,7 +3468,7 @@ public Entry<K, V> next() {
         throw new NoSuchElementException();
       }
       @SuppressWarnings("NullAway")
-      Entry<K, V> entry = new WriteThroughEntry<>(cache, key, value);
+      var entry = new WriteThroughEntry<>(cache, key, value);
       removalKey = key;
       value = null;
       next = null;
@@ -3510,6 +3618,9 @@ static <K, V> SerializationProxy<K, V> makeSerializationProxy(BoundedLocalCache<
     if (cache.expiresVariable()) {
       proxy.expiry = cache.expiry();
     }
+    if (cache.refreshAfterWrite()) {
+      proxy.refreshAfterWriteNanos = cache.refreshAfterWriteNanos();
+    }
     if (cache.evicts()) {
       if (cache.isWeighted) {
         proxy.weigher = cache.weigher;
@@ -3518,6 +3629,8 @@ static <K, V> SerializationProxy<K, V> makeSerializationProxy(BoundedLocalCache<
         proxy.maximumSize = cache.maximum();
       }
     }
+    proxy.cacheLoader = cache.cacheLoader;
+    proxy.async = cache.isAsync;
     return proxy;
   }
 
@@ -3545,9 +3658,8 @@ public BoundedLocalCache<K, V> cache() {
 
     @Override
     public Policy<K, V> policy() {
-      return (policy == null)
-          ? (policy = new BoundedPolicy<>(cache, Function.identity(), cache.isWeighted))
-          : policy;
+      var p = policy;
+      return (p == null) ? (policy = new BoundedPolicy<>(cache, identity(), cache.isWeighted)) : p;
     }
 
     @SuppressWarnings("UnusedVariable")
@@ -3555,7 +3667,7 @@ private void readObject(ObjectInputStream stream) throws InvalidObjectException
       throw new InvalidObjectException("Proxy required");
     }
 
-    Object writeReplace() {
+    private Object writeReplace() {
       return makeSerializationProxy(cache);
     }
   }
@@ -3581,11 +3693,7 @@ static final class BoundedPolicy<K, V> implements Policy<K, V> {
       return cache.isRecordingStats();
     }
     @Override public @Nullable V getIfPresentQuietly(K key) {
-      Node<K, V> node = cache.data.get(cache.nodeFactory.newLookupKey(key));
-      if ((node == null) || cache.hasExpired(node, cache.expirationTicker().read())) {
-        return null;
-      }
-      return transformer.apply(node.getValue());
+      return transformer.apply(cache.getIfPresentQuietly(key));
     }
     @Override public @Nullable CacheEntry<K, V> getEntryIfPresentQuietly(K key) {
       Node<K, V> node = cache.data.get(cache.nodeFactory.newLookupKey(key));
@@ -3929,9 +4037,8 @@ final class BoundedVarExpiration implements VarExpiration<K, V> {
 
         @SuppressWarnings({"unchecked", "rawtypes"})
         V[] newValue = (V[]) new Object[1];
-        long[] writeTime = new long[1];
         for (;;) {
-          Async.getWhenSuccessful(delegate.getIfPresentQuietly(key, writeTime));
+          Async.getWhenSuccessful(delegate.getIfPresentQuietly(key));
 
           CompletableFuture<V> valueFuture = delegate.compute(key, (k, oldValueFuture) -> {
             if ((oldValueFuture != null) && !oldValueFuture.isDone()) {
@@ -4033,7 +4140,7 @@ static final class BoundedLocalLoadingCache<K, V>
 
     @Override
     @SuppressWarnings("NullAway")
-    public CacheLoader<? super K, V> cacheLoader() {
+    public AsyncCacheLoader<? super K, V> cacheLoader() {
       return cache.cacheLoader;
     }
 
@@ -4052,15 +4159,8 @@ private void readObject(ObjectInputStream stream) throws InvalidObjectException
       throw new InvalidObjectException("Proxy required");
     }
 
-    @Override
-    Object writeReplace() {
-      @SuppressWarnings("unchecked")
-      SerializationProxy<K, V> proxy = (SerializationProxy<K, V>) super.writeReplace();
-      if (cache.refreshAfterWrite()) {
-        proxy.refreshAfterWriteNanos = cache.refreshAfterWriteNanos();
-      }
-      proxy.cacheLoader = cache.cacheLoader;
-      return proxy;
+    private Object writeReplace() {
+      return makeSerializationProxy(cache);
     }
   }
 
@@ -4116,13 +4216,8 @@ private void readObject(ObjectInputStream stream) throws InvalidObjectException
       throw new InvalidObjectException("Proxy required");
     }
 
-    Object writeReplace() {
-      SerializationProxy<K, V> proxy = makeSerializationProxy(cache);
-      if (cache.refreshAfterWrite()) {
-        proxy.refreshAfterWriteNanos = cache.refreshAfterWriteNanos();
-      }
-      proxy.async = true;
-      return proxy;
+    private Object writeReplace() {
+      return makeSerializationProxy(cache);
     }
   }
 
@@ -4143,7 +4238,7 @@ static final class BoundedLocalAsyncLoadingCache<K, V>
       super(loader);
       isWeighted = builder.isWeighted();
       cache = (BoundedLocalCache<K, CompletableFuture<V>>) LocalCacheFactory
-          .newBoundedLocalCache(builder, new AsyncLoader<>(loader, builder), /* async */ true);
+          .newBoundedLocalCache(builder, loader, /* async */ true);
     }
 
     @Override
@@ -4174,39 +4269,8 @@ private void readObject(ObjectInputStream stream) throws InvalidObjectException
       throw new InvalidObjectException("Proxy required");
     }
 
-    Object writeReplace() {
-      SerializationProxy<K, V> proxy = makeSerializationProxy(cache);
-      if (cache.refreshAfterWrite()) {
-        proxy.refreshAfterWriteNanos = cache.refreshAfterWriteNanos();
-      }
-      proxy.cacheLoader = cacheLoader;
-      proxy.async = true;
-      return proxy;
-    }
-
-    static final class AsyncLoader<K, V> implements CacheLoader<K, V> {
-      final AsyncCacheLoader<? super K, V> loader;
-      final Executor executor;
-
-      AsyncLoader(AsyncCacheLoader<? super K, V> loader, Caffeine<?, ?> builder) {
-        this.executor = requireNonNull(builder.getExecutor());
-        this.loader = requireNonNull(loader);
-      }
-
-      @Override public V load(K key) throws Exception {
-        @SuppressWarnings("unchecked")
-        V newValue = (V) loader.asyncLoad(key, executor);
-        return newValue;
-      }
-      @Override public V reload(K key, V oldValue) throws Exception {
-        @SuppressWarnings("unchecked")
-        V newValue = (V) loader.asyncReload(key, oldValue, executor);
-        return newValue;
-      }
-      @Override public CompletableFuture<? extends V> asyncReload(
-          K key, V oldValue, Executor executor) throws Exception {
-        return loader.asyncReload(key, oldValue, executor);
-      }
+    private Object writeReplace() {
+      return makeSerializationProxy(cache);
     }
   }
 }
@@ -4214,7 +4278,7 @@ static final class AsyncLoader<K, V> implements CacheLoader<K, V> {
 /** The namespace for field padding through inheritance. */
 final class BLCHeader {
 
-  abstract static class PadDrainStatus<K, V> extends AbstractMap<K, V> {
+  static class PadDrainStatus {
     byte p000, p001, p002, p003, p004, p005, p006, p007;
     byte p008, p009, p010, p011, p012, p013, p014, p015;
     byte p016, p017, p018, p019, p020, p021, p022, p023;
@@ -4233,7 +4297,7 @@ abstract static class PadDrainStatus<K, V> extends AbstractMap<K, V> {
   }
 
   /** Enforces a memory layout to avoid false sharing by padding the drain status. */
-  abstract static class DrainStatusRef<K, V> extends PadDrainStatus<K, V> {
+  abstract static class DrainStatusRef extends PadDrainStatus {
     static final VarHandle DRAIN_STATUS;
 
     /** A drain is not taking place. */
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncCache.java
index 7d0ce44177..9ef578832f 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncCache.java
@@ -15,13 +15,13 @@
  */
 package com.github.benmanes.caffeine.cache;
 
+import static com.github.benmanes.caffeine.cache.Caffeine.requireState;
 import static java.util.Objects.requireNonNull;
 
 import java.io.Serializable;
 import java.lang.System.Logger;
 import java.lang.System.Logger.Level;
 import java.util.AbstractCollection;
-import java.util.AbstractMap;
 import java.util.AbstractSet;
 import java.util.Collection;
 import java.util.Collections;
@@ -590,7 +590,7 @@ public ConcurrentMap<K, V> asMap() {
     }
   }
 
-  final class AsMapView<K, V> extends AbstractMap<K, V> implements ConcurrentMap<K, V> {
+  final class AsMapView<K, V> implements ConcurrentMap<K, V> {
     final LocalCache<K, CompletableFuture<V>> delegate;
 
     @Nullable Collection<V> values;
@@ -643,11 +643,10 @@ public boolean containsValue(Object value) {
 
       // Keep in sync with BoundedVarExpiration.putIfAbsentAsync(key, value, duration, unit)
       CompletableFuture<V> priorFuture = null;
-      long[] writeTime = new long[1];
       for (;;) {
         priorFuture = (priorFuture == null)
             ? delegate.get(key)
-            : delegate.getIfPresentQuietly(key, writeTime);
+            : delegate.getIfPresentQuietly(key);
         if (priorFuture != null) {
           if (!priorFuture.isDone()) {
             Async.getWhenSuccessful(priorFuture);
@@ -679,6 +678,11 @@ public boolean containsValue(Object value) {
       }
     }
 
+    @Override
+    public void putAll(Map<? extends K, ? extends V> map) {
+      map.forEach(this::put);
+    }
+
     @Override
     public @Nullable V put(K key, V value) {
       requireNonNull(value);
@@ -704,12 +708,11 @@ public boolean remove(Object key, Object value) {
       K castedKey = (K) key;
       boolean[] done = { false };
       boolean[] removed = { false };
-      long[] writeTime = new long[1];
       CompletableFuture<V> future = null;
       for (;;) {
         future = (future == null)
             ? delegate.get(key)
-            : delegate.getIfPresentQuietly(castedKey, writeTime);
+            : delegate.getIfPresentQuietly(castedKey);
         if ((future == null) || future.isCompletedExceptionally()) {
           return false;
         }
@@ -743,9 +746,8 @@ public boolean remove(Object key, Object value) {
       @SuppressWarnings({"unchecked", "rawtypes"})
       V[] oldValue = (V[]) new Object[1];
       boolean[] done = { false };
-      long[] writeTime = new long[1];
       for (;;) {
-        CompletableFuture<V> future = delegate.getIfPresentQuietly(key, writeTime);
+        CompletableFuture<V> future = delegate.getIfPresentQuietly(key);
         if ((future == null) || future.isCompletedExceptionally()) {
           return null;
         }
@@ -778,9 +780,8 @@ public boolean replace(K key, V oldValue, V newValue) {
 
       boolean[] done = { false };
       boolean[] replaced = { false };
-      long[] writeTime = new long[1];
       for (;;) {
-        CompletableFuture<V> future = delegate.getIfPresentQuietly(key, writeTime);
+        CompletableFuture<V> future = delegate.getIfPresentQuietly(key);
         if ((future == null) || future.isCompletedExceptionally()) {
           return false;
         }
@@ -810,12 +811,11 @@ public boolean replace(K key, V oldValue, V newValue) {
     public @Nullable V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) {
       requireNonNull(mappingFunction);
 
-      long[] writeTime = new long[1];
       CompletableFuture<V> priorFuture = null;
       for (;;) {
         priorFuture = (priorFuture == null)
             ? delegate.get(key)
-            : delegate.getIfPresentQuietly(key, writeTime);
+            : delegate.getIfPresentQuietly(key);
         if (priorFuture != null) {
           if (!priorFuture.isDone()) {
             Async.getWhenSuccessful(priorFuture);
@@ -860,9 +860,8 @@ public boolean replace(K key, V oldValue, V newValue) {
 
       @SuppressWarnings({"unchecked", "rawtypes"})
       V[] newValue = (V[]) new Object[1];
-      long[] writeTime = new long[1];
       for (;;) {
-        Async.getWhenSuccessful(delegate.getIfPresentQuietly(key, writeTime));
+        Async.getWhenSuccessful(delegate.getIfPresentQuietly(key));
 
         CompletableFuture<V> valueFuture = delegate.computeIfPresent(key, (k, oldValueFuture) -> {
           if (!oldValueFuture.isDone()) {
@@ -894,9 +893,8 @@ public boolean replace(K key, V oldValue, V newValue) {
 
       @SuppressWarnings({"unchecked", "rawtypes"})
       V[] newValue = (V[]) new Object[1];
-      long[] writeTime = new long[1];
       for (;;) {
-        Async.getWhenSuccessful(delegate.getIfPresentQuietly(key, writeTime));
+        Async.getWhenSuccessful(delegate.getIfPresentQuietly(key));
 
         CompletableFuture<V> valueFuture = delegate.compute(key, (k, oldValueFuture) -> {
           if ((oldValueFuture != null) && !oldValueFuture.isDone()) {
@@ -928,9 +926,8 @@ public boolean replace(K key, V oldValue, V newValue) {
 
       CompletableFuture<V> newValueFuture = CompletableFuture.completedFuture(value);
       boolean[] merged = { false };
-      long[] writeTime = new long[1];
       for (;;) {
-        Async.getWhenSuccessful(delegate.getIfPresentQuietly(key, writeTime));
+        Async.getWhenSuccessful(delegate.getIfPresentQuietly(key));
 
         CompletableFuture<V> mergedValueFuture = delegate.merge(
             key, newValueFuture, (oldValueFuture, valueFuture) -> {
@@ -975,6 +972,56 @@ public Set<Entry<K, V>> entrySet() {
       return (entries == null) ? (entries = new EntrySet()) : entries;
     }
 
+    /** See {@link BoundedLocalCache#equals(Object)} for semantics. */
+    @Override
+    public boolean equals(Object o) {
+      if (o == this) {
+        return true;
+      } else if (!(o instanceof Map)) {
+        return false;
+      }
+
+      var map = (Map<?, ?>) o;
+      if (size() != map.size()) {
+        return false;
+      }
+
+      for (var iterator = new EntryIterator(); iterator.hasNext();) {
+        var entry = iterator.next();
+        var value = map.get(entry.getKey());
+        if ((value == null) || ((value != entry.getValue()) && !value.equals(entry.getValue()))) {
+          return false;
+        }
+      }
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      int hash = 0;
+      for (var iterator = new EntryIterator(); iterator.hasNext();) {
+        var entry = iterator.next();
+        hash += entry.hashCode();
+      }
+      return hash;
+    }
+
+    @Override
+    public String toString() {
+      var result = new StringBuilder().append('{');
+      for (var iterator = new EntryIterator(); iterator.hasNext();) {
+        var entry = iterator.next();
+        result.append((entry.getKey() == this) ? "(this Map)" : entry.getKey());
+        result.append('=');
+        result.append((entry.getValue() == this) ? "(this Map)" : entry.getValue());
+
+        if (iterator.hasNext()) {
+          result.append(',').append(' ');
+        }
+      }
+      return result.append('}').toString();
+    }
+
     private final class Values extends AbstractCollection<V> {
 
       @Override
@@ -1063,43 +1110,49 @@ public void clear() {
 
       @Override
       public Iterator<Entry<K, V>> iterator() {
-        return new Iterator<Entry<K, V>>() {
-          Iterator<Entry<K, CompletableFuture<V>>> iterator = delegate.entrySet().iterator();
-          @Nullable Entry<K, V> cursor;
-          @Nullable K removalKey;
+        return new EntryIterator();
+      }
+    }
 
-          @Override
-          public boolean hasNext() {
-            while ((cursor == null) && iterator.hasNext()) {
-              Entry<K, CompletableFuture<V>> entry = iterator.next();
-              V value = Async.getIfReady(entry.getValue());
-              if (value != null) {
-                cursor = new WriteThroughEntry<>(AsMapView.this, entry.getKey(), value);
-              }
-            }
-            return (cursor != null);
-          }
+    private final class EntryIterator implements Iterator<Entry<K, V>> {
+      Iterator<Entry<K, CompletableFuture<V>>> iterator;
+      @Nullable Entry<K, V> cursor;
+      @Nullable K removalKey;
 
-          @Override
-          public Entry<K, V> next() {
-            if (!hasNext()) {
-              throw new NoSuchElementException();
-            }
-            @SuppressWarnings("NullAway")
-            K key = cursor.getKey();
-            Entry<K, V> entry = cursor;
-            removalKey = key;
-            cursor = null;
-            return entry;
-          }
+      EntryIterator() {
+        iterator = delegate.entrySet().iterator();
+      }
 
-          @Override
-          public void remove() {
-            Caffeine.requireState(removalKey != null);
-            delegate.remove(removalKey);
-            removalKey = null;
+      @Override
+      public boolean hasNext() {
+        while ((cursor == null) && iterator.hasNext()) {
+          Entry<K, CompletableFuture<V>> entry = iterator.next();
+          V value = Async.getIfReady(entry.getValue());
+          if (value != null) {
+            cursor = new WriteThroughEntry<>(AsMapView.this, entry.getKey(), value);
           }
-        };
+        }
+        return (cursor != null);
+      }
+
+      @Override
+      public Entry<K, V> next() {
+        if (!hasNext()) {
+          throw new NoSuchElementException();
+        }
+        @SuppressWarnings("NullAway")
+        K key = cursor.getKey();
+        Entry<K, V> entry = cursor;
+        removalKey = key;
+        cursor = null;
+        return entry;
+      }
+
+      @Override
+      public void remove() {
+        requireState(removalKey != null);
+        delegate.remove(removalKey);
+        removalKey = null;
       }
     }
   }
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java
index e5276b0d0f..d37b1da2b1 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalAsyncLoadingCache.java
@@ -222,7 +222,7 @@ public CompletableFuture<Map<K, V>> refreshAll(Iterable<? extends K> keys) {
       }
 
       // If the entry is absent then perform a new load, else if in-flight then return it
-      var oldValueFuture = asyncCache.cache().getIfPresentQuietly(key, /* writeTime */ new long[1]);
+      var oldValueFuture = asyncCache.cache().getIfPresentQuietly(key);
       if ((oldValueFuture == null)
           || (oldValueFuture.isDone() && oldValueFuture.isCompletedExceptionally())) {
         if (oldValueFuture != null) {
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java
index 06ea0c2b9e..5566e28826 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalCache.java
@@ -74,6 +74,13 @@ interface LocalCache<K, V> extends ConcurrentMap<K, V> {
   @Nullable
   V getIfPresent(K key, boolean recordStats);
 
+  /**
+   * See {@link Cache#getIfPresent(K)}. This method differs by not recording the access with
+   * the statistics nor the eviction policy.
+   */
+  @Nullable
+  V getIfPresentQuietly(Object key);
+
   /**
    * See {@link Cache#getIfPresent(K)}. This method differs by not recording the access with
    * the statistics nor the eviction policy, and populates the write-time if known.
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java
index ec63de7327..9dcb5418a5 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/LocalLoadingCache.java
@@ -42,8 +42,8 @@
 interface LocalLoadingCache<K, V> extends LocalManualCache<K, V>, LoadingCache<K, V> {
   Logger logger = System.getLogger(LocalLoadingCache.class.getName());
 
-  /** Returns the {@link CacheLoader} used by this cache. */
-  CacheLoader<? super K, V> cacheLoader();
+  /** Returns the {@link AsyncCacheLoader} used by this cache. */
+  AsyncCacheLoader<? super K, V> cacheLoader();
 
   /** Returns the {@link CacheLoader#load} as a mapping function. */
   Function<K, V> mappingFunction();
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java
index 483ee08654..eeb16d84eb 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/UnboundedLocalCache.java
@@ -129,6 +129,11 @@ public Object referenceKey(K key) {
     return value;
   }
 
+  @Override
+  public @Nullable V getIfPresentQuietly(Object key) {
+    return data.get(key);
+  }
+
   @Override
   public @Nullable V getIfPresentQuietly(K key, long[/* 1 */] writeTime) {
     return data.get(key);
@@ -535,7 +540,7 @@ public boolean replace(K key, V oldValue, V newValue) {
 
   @Override
   public boolean equals(Object o) {
-    return data.equals(o);
+    return (o == this) || data.equals(o);
   }
 
   @Override
@@ -545,7 +550,16 @@ public int hashCode() {
 
   @Override
   public String toString() {
-    return data.toString();
+    var result = new StringBuilder().append('{');
+    data.forEach((key, value) -> {
+      if (result.length() != 1) {
+        result.append(',').append(' ');
+      }
+      result.append((key == this) ? "(this Map)" : key);
+      result.append('=');
+      result.append((value == this) ? "(this Map)" : value);
+    });
+    return result.append('}').toString();
   }
 
   @Override
@@ -977,7 +991,7 @@ static final class UnboundedLocalLoadingCache<K, V> extends UnboundedLocalManual
     }
 
     @Override
-    public CacheLoader<? super K, V> cacheLoader() {
+    public AsyncCacheLoader<? super K, V> cacheLoader() {
       return cacheLoader;
     }
 
diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/Weigher.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/Weigher.java
index fe6e284aab..60c5e771d1 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/Weigher.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/Weigher.java
@@ -15,6 +15,7 @@
  */
 package com.github.benmanes.caffeine.cache;
 
+import static com.github.benmanes.caffeine.cache.Caffeine.requireArgument;
 import static java.util.Objects.requireNonNull;
 
 import java.io.Serializable;
@@ -89,7 +90,7 @@ final class BoundedWeigher<K, V> implements Weigher<K, V>, Serializable {
   @Override
   public int weigh(K key, V value) {
     int weight = delegate.weigh(key, value);
-    Caffeine.requireArgument(weight >= 0);
+    requireArgument(weight >= 0);
     return weight;
   }
 
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java
index eca12ca1cb..b6b789e07d 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java
@@ -39,6 +39,7 @@
 import java.util.Set;
 import java.util.Spliterators;
 import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.function.BiFunction;
 import java.util.function.Function;
@@ -60,6 +61,8 @@
 import com.github.benmanes.caffeine.cache.testing.CheckNoStats;
 import com.github.benmanes.caffeine.testing.ConcurrentTestHarness;
 import com.github.benmanes.caffeine.testing.Int;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Maps;
 import com.google.common.testing.SerializableTester;
 
 /**
@@ -1551,6 +1554,15 @@ public void equals_self(Map<Int, Int> map, CacheContext context) {
   public void equals(Map<Int, Int> map, CacheContext context) {
     assertThat(map.equals(context.original())).isTrue();
     assertThat(context.original().equals(map)).isTrue();
+
+    assertThat(map.equals(context.absent())).isFalse();
+    assertThat(context.absent().equals(map)).isFalse();
+
+    if (!map.isEmpty()) {
+      var other = Maps.asMap(map.keySet(), CompletableFuture::completedFuture);
+      assertThat(map.equals(other)).isFalse();
+      assertThat(other.equals(map)).isFalse();
+    }
   }
 
   @CheckNoStats
@@ -1600,12 +1612,19 @@ public void equalsAndHashCodeFail_present(Map<Int, Int> map, CacheContext contex
   @Test(dataProvider = "caches")
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
   public void toString(Map<Int, Int> map, CacheContext context) {
-    var toString = map.toString();
-    if (!context.original().toString().equals(toString)) {
-      map.forEach((key, value) -> {
-        assertThat(toString).contains(key + "=" + value);
-      });
-    }
+    assertThat(parseToString(map)).containsExactlyEntriesIn(parseToString(context.original()));
+  }
+
+  @Test(dataProvider = "caches")
+  @CacheSpec(implementation = Implementation.Caffeine)
+  public void toString_self(Map<Object, Object> map, CacheContext context) {
+    map.put(context.absentKey(), map);
+    assertThat(map.toString()).contains(context.absentKey() + "=(this Map)");
+  }
+
+  private static Map<String, String> parseToString(Map<Int, Int> map) {
+    return Splitter.on(',').trimResults().omitEmptyStrings().withKeyValueSeparator("=")
+        .split(map.toString().replaceAll("\\{|\\}", ""));
   }
 
   /* --------------- Key Set --------------- */
@@ -1613,14 +1632,14 @@ public void toString(Map<Int, Int> map, CacheContext context) {
   @CheckNoStats
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
   @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
-  public void keySetToArray_null(Map<Int, Int> map, CacheContext context) {
+  public void keySet_toArray_null(Map<Int, Int> map, CacheContext context) {
     map.keySet().toArray((Int[]) null);
   }
 
   @CheckNoStats
   @Test(dataProvider = "caches")
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
-  public void keySetToArray(Map<Int, Int> map, CacheContext context) {
+  public void keySet_toArray(Map<Int, Int> map, CacheContext context) {
     var array = map.keySet().toArray();
     assertThat(array).asList().containsExactlyElementsIn(context.original().keySet());
 
@@ -1782,14 +1801,14 @@ public void keySpliterator_estimateSize(Map<Int, Int> map, CacheContext context)
   @CheckNoStats
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
   @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
-  public void valuesToArray_null(Map<Int, Int> map, CacheContext context) {
+  public void values_toArray_null(Map<Int, Int> map, CacheContext context) {
     map.values().toArray((Int[]) null);
   }
 
   @CheckNoStats
   @Test(dataProvider = "caches")
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
-  public void valuesToArray(Map<Int, Int> map, CacheContext context) {
+  public void values_toArray(Map<Int, Int> map, CacheContext context) {
     var array = map.values().toArray();
     assertThat(array).asList().containsExactlyElementsIn(context.original().values());
 
@@ -1976,14 +1995,14 @@ public void valueSpliterator_estimateSize(Map<Int, Int> map, CacheContext contex
   @CheckNoStats
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
   @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
-  public void entrySetToArray_null(Map<Int, Int> map, CacheContext context) {
+  public void entrySet_toArray_null(Map<Int, Int> map, CacheContext context) {
     map.entrySet().toArray((Map.Entry<?, ?>[]) null);
   }
 
   @CheckNoStats
   @Test(dataProvider = "caches")
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
-  public void entriesToArray(Map<Int, Int> map, CacheContext context) {
+  public void entrySet_toArray(Map<Int, Int> map, CacheContext context) {
     var array = map.entrySet().toArray();
     assertThat(array).asList().containsExactlyElementsIn(context.original().entrySet());
 
@@ -2231,6 +2250,21 @@ public void writeThroughEntry_serialize(Map<Int, Int> map, CacheContext context)
     assertThat(entry).isEqualTo(copy);
   }
 
+  @Test
+  public void writeThroughEntry_equals_hashCode_toString() {
+    var map = new ConcurrentHashMap<>();
+    var entry = new WriteThroughEntry<>(map, 1, 2);
+
+    assertThat(entry.equals(Map.entry(1, 2))).isTrue();
+    assertThat(entry.hashCode()).isEqualTo(Map.entry(1, 2).hashCode());
+    assertThat(entry.toString()).isEqualTo(Map.entry(1, 2).toString());
+
+    var other = new WriteThroughEntry<>(map, 3, 4);
+    assertThat(entry.equals(other)).isFalse();
+    assertThat(entry.hashCode()).isNotEqualTo(other.hashCode());
+    assertThat(entry.toString()).isNotEqualTo(other.toString());
+  }
+
   @SuppressWarnings("serial")
   static final class ExpectedError extends Error {}
 }
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncAsMapTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncAsMapTest.java
index ee7316a5f2..6e26cbc998 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncAsMapTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsyncAsMapTest.java
@@ -58,6 +58,8 @@
 import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
 import com.github.benmanes.caffeine.cache.testing.CheckNoStats;
 import com.github.benmanes.caffeine.testing.Int;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Maps;
 
 /**
  * The test cases for the {@link AsyncCache#asMap()} view and its serializability. These tests do
@@ -1304,6 +1306,16 @@ public void equals(AsyncCache<Int, Int> cache, CacheContext context) {
     var map = Map.copyOf(cache.asMap());
     assertThat(cache.asMap().equals(map)).isTrue();
     assertThat(map.equals(cache.asMap())).isTrue();
+
+    var absent = Maps.asMap(context.absentKeys(), CompletableFuture::completedFuture);
+    assertThat(cache.asMap().equals(absent)).isFalse();
+    assertThat(absent.equals(cache.asMap())).isFalse();
+
+    if (!cache.asMap().isEmpty()) {
+      var other = Maps.asMap(cache.asMap().keySet(), CompletableFuture::completedFuture);
+      assertThat(cache.asMap().equals(other)).isFalse();
+      assertThat(other.equals(cache.asMap())).isFalse();
+    }
   }
 
   @CheckNoStats
@@ -1354,12 +1366,13 @@ public void equalsAndHashCodeFail_present(
   @Test(dataProvider = "caches")
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
   public void toString(AsyncCache<Int, Int> cache, CacheContext context) {
-    String toString = cache.asMap().toString();
-    if (!context.original().toString().equals(toString)) {
-      cache.asMap().forEach((key, value) -> {
-        assertThat(toString).contains(key + "=" + value);
-      });
-    }
+    assertThat(parseToString(cache.asMap()))
+        .containsExactlyEntriesIn(parseToString(Map.copyOf(cache.asMap())));
+  }
+
+  private static Map<String, String> parseToString(Map<Int, CompletableFuture<Int>> map) {
+    return Splitter.on(',').trimResults().omitEmptyStrings().withKeyValueSeparator("=")
+        .split(map.toString().replaceAll("\\{|\\}", ""));
   }
 
   /* ---------------- Key Set -------------- */
@@ -1367,14 +1380,14 @@ public void toString(AsyncCache<Int, Int> cache, CacheContext context) {
   @CheckNoStats
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
   @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
-  public void keySetToArray_null(AsyncCache<Int, Int> cache, CacheContext context) {
+  public void keySet_toArray_null(AsyncCache<Int, Int> cache, CacheContext context) {
     cache.asMap().keySet().toArray((Int[]) null);
   }
 
   @CheckNoStats
   @Test(dataProvider = "caches")
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
-  public void keySetToArray(AsyncCache<Int, Int> cache, CacheContext context) {
+  public void keySet_toArray(AsyncCache<Int, Int> cache, CacheContext context) {
     var ints = cache.asMap().keySet().toArray(new Int[0]);
     assertThat(ints).asList().containsExactlyElementsIn(context.original().keySet());
 
@@ -1536,14 +1549,14 @@ public void keySpliterator_estimateSize(AsyncCache<Int, Int> cache, CacheContext
   @CheckNoStats
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
   @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
-  public void valuesToArray_null(AsyncCache<Int, Int> cache, CacheContext context) {
+  public void values_toArray_null(AsyncCache<Int, Int> cache, CacheContext context) {
     cache.asMap().values().toArray((CompletableFuture<Int>[]) null);
   }
 
   @CheckNoStats
   @Test(dataProvider = "caches")
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
-  public void valuesToArray(AsyncCache<Int, Int> cache, CacheContext context) {
+  public void values_toArray(AsyncCache<Int, Int> cache, CacheContext context) {
     var futures = cache.asMap().values().toArray(new CompletableFuture<?>[0]);
     var values1 = Stream.of(futures).map(CompletableFuture::join).collect(toList());
     assertThat(values1).containsExactlyElementsIn(context.original().values());
@@ -1733,14 +1746,14 @@ public void valueSpliterator_estimateSize(AsyncCache<Int, Int> cache, CacheConte
   @CheckNoStats
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
   @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
-  public void entrySetToArray_null(AsyncCache<Int, Int> cache, CacheContext context) {
+  public void entrySet_toArray_null(AsyncCache<Int, Int> cache, CacheContext context) {
     cache.asMap().entrySet().toArray((Map.Entry<?, ?>[]) null);
   }
 
   @CheckNoStats
   @Test(dataProvider = "caches")
   @CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
-  public void entriesToArray(AsyncCache<Int, Int> cache, CacheContext context) {
+  public void entrySet_toArray(AsyncCache<Int, Int> cache, CacheContext context) {
     @SuppressWarnings("unchecked")
     var entries = (Map.Entry<Int, CompletableFuture<Int>>[])
         cache.asMap().entrySet().toArray(new Map.Entry<?, ?>[0]);
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ExpirationTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ExpirationTest.java
index 79143cf7f0..e76fbf083f 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ExpirationTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ExpirationTest.java
@@ -68,6 +68,7 @@
 import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
 import com.github.benmanes.caffeine.cache.testing.CheckNoStats;
 import com.github.benmanes.caffeine.testing.Int;
+import com.google.common.base.Splitter;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Range;
 import com.google.common.util.concurrent.Futures;
@@ -1232,6 +1233,100 @@ public void merge_writeTime(Map<Int, Int> map, CacheContext context) {
     assertThat(map).containsKey(key);
   }
 
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
+      mustExpireWithAnyOf = { AFTER_ACCESS, AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.CREATE, CacheExpiry.WRITE, CacheExpiry.ACCESS },
+      expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE},
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  public void entrySet_equals(Map<Int, Int> map, CacheContext context) {
+    context.ticker().advance(30, TimeUnit.SECONDS);
+    map.putAll(context.absent());
+
+    context.ticker().advance(45, TimeUnit.SECONDS);
+    assertThat(map.entrySet().equals(context.absent().entrySet())).isFalse();
+    assertThat(context.absent().entrySet().equals(map.entrySet())).isFalse();
+
+    context.cleanUp();
+    assertThat(map.entrySet().equals(context.absent().entrySet())).isTrue();
+    assertThat(context.absent().entrySet().equals(map.entrySet())).isTrue();
+  }
+
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
+      mustExpireWithAnyOf = { AFTER_ACCESS, AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.CREATE, CacheExpiry.WRITE, CacheExpiry.ACCESS },
+      expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE},
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  public void entrySet_hashCode(Map<Int, Int> map, CacheContext context) {
+    context.ticker().advance(30, TimeUnit.SECONDS);
+    map.putAll(context.absent());
+
+    context.ticker().advance(45, TimeUnit.SECONDS);
+    assertThat(map.hashCode()).isEqualTo(context.absent().hashCode());
+
+    context.cleanUp();
+    assertThat(map.hashCode()).isEqualTo(context.absent().hashCode());
+  }
+
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
+      mustExpireWithAnyOf = { AFTER_ACCESS, AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.CREATE, CacheExpiry.WRITE, CacheExpiry.ACCESS },
+      expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE},
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  public void equals(Map<Int, Int> map, CacheContext context) {
+    context.ticker().advance(30, TimeUnit.SECONDS);
+    map.putAll(context.absent());
+
+    context.ticker().advance(45, TimeUnit.SECONDS);
+    assertThat(map.equals(context.absent())).isFalse();
+    assertThat(context.absent().equals(map)).isFalse();
+
+    context.cleanUp();
+    assertThat(map.equals(context.absent())).isTrue();
+    assertThat(context.absent().equals(map)).isTrue();
+  }
+
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
+      mustExpireWithAnyOf = { AFTER_ACCESS, AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.CREATE, CacheExpiry.WRITE, CacheExpiry.ACCESS },
+      expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE},
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  public void hashCode(Map<Int, Int> map, CacheContext context) {
+    context.ticker().advance(30, TimeUnit.SECONDS);
+    map.putAll(context.absent());
+
+    context.ticker().advance(45, TimeUnit.SECONDS);
+    assertThat(map.hashCode()).isEqualTo(context.absent().hashCode());
+
+    context.cleanUp();
+    assertThat(map.hashCode()).isEqualTo(context.absent().hashCode());
+  }
+
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
+      mustExpireWithAnyOf = { AFTER_ACCESS, AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.CREATE, CacheExpiry.WRITE, CacheExpiry.ACCESS },
+      expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE},
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  public void toString(Map<Int, Int> map, CacheContext context) {
+    context.ticker().advance(30, TimeUnit.SECONDS);
+    map.putAll(context.absent());
+
+    context.ticker().advance(45, TimeUnit.SECONDS);
+    assertThat(parseToString(map)).containsExactlyEntriesIn(parseToString(context.absent()));
+
+    context.cleanUp();
+    assertThat(parseToString(map)).containsExactlyEntriesIn(parseToString(context.absent()));
+  }
+
+  private static Map<String, String> parseToString(Map<Int, Int> map) {
+    return Splitter.on(',').trimResults().omitEmptyStrings().withKeyValueSeparator("=")
+        .split(map.toString().replaceAll("\\{|\\}", ""));
+  }
+
   /* --------------- Weights --------------- */
 
   @Test(dataProvider = "caches")
@@ -1329,7 +1424,7 @@ public void merge_weighted(Cache<Int, List<Int>> cache, CacheContext context) {
       expiry = { CacheExpiry.DISABLED, CacheExpiry.CREATE, CacheExpiry.WRITE, CacheExpiry.ACCESS },
       expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE},
       expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, expiryTime = Expire.ONE_MINUTE)
-  public void keySetToArray(Map<Int, Int> map, CacheContext context) {
+  public void keySet_toArray(Map<Int, Int> map, CacheContext context) {
     context.ticker().advance(2 * context.expiryTime().timeNanos(), TimeUnit.NANOSECONDS);
     assertThat(map.keySet().toArray(new Int[0])).isEmpty();
     assertThat(map.keySet().toArray(Int[]::new)).isEmpty();
@@ -1394,7 +1489,7 @@ public void keySet_inFlight(AsyncCache<Int, Int> cache, CacheContext context) {
       expiry = { CacheExpiry.DISABLED, CacheExpiry.CREATE, CacheExpiry.WRITE, CacheExpiry.ACCESS },
       expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE},
       expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, expiryTime = Expire.ONE_MINUTE)
-  public void valuesToArray(Map<Int, Int> map, CacheContext context) {
+  public void values_toArray(Map<Int, Int> map, CacheContext context) {
     context.ticker().advance(2 * context.expiryTime().timeNanos(), TimeUnit.NANOSECONDS);
     assertThat(map.values().toArray(new Int[0])).isEmpty();
     assertThat(map.values().toArray(Int[]::new)).isEmpty();
@@ -1459,7 +1554,7 @@ public void values_inFlight(AsyncCache<Int, Int> cache, CacheContext context) {
       expiry = { CacheExpiry.DISABLED, CacheExpiry.CREATE, CacheExpiry.WRITE, CacheExpiry.ACCESS },
       expireAfterAccess = {Expire.DISABLED, Expire.ONE_MINUTE},
       expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE}, expiryTime = Expire.ONE_MINUTE)
-  public void entrySetToArray(Map<Int, Int> map, CacheContext context) {
+  public void entrySet_toArray(Map<Int, Int> map, CacheContext context) {
     context.ticker().advance(2 * context.expiryTime().timeNanos(), TimeUnit.NANOSECONDS);
     assertThat(map.entrySet().toArray(new Map.Entry<?, ?>[0])).isEmpty();
     assertThat(map.entrySet().toArray(Map.Entry<?, ?>[]::new)).isEmpty();
@@ -1518,17 +1613,6 @@ public void entrySet_inFlight(AsyncCache<Int, Int> cache, CacheContext context)
     future.complete(null);
   }
 
-  /**
-   * Ensures that variable expiration is run, as it may not have due to expiring in coarse batches.
-   */
-  private static void runVariableExpiration(CacheContext context) {
-    if (context.expiresVariably()) {
-      // Variable expires in coarse buckets at a time
-      context.ticker().advance(2, TimeUnit.SECONDS);
-      context.cleanUp();
-    }
-  }
-
   /* --------------- Policy --------------- */
 
   @CheckNoStats
@@ -1543,4 +1627,15 @@ public void getIfPresentQuietly_expired(Cache<Int, Int> cache, CacheContext cont
     context.ticker().advance(10, TimeUnit.MINUTES);
     assertThat(cache.policy().getIfPresentQuietly(context.firstKey())).isNull();
   }
+
+  /**
+   * Ensures that variable expiration is run, as it may not have due to expiring in coarse batches.
+   */
+  private static void runVariableExpiration(CacheContext context) {
+    if (context.expiresVariably()) {
+      // Variable expires in coarse buckets at a time
+      context.ticker().advance(2, TimeUnit.SECONDS);
+      context.cleanUp();
+    }
+  }
 }
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ReferenceTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ReferenceTest.java
index 46bfbe63a6..e85169910e 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ReferenceTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/ReferenceTest.java
@@ -56,6 +56,7 @@
 import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
 import com.github.benmanes.caffeine.cache.testing.CheckNoStats;
 import com.github.benmanes.caffeine.testing.Int;
+import com.google.common.base.Splitter;
 import com.google.common.collect.Maps;
 import com.google.common.testing.GcFinalization;
 
@@ -829,7 +830,7 @@ public void merge_weighted(Cache<Int, List<Int>> cache, CacheContext context) {
       expireAfterAccess = Expire.DISABLED, expireAfterWrite = Expire.DISABLED,
       maximumSize = Maximum.UNREACHABLE, weigher = CacheWeigher.DEFAULT,
       stats = Stats.ENABLED, removalListener = Listener.DEFAULT)
-  public void keySetToArray(Map<Int, Int> map, CacheContext context) {
+  public void keySet_toArray(Map<Int, Int> map, CacheContext context) {
     context.clear();
     GcFinalization.awaitFullGc();
     assertThat(map.keySet().toArray()).isEmpty();
@@ -850,7 +851,7 @@ public void keySet_contains(Map<Int, Int> map, CacheContext context) {
       expireAfterAccess = Expire.DISABLED, expireAfterWrite = Expire.DISABLED,
       maximumSize = Maximum.UNREACHABLE, weigher = CacheWeigher.DEFAULT,
       stats = Stats.ENABLED, removalListener = Listener.DEFAULT)
-  public void valuesToArray(Map<Int, Int> map, CacheContext context) {
+  public void values_toArray(Map<Int, Int> map, CacheContext context) {
     context.clear();
     GcFinalization.awaitFullGc();
     assertThat(map.values().toArray()).isEmpty();
@@ -872,7 +873,7 @@ public void values_contains(Map<Int, Int> map, CacheContext context) {
       expireAfterAccess = Expire.DISABLED, expireAfterWrite = Expire.DISABLED,
       maximumSize = Maximum.UNREACHABLE, weigher = CacheWeigher.DEFAULT,
       stats = Stats.ENABLED, removalListener = Listener.DEFAULT)
-  public void entrySetToArray(Map<Int, Int> map, CacheContext context) {
+  public void entrySet_toArray(Map<Int, Int> map, CacheContext context) {
     context.clear();
     GcFinalization.awaitFullGc();
     assertThat(map.entrySet().toArray()).isEmpty();
@@ -901,4 +902,69 @@ public void entrySet_contains_nullValue(Map<Int, Int> map, CacheContext context)
     GcFinalization.awaitFullGc();
     assertThat(map.entrySet().contains(entry)).isFalse();
   }
+
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, requiresWeakOrSoft = true)
+  public void entrySet_equals(Map<Int, Int> map, CacheContext context) {
+    var expected = context.absent();
+    map.putAll(expected);
+    context.clear();
+
+    GcFinalization.awaitFullGc();
+    assertThat(map.entrySet().equals(expected.entrySet())).isFalse();
+    assertThat(expected.entrySet().equals(map.entrySet())).isFalse();
+
+    assertThat(context.cache()).whenCleanedUp().hasSize(expected.size());
+    assertThat(map.entrySet().equals(expected.entrySet())).isTrue();
+    assertThat(expected.entrySet().equals(map.entrySet())).isTrue();
+  }
+
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, requiresWeakOrSoft = true)
+  public void equals(Map<Int, Int> map, CacheContext context) {
+    var expected = context.absent();
+    map.putAll(expected);
+    context.clear();
+
+    GcFinalization.awaitFullGc();
+    assertThat(map.equals(expected)).isFalse();
+    assertThat(expected.equals(map)).isFalse();
+
+    assertThat(context.cache()).whenCleanedUp().hasSize(expected.size());
+    assertThat(map.equals(expected)).isTrue();
+    assertThat(expected.equals(map)).isTrue();
+  }
+
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, requiresWeakOrSoft = true)
+  public void hashCode(Map<Int, Int> map, CacheContext context) {
+    var expected = context.absent();
+    map.putAll(expected);
+    context.clear();
+
+    GcFinalization.awaitFullGc();
+    assertThat(map.hashCode()).isEqualTo(expected.hashCode());
+
+    assertThat(context.cache()).whenCleanedUp().hasSize(expected.size());
+    assertThat(map.hashCode()).isEqualTo(expected.hashCode());
+  }
+
+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, requiresWeakOrSoft = true)
+  public void toString(Map<Int, Int> map, CacheContext context) {
+    var expected = context.absent();
+    map.putAll(expected);
+    context.clear();
+
+    GcFinalization.awaitFullGc();
+    assertThat(parseToString(map)).containsExactlyEntriesIn(parseToString(expected));
+
+    assertThat(context.cache()).whenCleanedUp().hasSize(expected.size());
+    assertThat(parseToString(map)).containsExactlyEntriesIn(parseToString(expected));
+  }
+
+  private static Map<String, String> parseToString(Map<Int, Int> map) {
+    return Splitter.on(',').trimResults().omitEmptyStrings().withKeyValueSeparator("=")
+        .split(map.toString().replaceAll("\\{|\\}", ""));
+  }
 }
diff --git a/config/spotbugs/exclude.xml b/config/spotbugs/exclude.xml
index 245a310627..16f75871ca 100644
--- a/config/spotbugs/exclude.xml
+++ b/config/spotbugs/exclude.xml
@@ -117,6 +117,10 @@
     <Method name="next"/>
     <Bug pattern="UWF_FIELD_NOT_INITIALIZED_IN_CONSTRUCTOR"/>
   </Match>
+  <Match>
+    <Class name="com.github.benmanes.caffeine.cache.LocalAsyncCache$AsMapView$EntryIterator"/>
+    <Bug pattern="UWF_FIELD_NOT_INITIALIZED_IN_CONSTRUCTOR"/>
+  </Match>
   <Match>
     <Class name="com.github.benmanes.caffeine.cache.LocalAsyncCache$AsyncBulkCompleter$NullMapCompletionException"/>
     <Method name="&lt;init&gt;"/>