Re-read the current time after a computation (fixes #191)
Previously, the current time was read once at the beginning of the
computation. That single reading was used to determine whether the
entry had expired and, when a new value was computed, to set its
timestamps. For long computations and short expiration times, this
could result in the newly computed entry already being expired. A
chain of computations would then build up, each with a timestamp
older than the last because it had waited on its predecessor.

Now the ticker is read again after the computation completes, and the
timestamps are set from that second reading. This means there are two
reads per computation, but still only one on the happy path of a cache
hit. Note that obtaining the time can be an expensive operation, so
minimizing these reads is useful under high load.

Also updated dependencies and fixed issues discovered by SpotBugs and
Error Prone.
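
The effect is easiest to see through the public API. Below is a minimal, self-contained sketch that mirrors the new *_writeTime tests added in this commit; the class name, the fake-ticker wiring, and the one-minute/five-minute durations are illustrative choices rather than part of the change itself.

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.Ticker;

public final class SlowComputationExample {
  public static void main(String[] args) {
    AtomicLong nanos = new AtomicLong();
    Ticker ticker = nanos::get;  // fake ticker so the computation's duration can be simulated

    Cache<String, String> cache = Caffeine.newBuilder()
        .expireAfterWrite(1, TimeUnit.MINUTES)
        .ticker(ticker)
        .build();

    // The computation takes longer than the expiration window.
    String value = cache.get("key", k -> {
      nanos.addAndGet(TimeUnit.MINUTES.toNanos(5));  // simulate a five minute computation
      return "value";
    });

    // With the time re-read after the computation, the entry is stamped at completion
    // and remains present; previously it was stamped with the stale pre-computation
    // reading and could be created already expired.
    System.out.println(value);                      // value
    System.out.println(cache.getIfPresent("key"));  // value, not null
  }
}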
ben-manes committed Oct 30, 2017
1 parent 78be027 commit 66764ad
Showing 12 changed files with 186 additions and 61 deletions.
5 changes: 5 additions & 0 deletions build.gradle
@@ -66,6 +66,11 @@ subprojects {
     dependencies {
       errorprone libraries.errorProneCore
     }
+    configurations.all {
+      resolutionStrategy.dependencySubstitution {
+        substitute module('com.github.stephenc.jcip:jcip-annotations') with module('net.jcip:jcip-annotations:1.0')
+      }
+    }
   }

   sourceCompatibility = JavaVersion.VERSION_1_8
@@ -69,6 +69,7 @@ public ComputeBenchmark() {
   }

   @Setup
+  @SuppressWarnings("ReturnValueIgnored")
   public void setup() {
     if (computeType.equals("ConcurrentHashMap")) {
       setupConcurrentHashMap();
@@ -1636,8 +1636,6 @@ V put(K key, V value, boolean notifyWriter, boolean onlyIfAbsent) {
         node = nodeFactory.newNode(key, keyReferenceQueue(),
             value, valueReferenceQueue(), newWeight, now);
         setVariableTime(node, expireAfterCreate(key, value, now));
-        setAccessTime(node, now);
-        setWriteTime(node, now);
       }
       if (notifyWriter && hasWriter()) {
         Node<K, V> computed = node;
@@ -1978,7 +1976,7 @@ public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
       return newValue;
     };
     for (K key : keySet()) {
-      long now = expirationTicker().read();
+      long[] now = { expirationTicker().read() };
       Object lookupKey = nodeFactory.newLookupKey(key);
       remap(key, lookupKey, remappingFunction, now, /* computeIfAbsent */ false);
     }
@@ -2009,12 +2007,12 @@ public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction
       mappingFunction = statsAware(mappingFunction, recordLoad);
     }
     Object keyRef = nodeFactory.newReferenceKey(key, keyReferenceQueue());
-    return doComputeIfAbsent(key, keyRef, mappingFunction, now);
+    return doComputeIfAbsent(key, keyRef, mappingFunction, new long[] { now });
   }

   /** Returns the current value from a computeIfAbsent invocation. */
   V doComputeIfAbsent(K key, Object keyRef,
-      Function<? super K, ? extends V> mappingFunction, long now) {
+      Function<? super K, ? extends V> mappingFunction, long[/* 1 */] now) {
     @SuppressWarnings("unchecked")
     V[] oldValue = (V[]) new Object[1];
     @SuppressWarnings("unchecked")
@@ -2032,12 +2030,11 @@ V doComputeIfAbsent(K key, Object keyRef,
       if (newValue[0] == null) {
         return null;
       }
+      now[0] = expirationTicker().read();
       weight[1] = weigher.weigh(key, newValue[0]);
       n = nodeFactory.newNode(key, keyReferenceQueue(),
-          newValue[0], valueReferenceQueue(), weight[1], now);
-      setVariableTime(n, expireAfterCreate(key, newValue[0], now));
-      setAccessTime(n, now);
-      setWriteTime(n, now);
+          newValue[0], valueReferenceQueue(), weight[1], now[0]);
+      setVariableTime(n, expireAfterCreate(key, newValue[0], now[0]));
       return n;
     }

@@ -2047,7 +2044,7 @@ V doComputeIfAbsent(K key, Object keyRef,
     oldValue[0] = n.getValue();
     if ((nodeKey[0] == null) || (oldValue[0] == null)) {
       cause[0] = RemovalCause.COLLECTED;
-    } else if (hasExpired(n, now)) {
+    } else if (hasExpired(n, now[0])) {
       cause[0] = RemovalCause.EXPIRED;
     } else {
       return n;
@@ -2064,9 +2061,10 @@ V doComputeIfAbsent(K key, Object keyRef,
       n.setValue(newValue[0], valueReferenceQueue());
       n.setWeight(weight[1]);

-      setVariableTime(n, expireAfterCreate(key, newValue[0], now));
-      setAccessTime(n, now);
-      setWriteTime(n, now);
+      now[0] = expirationTicker().read();
+      setVariableTime(n, expireAfterCreate(key, newValue[0], now[0]));
+      setAccessTime(n, now[0]);
+      setWriteTime(n, now[0]);
       return n;
     }
   });
@@ -2085,11 +2083,11 @@ V doComputeIfAbsent(K key, Object keyRef,
     }
     if (newValue[0] == null) {
       if (!isComputingAsync(node)) {
-        setVariableTime(node, expireAfterRead(node, key, oldValue[0], now));
-        setAccessTime(node, now);
+        setVariableTime(node, expireAfterRead(node, key, oldValue[0], now[0]));
+        setAccessTime(node, now[0]);
       }

-      afterRead(node, now, /* recordHit */ true);
+      afterRead(node, now[0], /* recordHit */ true);
       return oldValue[0];
     }
     if ((oldValue[0] == null) && (cause[0] == null)) {
@@ -2121,7 +2119,8 @@ public V computeIfPresent(K key,

     BiFunction<? super K, ? super V, ? extends V> statsAwareRemappingFunction =
         statsAware(remappingFunction, /* recordMiss */ false, /* recordLoad */ true);
-    return remap(key, lookupKey, statsAwareRemappingFunction, now, /* computeIfAbsent */ false);
+    return remap(key, lookupKey, statsAwareRemappingFunction,
+        new long[] { now }, /* computeIfAbsent */ false);
   }

   @Override
@@ -2130,7 +2129,7 @@ public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingF
     requireNonNull(key);
     requireNonNull(remappingFunction);

-    long now = expirationTicker().read();
+    long[] now = { expirationTicker().read() };
     Object keyRef = nodeFactory.newReferenceKey(key, keyReferenceQueue());
     BiFunction<? super K, ? super V, ? extends V> statsAwareRemappingFunction =
         statsAware(remappingFunction, recordMiss, recordLoad);
@@ -2143,7 +2142,7 @@ public V merge(K key, V value, BiFunction<? super V, ? super V, ? extends V> rem
     requireNonNull(value);
     requireNonNull(remappingFunction);

-    long now = expirationTicker().read();
+    long[] now = { expirationTicker().read() };
     Object keyRef = nodeFactory.newReferenceKey(key, keyReferenceQueue());
     BiFunction<? super K, ? super V, ? extends V> mergeFunction = (k, oldValue) ->
         (oldValue == null) ? value : statsAware(remappingFunction).apply(oldValue, value);
@@ -2167,7 +2166,7 @@ public V merge(K key, V value, BiFunction<? super V, ? super V, ? extends V> rem
    */
   @SuppressWarnings("PMD.EmptyIfStmt")
   V remap(K key, Object keyRef, BiFunction<? super K, ? super V, ? extends V> remappingFunction,
-      long now, boolean computeIfAbsent) {
+      long[/* 1 */] now, boolean computeIfAbsent) {
     @SuppressWarnings("unchecked")
     K[] nodeKey = (K[]) new Object[1];
     @SuppressWarnings("unchecked")
@@ -2189,12 +2188,11 @@ V remap(K key, Object keyRef, BiFunction<? super K, ? super V, ? extends V> rema
       if (newValue[0] == null) {
         return null;
       }
+      now[0] = expirationTicker().read();
       weight[1] = weigher.weigh(key, newValue[0]);
       n = nodeFactory.newNode(keyRef, newValue[0],
-          valueReferenceQueue(), weight[1], now);
-      setVariableTime(n, expireAfterCreate(key, newValue[0], now));
-      setAccessTime(n, now);
-      setWriteTime(n, now);
+          valueReferenceQueue(), weight[1], now[0]);
+      setVariableTime(n, expireAfterCreate(key, newValue[0], now[0]));
       return n;
     }

@@ -2203,7 +2201,7 @@ V remap(K key, Object keyRef, BiFunction<? super K, ? super V, ? extends V> rema
     oldValue[0] = n.getValue();
     if ((nodeKey[0] == null) || (oldValue[0] == null)) {
       cause[0] = RemovalCause.COLLECTED;
-    } else if (hasExpired(n, now)) {
+    } else if (hasExpired(n, now[0])) {
       cause[0] = RemovalCause.EXPIRED;
     }
     if (cause[0] != null) {
@@ -2228,18 +2226,19 @@ V remap(K key, Object keyRef, BiFunction<? super K, ? super V, ? extends V> rema

       weight[0] = n.getWeight();
       weight[1] = weigher.weigh(key, newValue[0]);
+      now[0] = expirationTicker().read();
       if (cause[0] == null) {
         if (newValue[0] != oldValue[0]) {
           cause[0] = RemovalCause.REPLACED;
         }
-        setVariableTime(n, expireAfterUpdate(n, key, newValue[0], now));
+        setVariableTime(n, expireAfterUpdate(n, key, newValue[0], now[0]));
       } else {
-        setVariableTime(n, expireAfterCreate(key, newValue[0], now));
+        setVariableTime(n, expireAfterCreate(key, newValue[0], now[0]));
       }
       n.setValue(newValue[0], valueReferenceQueue());
       n.setWeight(weight[1]);
-      setAccessTime(n, now);
-      setWriteTime(n, now);
+      setAccessTime(n, now[0]);
+      setWriteTime(n, now[0]);
       return n;
     }
   });
@@ -2266,13 +2265,13 @@ V remap(K key, Object keyRef, BiFunction<? super K, ? super V, ? extends V> rema
     } else {
       if (cause[0] == null) {
         if (!isComputingAsync(node)) {
-          setVariableTime(node, expireAfterRead(node, key, newValue[0], now));
-          setAccessTime(node, now);
+          setVariableTime(node, expireAfterRead(node, key, newValue[0], now[0]));
+          setAccessTime(node, now[0]);
         }
       } else if (cause[0] == RemovalCause.COLLECTED) {
         scheduleDrainBuffers();
       }
-      afterRead(node, now, /* recordHit */ false);
+      afterRead(node, now[0], /* recordHit */ false);
     }
   }

@@ -189,7 +189,7 @@ public BaseMpscLinkedArrayQueue(final int initialCapacity) {

     int p2capacity = ceilingPowerOfTwo(initialCapacity);
     // leave lower bit of mask clear
-    long mask = (p2capacity - 1) << 1;
+    long mask = (p2capacity - 1L) << 1;
     // need extra element to point at next array
     E[] buffer = allocate(p2capacity + 1);
     producerBuffer = buffer;
@@ -397,7 +397,7 @@ private E newBufferPeek(E[] nextBuffer, final long index) {

   private long newBufferAndOffset(E[] nextBuffer, final long index) {
     consumerBuffer = nextBuffer;
-    consumerMask = (nextBuffer.length - 2) << 1;
+    consumerMask = (nextBuffer.length - 2L) << 1;
     final long offsetInNew = modifiedCalcElementOffset(index, consumerMask);
     return offsetInNew;
   }
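
The two queue changes above widen one operand to long before the shift. With a plain int operand the subtraction and shift are evaluated in 32-bit arithmetic and only then widened, a latent-overflow pattern that SpotBugs and Error Prone warn about; presumably that is what these hunks address. A standalone illustration, using a deliberately extreme value:

public final class ShiftWideningExample {
  public static void main(String[] args) {
    int length = Integer.MAX_VALUE;    // extreme value, purely for illustration
    long narrow = (length - 2) << 1;   // evaluated in int: wraps around to -6, then widens
    long wide = (length - 2L) << 1;    // evaluated in long: 4294967290
    System.out.println(narrow);        // -6
    System.out.println(wide);          // 4294967290
  }
}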
@@ -112,6 +112,23 @@ public void getIfPresent_writerFails(Cache<Integer, Integer> cache, CacheContext
     }
   }

+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.EMPTY, expiryTime = Expire.ONE_MINUTE,
+      mustExpiresWithAnyOf = { AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.WRITE },
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  public void get_writeTime(Cache<Integer, Integer> cache, CacheContext context) {
+    Integer key = context.absentKey();
+    Integer value = context.absentValue();
+
+    cache.get(key, k -> {
+      context.ticker().advance(5, TimeUnit.MINUTES);
+      return value;
+    });
+    assertThat(cache.estimatedSize(), is(1L));
+    assertThat(cache.getIfPresent(key), is(value));
+  }
+
   @Test(dataProvider = "caches", expectedExceptions = DeleteException.class)
   @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG,
       population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
@@ -503,6 +520,24 @@ public void get(AsyncLoadingCache<Integer, Integer> cache, CacheContext context)
     assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.EXPIRED));
   }

+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.EMPTY, expiryTime = Expire.ONE_MINUTE,
+      mustExpiresWithAnyOf = { AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.WRITE },
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  @SuppressWarnings("FutureReturnValueIgnored")
+  public void get_writeTime(AsyncLoadingCache<Integer, Integer> cache, CacheContext context) {
+    Integer key = context.absentKey();
+    Integer value = context.absentValue();
+
+    cache.get(key, k -> {
+      context.ticker().advance(5, TimeUnit.MINUTES);
+      return value;
+    });
+    assertThat(cache.synchronous().estimatedSize(), is(1L));
+    assertThat(cache.getIfPresent(key), futureOf(value));
+  }
+
   @Test(dataProvider = "caches")
   @CacheSpec(population = Population.EMPTY, removalListener = Listener.CONSUMING,
       mustExpiresWithAnyOf = { AFTER_ACCESS, AFTER_WRITE, VARIABLE },
@@ -941,6 +976,23 @@ public void computeIfAbsent(Map<Integer, Integer> map, CacheContext context) {
     verifyWriter(context, (verifier, writer) -> verifier.deletions(count, RemovalCause.EXPIRED));
   }

+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.EMPTY, expiryTime = Expire.ONE_MINUTE,
+      mustExpiresWithAnyOf = { AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.WRITE },
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  public void computeIfAbsent_writeTime(Map<Integer, Integer> map, CacheContext context) {
+    Integer key = context.absentKey();
+    Integer value = context.absentValue();
+
+    map.computeIfAbsent(key, k -> {
+      context.ticker().advance(5, TimeUnit.MINUTES);
+      return value;
+    });
+    assertThat(map.size(), is(1));
+    assertThat(map.containsKey(key), is(true));
+  }
+
   @Test(dataProvider = "caches", expectedExceptions = DeleteException.class)
   @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG,
       population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
@@ -983,6 +1035,24 @@ public void computeIfPresent(Map<Integer, Integer> map, CacheContext context) {
     verifyWriter(context, (verifier, writer) -> verifier.deletions(count, RemovalCause.EXPIRED));
   }

+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
+      mustExpiresWithAnyOf = { AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.WRITE },
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  public void computeIfPresent_writeTime(Map<Integer, Integer> map, CacheContext context) {
+    Integer key = context.firstKey();
+    Integer value = context.absentValue();
+
+    map.computeIfPresent(key, (k, v) -> {
+      context.ticker().advance(5, TimeUnit.MINUTES);
+      return value;
+    });
+    context.cleanUp();
+    assertThat(map.size(), is(1));
+    assertThat(map.containsKey(key), is(true));
+  }
+
   @Test(dataProvider = "caches", expectedExceptions = DeleteException.class)
   @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG,
       population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
@@ -1024,6 +1094,24 @@ public void compute(Map<Integer, Integer> map, CacheContext context) {
     verifyWriter(context, (verifier, writer) -> verifier.deletions(count, RemovalCause.EXPIRED));
   }

+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
+      mustExpiresWithAnyOf = { AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.WRITE },
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  public void compute_writeTime(Map<Integer, Integer> map, CacheContext context) {
+    Integer key = context.firstKey();
+    Integer value = context.absentValue();
+
+    map.compute(key, (k, v) -> {
+      context.ticker().advance(5, TimeUnit.MINUTES);
+      return value;
+    });
+    context.cleanUp();
+    assertThat(map.size(), is(1));
+    assertThat(map.containsKey(key), is(true));
+  }
+
   @Test(dataProvider = "caches", expectedExceptions = DeleteException.class)
   @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG,
       population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
@@ -1063,6 +1151,24 @@ public void merge(Map<Integer, Integer> map, CacheContext context) {
     verifyWriter(context, (verifier, writer) -> verifier.deletions(count, RemovalCause.EXPIRED));
   }

+  @Test(dataProvider = "caches")
+  @CacheSpec(population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
+      mustExpiresWithAnyOf = { AFTER_WRITE, VARIABLE },
+      expiry = { CacheExpiry.DISABLED, CacheExpiry.WRITE },
+      expireAfterWrite = {Expire.DISABLED, Expire.ONE_MINUTE})
+  public void merge_writeTime(Map<Integer, Integer> map, CacheContext context) {
+    Integer key = context.firstKey();
+    Integer value = context.absentValue();
+
+    map.merge(key, value, (oldValue, v) -> {
+      context.ticker().advance(5, TimeUnit.MINUTES);
+      return value;
+    });
+    context.cleanUp();
+    assertThat(map.size(), is(1));
+    assertThat(map.containsKey(key), is(true));
+  }
+
   @Test(dataProvider = "caches", expectedExceptions = DeleteException.class)
   @CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG,
       population = Population.FULL, expiryTime = Expire.ONE_MINUTE,
10 changes: 10 additions & 0 deletions config/findbugs/exclude.xml → config/spotbugs/exclude.xml
@@ -89,4 +89,14 @@
     </Or>
     <Bug code="ST"/>
   </Match>
+  <Match>
+    <Class name="com.github.benmanes.caffeine.cache.simulator.policy.opt.ClairvoyantPolicy"/>
+    <Method name="finished"/>
+    <Bug pattern="RV_RETURN_VALUE_IGNORED"/>
+  </Match>
+  <Match>
+    <Class name="com.github.benmanes.caffeine.cache.simulator.policy.PolicyActor"/>
+    <Method name="process"/>
+    <Bug pattern="RV_RETURN_VALUE_IGNORED"/>
+  </Match>
 </FindBugsFilter>