
Commit d74ae2d

Author: Peter Alfonsi
Parent: 8ad8b7a

    make policy rejections count as neither hit or miss

    Signed-off-by: Peter Alfonsi <[email protected]>

File tree: 2 files changed (+26, -22 lines)


modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java

Lines changed: 24 additions & 17 deletions
@@ -307,18 +307,23 @@ public V computeIfAbsent(ICacheKey<K> key, LoadAwareCacheLoader<ICacheKey<K>, V>
             // Add the value to the onHeap cache. We are calling computeIfAbsent which does another get inside.
             // This is needed as there can be many requests for the same key at the same time and we only want to load
             // the value once.
-            Tuple<V, Boolean> computedValueTuple = compute(key, loader, future);
-            // Handle stats
-            if (computedValueTuple.v2()) {
-                // The value was just computed and added to the cache by this thread, or it was rejected by the policy.
-                // Register a miss for the heap cache, and the disk cache if present
-                statsHolder.incrementMisses(heapDimensionValues);
-                if (caches.get(diskCache).isEnabled()) {
-                    statsHolder.incrementMisses(diskDimensionValues);
+            Tuple<V, Tuple<Boolean, Boolean>> computedValueTuple = compute(key, loader, future);
+            boolean wasCacheMiss = computedValueTuple.v2().v1();
+            boolean wasRejectedByPolicy = computedValueTuple.v2().v2();
+            // If the value was rejected by policy, it counts as neither a hit or miss.
+            if (!wasRejectedByPolicy) {
+                // Handle stats
+                if (wasCacheMiss) {
+                    // The value was just computed and added to the cache by this thread.
+                    // Register a miss for the heap cache, and the disk cache if present
+                    statsHolder.incrementMisses(heapDimensionValues);
+                    if (caches.get(diskCache).isEnabled()) {
+                        statsHolder.incrementMisses(diskDimensionValues);
+                    }
+                } else {
+                    // Another thread requesting this key already loaded the value. Register a hit for the heap cache
+                    statsHolder.incrementHits(heapDimensionValues);
                 }
-            } else {
-                // Another thread requesting this key already loaded the value. Register a hit for the heap cache
-                statsHolder.incrementHits(heapDimensionValues);
             }
             return computedValueTuple.v1();
         } else {
@@ -335,14 +340,15 @@ public V computeIfAbsent(ICacheKey<K> key, LoadAwareCacheLoader<ICacheKey<K>, V>
             return cacheValueTuple.v1();
         }
 
-    private Tuple<V, Boolean> compute(
+    private Tuple<V, Tuple<Boolean, Boolean>> compute(
         ICacheKey<K> key,
         LoadAwareCacheLoader<ICacheKey<K>, V> loader,
         CompletableFuture<Tuple<ICacheKey<K>, V>> future
     ) throws Exception {
         // Handler to handle results post-processing. Takes a tuple<key, value> or exception as an input and returns
         // the value. Also before returning value, puts the value in cache.
         boolean wasCacheMiss = false;
+        boolean wasRejectedByPolicy = false;
         BiFunction<Tuple<ICacheKey<K>, V>, Throwable, Void> handler = (pair, ex) -> {
             if (pair != null) {
                 boolean didAddToCache = false;
@@ -381,33 +387,34 @@ private Tuple<V, Boolean> compute(
                 future.completeExceptionally(npe);
                 throw new ExecutionException(npe);
             } else {
-                wasCacheMiss = true;
                 if (evaluatePoliciesList(value, policies)) {
                     future.complete(new Tuple<>(key, value));
+                    wasCacheMiss = true;
                 } else {
                     future.complete(null); // Passing null would skip the logic to put this into onHeap cache.
                     // Signal to the caller that the key didn't enter the cache by sending a removal notification.
-                    // This case also counts as a cache miss.
+                    // This case does not count as a cache miss.
                     removalListener.onRemoval(new RemovalNotification<>(key, value, RemovalReason.EXPLICIT));
+                    wasRejectedByPolicy = true;
                 }
             }
         } else {
             try {
                 Tuple<ICacheKey<K>, V> futureTuple = future.get();
                 if (futureTuple == null) {
                     // This case can happen if we earlier completed the future with null to skip putting the value into the cache.
-                    // It should behave the same as a cache miss.
-                    wasCacheMiss = true;
+                    // It does not count as a cache miss.
                     value = loader.load(key);
                     removalListener.onRemoval(new RemovalNotification<>(key, value, RemovalReason.EXPLICIT));
+                    wasRejectedByPolicy = true;
                 } else {
                     value = futureTuple.v2();
                 }
             } catch (InterruptedException ex) {
                 throw new IllegalStateException(ex);
             }
         }
-        return new Tuple<>(value, wasCacheMiss);
+        return new Tuple<>(value, new Tuple<>(wasCacheMiss, wasRejectedByPolicy));
     }
 
     @Override
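
Note on the new return shape: compute() now reports two independent flags, wasCacheMiss and wasRejectedByPolicy, rather than a single boolean. The stand-alone sketch below is not part of the commit; only the two flag names come from the diff above, and the class and enum names are invented for illustration. It shows the three outcomes a caller can now distinguish and how each maps to the stats update.

// Stand-alone illustration of the accounting rule this commit introduces.
// Only wasCacheMiss / wasRejectedByPolicy come from the diff; everything else is hypothetical.
public class ComputeOutcomeSketch {
    enum Outcome { MISS, HIT, REJECTED_BY_POLICY }

    static Outcome classify(boolean wasCacheMiss, boolean wasRejectedByPolicy) {
        if (wasRejectedByPolicy) {
            // Policy rejections increment neither the hit nor the miss counters.
            return Outcome.REJECTED_BY_POLICY;
        }
        return wasCacheMiss ? Outcome.MISS : Outcome.HIT;
    }

    public static void main(String[] args) {
        System.out.println(classify(true, false));  // MISS: this thread loaded and cached the value
        System.out.println(classify(false, false)); // HIT: another thread had already loaded it
        System.out.println(classify(false, true));  // REJECTED_BY_POLICY: value never entered the cache
    }
}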

modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java

Lines changed: 2 additions & 5 deletions
@@ -1727,11 +1727,8 @@ public void testEntryPoliciesConcurrentlyWithComputeIfAbsent() throws Exception
         assertEquals(expectedKeys, getItemsForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_ON_HEAP));
         // We should have (numRepetitionsPerKey - 1) * (expectedKeys) hits
         assertEquals((numRepetitionsPerKey - 1) * expectedKeys, getHitsForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_ON_HEAP));
-        // We should have numRepetitionsPerKey misses for each rejected key, and 1 miss for each accepted key
-        assertEquals(
-            numRepetitionsPerKey * (keyValuePairs.size() - expectedKeys) + expectedKeys,
-            getMissesForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_ON_HEAP)
-        );
+        // We should have 1 miss for each accepted key. Rejected keys should not cause misses.
+        assertEquals(expectedKeys, getMissesForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_ON_HEAP));
 
         for (String key : keyValuePairs.keySet()) {
             ICacheKey<String> iCacheKey = getICacheKey(key);
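
To make the updated assertion concrete, here is a small stand-alone walkthrough of the expected counts. The variable names mirror the test, but the concrete numbers are made up for illustration and are not taken from the test file.

// Hypothetical numbers: 10 distinct keys, 6 of which pass the entry policy,
// each requested numRepetitionsPerKey = 3 times.
public class PolicyRejectionStatsExample {
    public static void main(String[] args) {
        int totalKeys = 10;
        int expectedKeys = 6;          // keys accepted by the entry policy
        int numRepetitionsPerKey = 3;

        // Each accepted key misses once (its first load) and hits on every repeat request.
        int expectedMisses = expectedKeys;                             // 6
        int expectedHits = (numRepetitionsPerKey - 1) * expectedKeys;  // 12

        // After this commit, requests for the 4 rejected keys add nothing to either counter.
        // Under the old assertion they would have added numRepetitionsPerKey * (totalKeys - expectedKeys) = 12 misses.
        System.out.println("hits=" + expectedHits + ", misses=" + expectedMisses);
    }
}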
