From 0620efd8bebda44f54b84a3020cf6a9fcca30930 Mon Sep 17 00:00:00 2001
From: Ben Manes <ben.manes@gmail.com>
Date: Sun, 27 Feb 2022 12:41:39 -0800
Subject: [PATCH] Warn if writes are stalled due to blocked eviction (fixes #672)

Eviction occurs under an exclusive lock which is typically held for very short
periods to update the policy and possibly remove a victim entry. Writes are
applied to the hash table and the policy updates are buffered, allowing the
writer threads to schedule their work, tryLock to update the policy, and move
on if busy. If the write buffer becomes full, then to avoid a memory leak the
writers must block to assist, creating back pressure if the write rate exceeds
the eviction rate.

The eviction's removal of the victim from the hash table may cause unexpected
blocking. A map computation performs its work under the same lock guarding the
entry, which allows for atomicity of that operation and requires that other
writes to the entry wait until it completes. Typically this is quick, as caches
are read-heavy and the victim entry is unlikely to be computed on. However,
since the locking in ConcurrentHashMap is based on the hash bin, not the entry,
a hash collision can cause writes to different keys to delay each other. A
slow, long-running computation then blocks eviction even though the entries
differ. When this happens, the write buffer fills up and other writes are
blocked, so no write activity occurs until the eviction is allowed to proceed
again.

That scenario goes against the advice of this library and the JavaDoc in
ConcurrentHashMap, which encourage short and fast computations: milliseconds to
seconds in practice, not minutes or hours. Instead we offer AsyncCache to
decouple the computation from the map, which sacrifices linearizability for
more efficient processing. Of course, few users will be aware of these
implementation details early enough to make that decision, and some will find
out only when they observe production problems. To assist, we now log a warning
in the hope that it surfaces the problem earlier, aids debugging, and hints at
the required fix.
---
 .../caffeine/cache/BoundedLocalCache.java     | 42 ++++++++++++---
 .../caffeine/cache/BoundedLocalCacheTest.java | 52 +++++++++++++++++++
 .../caffeine/cache/LoadingCacheTest.java      |  6 +--
 .../caffeine/cache/RefreshAfterWriteTest.java |  6 +--
 .../caffeine/cache/testing/CacheContext.java  | 19 ++++---
 .../cache/testing/CacheGenerator.java         | 27 +++++----
 .../caffeine/cache/testing/CacheSpec.java     |  7 +++
 .../testing/CaffeineCacheFromContext.java     |  7 +--
 .../cache/testing/GuavaCacheFromContext.java  |  3 +-
 gradle/dependencies.gradle                    |  8 +--
 gradle/jmh.gradle                             |  3 ++
 11 files changed, 136 insertions(+), 44 deletions(-)

diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
index 601f63f20a..7739252eab 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
@@ -218,6 +218,8 @@ abstract class BoundedLocalCache<K, V> extends BLCHeader.DrainStatusRef<K, V>
   static final long EXPIRE_WRITE_TOLERANCE = TimeUnit.SECONDS.toNanos(1);
   /** The maximum duration before an entry expires. */
   static final long MAXIMUM_EXPIRY = (Long.MAX_VALUE >> 1); // 150 years
+  /** The duration to wait on the eviction lock before warning of a possible misuse. */
+  static final long WARN_AFTER_LOCK_WAIT_NANOS = TimeUnit.SECONDS.toNanos(30);
 
   /** The handle for the in-flight refresh operations. */
   static final VarHandle REFRESHES;
@@ -1469,15 +1471,43 @@ void afterWrite(Runnable task) {
       scheduleDrainBuffers();
     }
 
-    // The maintenance task may be scheduled but not running. This might occur due to all of the
-    // executor's threads being busy (perhaps writing into this cache), the write rate greatly
-    // exceeds the consuming rate, priority inversion, or if the executor silently discarded the
-    // maintenance task. In these scenarios then the writing threads cannot make progress and
-    // instead writers provide assistance by performing this work directly.
+    // In scenarios where the writing threads cannot make progress, they attempt to provide
+    // assistance by performing the eviction work directly. This can resolve cases where the
+    // maintenance task is scheduled but not running. That might occur due to all of the executor's
+    // threads being busy (perhaps writing into this cache), the write rate greatly exceeding the
+    // consuming rate, priority inversion, or the executor silently discarding the maintenance
+    // task. Unfortunately this cannot help when eviction is blocked waiting on a long-running
+    // computation due to an eviction listener, the victim being computed on by another write, or
+    // the victim residing in the same hash bin as a computing entry. In those cases a warning is
+    // logged to encourage the application to decouple these computations from the map operations.
+    lock();
     try {
-      performCleanUp(task);
+      maintenance(task);
     } catch (RuntimeException e) {
       logger.log(Level.ERROR, "Exception thrown when performing the maintenance task", e);
+    } finally {
+      evictionLock.unlock();
+    }
+  }
+
+  /** Acquires the eviction lock. */
+  void lock() {
+    long remainingNanos = WARN_AFTER_LOCK_WAIT_NANOS;
+    long end = System.nanoTime() + remainingNanos;
+    for (;;) {
+      try {
+        if (evictionLock.tryLock(remainingNanos, TimeUnit.NANOSECONDS)) {
+          return;
+        }
+        logger.log(Level.WARNING, "The cache is experiencing excessive wait times for acquiring "
+            + "the eviction lock. This may indicate that a long-running computation has halted "
+            + "eviction when trying to remove the victim entry. Consider using AsyncCache to "
+            + "decouple the computation from the map operation.", new TimeoutException());
+        evictionLock.lock();
+        return;
+      } catch (InterruptedException e) {
+        remainingNanos = end - System.nanoTime();
+      }
     }
   }
 
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/BoundedLocalCacheTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/BoundedLocalCacheTest.java
index e230c3a7d6..3cbe299ac4 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/BoundedLocalCacheTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/BoundedLocalCacheTest.java
@@ -21,6 +21,7 @@
 import static com.github.benmanes.caffeine.cache.BLCHeader.DrainStatusRef.REQUIRED;
 import static com.github.benmanes.caffeine.cache.BoundedLocalCache.EXPIRE_WRITE_TOLERANCE;
 import static com.github.benmanes.caffeine.cache.BoundedLocalCache.PERCENT_MAIN_PROTECTED;
+import static com.github.benmanes.caffeine.cache.BoundedLocalCache.WARN_AFTER_LOCK_WAIT_NANOS;
 import static com.github.benmanes.caffeine.cache.BoundedLocalCache.WRITE_BUFFER_MAX;
 import static com.github.benmanes.caffeine.cache.RemovalCause.COLLECTED;
 import static com.github.benmanes.caffeine.cache.RemovalCause.EXPIRED;
@@ -41,6 +42,7 @@
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
+import static uk.org.lidalia.slf4jext.ConventionalLevelHierarchy.WARN_LEVELS;
 
 import java.lang.Thread.State;
 import java.lang.ref.Reference;
@@ -74,6 +76,7 @@
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.InitialCapacity;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener;
+import com.github.benmanes.caffeine.cache.testing.CacheSpec.Loader;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Maximum;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.ReferenceType;
@@ -81,8 +84,11 @@
 import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
 import com.github.benmanes.caffeine.testing.ConcurrentTestHarness;
 import com.github.benmanes.caffeine.testing.Int;
+import com.github.valfirst.slf4jtest.TestLogger;
+import com.github.valfirst.slf4jtest.TestLoggerFactory;
 import com.google.common.collect.Iterables;
 import com.google.common.testing.GcFinalization;
+import com.google.common.util.concurrent.Uninterruptibles;
 
 /**
  * The test cases for the implementation details of {@link BoundedLocalCache}.
@@ -1056,6 +1062,52 @@ public void put_expireTolerance_expiry(BoundedLocalCache<Int, Int> cache, CacheC
     assertThat(cache.writeBuffer.producerIndex).isEqualTo(8);
   }
 
+  @Test(dataProvider = "caches", groups = "slow")
+  @CacheSpec(implementation = Implementation.Caffeine, population = Population.EMPTY,
+      refreshAfterWrite = Expire.DISABLED, expireAfterAccess = Expire.DISABLED,
+      expireAfterWrite = Expire.DISABLED, expiry = CacheExpiry.DISABLED,
+      maximumSize = Maximum.UNREACHABLE, weigher = CacheWeigher.DEFAULT,
+      compute = Compute.SYNC, loader = Loader.DISABLED, stats = Stats.DISABLED,
+      removalListener = Listener.DEFAULT, evictionListener = Listener.DEFAULT,
+      keys = ReferenceType.STRONG, values = ReferenceType.STRONG)
+  public void put_warnIfEvictionBlocked(BoundedLocalCache<Int, Int> cache, CacheContext context) {
+    var testLogger = new AtomicReference<TestLogger>();
+    var thread = new AtomicReference<Thread>();
+    var done = new AtomicBoolean();
+    cache.evictionLock.lock();
+    try {
+      ConcurrentTestHarness.execute(() -> {
+        var logger = TestLoggerFactory.getTestLogger(BoundedLocalCache.class);
+        logger.setEnabledLevels(WARN_LEVELS);
+        thread.set(Thread.currentThread());
+        testLogger.set(logger);
+
+        for (int i = 0; true; i++) {
+          if (done.get()) {
+            return;
+          }
+          cache.put(Int.valueOf(i), Int.valueOf(i));
+        }
+      });
+
+      var halfWaitTime = Duration.ofNanos(WARN_AFTER_LOCK_WAIT_NANOS / 2);
+      await().until(cache.evictionLock::hasQueuedThreads);
+      thread.get().interrupt();
+
+      Uninterruptibles.sleepUninterruptibly(halfWaitTime);
+      assertThat(cache.evictionLock.hasQueuedThreads()).isTrue();
+      assertThat(testLogger.get().getAllLoggingEvents()).isEmpty();
+
+      Uninterruptibles.sleepUninterruptibly(halfWaitTime);
+      await().until(() -> !testLogger.get().getAllLoggingEvents().isEmpty());
+
+      assertThat(cache.evictionLock.hasQueuedThreads()).isTrue();
+    } finally {
+      done.set(true);
+      cache.evictionLock.unlock();
+    }
+  }
+
   @Test(dataProvider = "caches")
   @CacheSpec(compute = Compute.SYNC, population = Population.EMPTY,
       scheduler = CacheScheduler.MOCKITO, expiryTime = Expire.ONE_MINUTE,
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java
index 3356df17d1..84f4b0df0b 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/LoadingCacheTest.java
@@ -710,7 +710,7 @@ public void refresh_cancel_noLog(CacheContext context) {
     LoadingCache<Int, Int> cache = context.isAsync()
         ? context.buildAsync(cacheLoader).synchronous()
         : context.build(cacheLoader);
-    TestLoggerFactory.getAllTestLoggers().values().stream()
+    TestLoggerFactory.getAllTestLoggers().values()
         .forEach(logger -> logger.setEnabledLevels(INFO_LEVELS));
 
     cache.refresh(context.absentKey());
@@ -734,7 +734,7 @@ public void refresh_timeout_noLog(CacheContext context) {
     LoadingCache<Int, Int> cache = context.isAsync()
         ? context.buildAsync(cacheLoader).synchronous()
         : context.build(cacheLoader);
-    TestLoggerFactory.getAllTestLoggers().values().stream()
+    TestLoggerFactory.getAllTestLoggers().values()
         .forEach(logger -> logger.setEnabledLevels(INFO_LEVELS));
 
     cache.refresh(context.absentKey());
@@ -749,7 +749,7 @@ public void refresh_error_log(CacheContext context) throws Exception {
     LoadingCache<Int, Int> cache = context.isAsync()
         ? context.buildAsync(cacheLoader).synchronous()
         : context.build(cacheLoader);
-    TestLoggerFactory.getAllTestLoggers().values().stream()
+    TestLoggerFactory.getAllTestLoggers().values()
         .forEach(logger -> logger.setEnabledLevels(INFO_LEVELS));
 
     cache.refresh(context.absentKey());
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java
index 386a490909..1775a81733 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/RefreshAfterWriteTest.java
@@ -271,7 +271,7 @@ public void refreshIfNeeded_cancel_noLog(CacheContext context) {
         ? context.buildAsync(cacheLoader).synchronous()
         : context.build(cacheLoader);
     cache.put(context.absentKey(), context.absentValue());
-    TestLoggerFactory.getAllTestLoggers().values().stream()
+    TestLoggerFactory.getAllTestLoggers().values()
         .forEach(logger -> logger.setEnabledLevels(INFO_LEVELS));
 
     context.ticker().advance(2, TimeUnit.MINUTES);
@@ -299,7 +299,7 @@ public void refreshIfNeeded_timeout_noLog(CacheContext context) {
         ? context.buildAsync(cacheLoader).synchronous()
         : context.build(cacheLoader);
     cache.put(context.absentKey(), context.absentValue());
-    TestLoggerFactory.getAllTestLoggers().values().stream()
+    TestLoggerFactory.getAllTestLoggers().values()
         .forEach(logger -> logger.setEnabledLevels(INFO_LEVELS));
 
     context.ticker().advance(2, TimeUnit.MINUTES);
@@ -317,7 +317,7 @@ public void refreshIfNeeded_error_log(CacheContext context) {
         ? context.buildAsync(cacheLoader).synchronous()
         : context.build(cacheLoader);
     cache.put(context.absentKey(), context.absentValue());
-    TestLoggerFactory.getAllTestLoggers().values().stream()
+    TestLoggerFactory.getAllTestLoggers().values()
         .forEach(logger -> logger.setEnabledLevels(INFO_LEVELS));
 
     context.ticker().advance(2, TimeUnit.MINUTES);
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java
index f8009139d6..1bfb7025fb 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheContext.java
@@ -103,7 +103,7 @@ public final class CacheContext {
   final Loader loader;
   final Stats stats;
 
-  final boolean isAsyncLoading;
+  final boolean isAsyncLoader;
 
   CacheBuilder<Object, Object> guava;
   Caffeine<Object, Object> caffeine;
@@ -125,9 +125,8 @@ public CacheContext(InitialCapacity initialCapacity, Stats stats, CacheWeigher w
       Maximum maximumSize, CacheExpiry expiryType, Expire afterAccess, Expire afterWrite,
       Expire refresh, ReferenceType keyStrength, ReferenceType valueStrength,
       CacheExecutor cacheExecutor, CacheScheduler cacheScheduler, Listener removalListenerType,
-      Listener evictionListenerType, Population population, boolean isLoading,
-      boolean isAsyncLoading, Compute compute, Loader loader, Implementation implementation,
-      CacheSpec cacheSpec) {
+      Listener evictionListenerType, Population population, boolean isAsyncLoader, Compute compute,
+      Loader loader, Implementation implementation, CacheSpec cacheSpec) {
     this.initialCapacity = requireNonNull(initialCapacity);
     this.stats = requireNonNull(stats);
     this.weigher = requireNonNull(weigher);
@@ -146,8 +145,8 @@ public CacheContext(InitialCapacity initialCapacity, Stats stats, CacheWeigher w
     this.evictionListenerType = evictionListenerType;
     this.evictionListener = evictionListenerType.create();
     this.population = requireNonNull(population);
-    this.loader = isLoading ? requireNonNull(loader) : null;
-    this.isAsyncLoading = isAsyncLoading;
+    this.loader = requireNonNull(loader);
+    this.isAsyncLoader = isAsyncLoader;
     this.ticker = new SerializableFakeTicker();
     this.implementation = requireNonNull(implementation);
     this.original = new LinkedHashMap<>();
@@ -345,11 +344,11 @@ public boolean isSoftValues() {
   }
 
   public boolean isLoading() {
-    return (loader != null);
+    return (loader != Loader.DISABLED);
   }
 
-  public boolean isAsyncLoading() {
-    return isAsyncLoading;
+  public boolean isAsyncLoader() {
+    return isAsyncLoader;
   }
 
   public Loader loader() {
@@ -494,7 +493,7 @@ public String toString() {
         .add("valueStrength", valueStrength)
         .add("compute", compute)
         .add("loader", loader)
-        .add("isAsyncLoading", isAsyncLoading)
+        .add("isAsyncLoader", isAsyncLoader)
         .add("cacheExecutor", cacheExecutor)
         .add("cacheScheduler", cacheScheduler)
         .add("removalListener", removalListenerType)
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheGenerator.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheGenerator.java
index b494007404..9009ff3d7a 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheGenerator.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheGenerator.java
@@ -15,8 +15,6 @@
  */
 package com.github.benmanes.caffeine.cache.testing;
 
-import static com.google.common.collect.ImmutableSet.toImmutableSet;
-
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -90,7 +88,8 @@ public static void initialize(CacheContext context) {
 
   /** Returns the Cartesian set of the possible cache configurations. */
   private Set<List<Object>> combinations() {
-    var asyncLoading = ImmutableSet.of(true, false);
+    var asyncLoader = ImmutableSet.of(true, false);
+    var loaders = ImmutableSet.copyOf(cacheSpec.loader());
     var keys = filterTypes(options.keys(), cacheSpec.keys());
     var values = filterTypes(options.values(), cacheSpec.values());
     var statistics = filterTypes(options.stats(), cacheSpec.stats());
@@ -101,15 +100,17 @@ private Set<List<Object>> combinations() {
       values = values.contains(ReferenceType.STRONG)
           ? ImmutableSet.of(ReferenceType.STRONG)
           : ImmutableSet.of();
-      computations = Sets.filter(computations, Compute.ASYNC::equals);
+      computations = Sets.intersection(computations, Set.of(Compute.ASYNC));
     }
     if (!isGuavaCompatible || isAsyncOnly || computations.equals(ImmutableSet.of(Compute.ASYNC))) {
-      implementations = implementations.stream()
-          .filter(implementation -> implementation != Implementation.Guava)
-          .collect(toImmutableSet());
+      implementations = Sets.difference(implementations, Set.of(Implementation.Guava));
     }
     if (computations.equals(ImmutableSet.of(Compute.SYNC))) {
-      asyncLoading = ImmutableSet.of(false);
+      asyncLoader = ImmutableSet.of(false);
+    }
+
+    if (isLoadingOnly) {
+      loaders = Sets.difference(loaders, Set.of(Loader.DISABLED)).immutableCopy();
     }
 
     if (computations.isEmpty() || implementations.isEmpty()
@@ -132,17 +133,16 @@ private Set<List<Object>> combinations() {
         ImmutableSet.copyOf(cacheSpec.removalListener()),
         ImmutableSet.copyOf(cacheSpec.evictionListener()),
         ImmutableSet.copyOf(cacheSpec.population()),
-        ImmutableSet.of(true, isLoadingOnly),
-        ImmutableSet.copyOf(asyncLoading),
+        ImmutableSet.copyOf(asyncLoader),
         ImmutableSet.copyOf(computations),
-        ImmutableSet.copyOf(cacheSpec.loader()),
+        ImmutableSet.copyOf(loaders),
         ImmutableSet.copyOf(implementations));
   }
 
   /** Returns the set of options filtered if a specific type is specified. */
   private static <T> Set<T> filterTypes(Optional<T> type, T[] options) {
     return type.isPresent()
-        ? type.filter(List.of(options)::contains).stream().collect(toImmutableSet())
+        ? Sets.intersection(Set.of(options), Set.of(type.orElseThrow()))
         : ImmutableSet.copyOf(options);
   }
 
@@ -166,7 +166,6 @@ private CacheContext newCacheContext(List<Object> combination) {
         (Listener) combination.get(index++),
         (Population) combination.get(index++),
         (Boolean) combination.get(index++),
-        (Boolean) combination.get(index++),
         (Compute) combination.get(index++),
         (Loader) combination.get(index++),
         (Implementation) combination.get(index++),
@@ -177,7 +176,7 @@ private CacheContext newCacheContext(List<Object> combination) {
   private boolean isCompatible(CacheContext context) {
     boolean asyncIncompatible = context.isAsync()
         && (!context.isCaffeine() || !context.isStrongValues());
-    boolean asyncLoaderIncompatible = context.isAsyncLoading()
+    boolean asyncLoaderIncompatible = context.isAsyncLoader()
         && (!context.isAsync() || !context.isLoading());
     boolean refreshIncompatible = context.refreshes() && !context.isLoading();
     boolean weigherIncompatible = context.isUnbounded() && context.isWeighted();
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheSpec.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheSpec.java
index d472b1d823..4f1275141e 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheSpec.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CacheSpec.java
@@ -411,11 +411,18 @@ public RemovalListener create() {
   /* --------------- CacheLoader --------------- */
 
   Loader[] loader() default {
+    Loader.DISABLED,
     Loader.NEGATIVE,
   };
 
   /** The {@link CacheLoader} for constructing the {@link LoadingCache}. */
   enum Loader implements CacheLoader<Int, Int> {
+    /** A flag indicating that a loader should not be configured. */
+    DISABLED {
+      @Override public Int load(Int key) {
+        throw new AssertionError();
+      }
+    },
     /** A loader that always returns null (no mapping). */
     NULL {
       @Override public Int load(Int key) {
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java
index bbae11894d..80426caced 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/CaffeineCacheFromContext.java
@@ -28,6 +28,7 @@
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheWeigher;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.InitialCapacity;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener;
+import com.github.benmanes.caffeine.cache.testing.CacheSpec.Loader;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Maximum;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.ReferenceType;
 
@@ -99,14 +100,14 @@ public static <K, V> Cache<K, V> newCaffeineCache(CacheContext context) {
       builder.evictionListener(context.evictionListener());
     }
     if (context.isAsync()) {
-      if (context.loader() == null) {
+      if (context.loader() == Loader.DISABLED) {
         context.asyncCache = builder.buildAsync();
       } else {
         context.asyncCache = builder.buildAsync(
-            context.isAsyncLoading() ? context.loader().async() : context.loader());
+            context.isAsyncLoader() ? context.loader().async() : context.loader());
       }
       context.cache = context.asyncCache.synchronous();
-    } else if (context.loader() == null) {
+    } else if (context.loader() == Loader.DISABLED) {
       context.cache = builder.build();
     } else {
       context.cache = builder.build(context.loader());
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/GuavaCacheFromContext.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/GuavaCacheFromContext.java
index 97e19fbeb0..d9556e1c2a 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/GuavaCacheFromContext.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/testing/GuavaCacheFromContext.java
@@ -47,6 +47,7 @@
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Expire;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.InitialCapacity;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener;
+import com.github.benmanes.caffeine.cache.testing.CacheSpec.Loader;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.Maximum;
 import com.github.benmanes.caffeine.cache.testing.CacheSpec.ReferenceType;
 import com.github.benmanes.caffeine.testing.Int;
@@ -123,7 +124,7 @@ public static <K, V> Cache<K, V> newGuavaCache(CacheContext context) {
       builder.removalListener(new GuavaRemovalListener<>(
           translateZeroExpire, context.removalListener()));
     }
-    if (context.loader() == null) {
+    if (context.loader() == Loader.DISABLED) {
       context.cache = new GuavaCache<>(builder.build(), context);
     } else if (context.loader().isBulk()) {
       var loader = new BulkLoader(context.loader());
diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle
index 46f846633d..a9175f220b 100644
--- a/gradle/dependencies.gradle
+++ b/gradle/dependencies.gradle
@@ -62,7 +62,7 @@ ext {
     univocityParsers: '2.9.1',
     ycsb: '0.17.0',
     xz: '1.9',
-    zstd: '1.5.2-1',
+    zstd: '1.5.2-2',
   ]
   testVersions = [
     awaitility: '4.1.1',
@@ -80,12 +80,12 @@ ext {
     testng: '7.5',
     truth: '1.1.3',
     felix: '7.0.3',
-    felixScr: '2.2.0-RC1',
+    felixScr: '2.2.0',
     osgiUtilFunction: '1.2.0',
     osgiUtilPromise: '1.2.0',
   ]
   pluginVersions = [
-    bnd: '6.1.0',
+    bnd: '6.2.0',
     checkstyle: '9.3',
     coveralls: '2.12.0',
     errorprone: '2.0.2',
@@ -95,7 +95,7 @@ ext {
     jmhReport: '0.9.0',
     nexusPublish: '1.1.0',
     nullaway: '1.3.0',
-    pmd: '6.42.0',
+    pmd: '6.43.0',
     semanticVersioning: '1.1.0',
     shadow: '7.1.2',
     snyke: '0.4',
diff --git a/gradle/jmh.gradle b/gradle/jmh.gradle
index 5126e745cd..1820c55b1b 100644
--- a/gradle/jmh.gradle
+++ b/gradle/jmh.gradle
@@ -1,6 +1,8 @@
 /**
  * Java microbenchmark harness: https://github.com/melix/jmh-gradle-plugin
  */
+import org.gradle.plugins.ide.eclipse.model.Library
+
 apply plugin: 'com.github.johnrengelman.shadow'
 apply plugin: 'io.morethan.jmhreport'
 apply plugin: 'me.champeau.jmh'
@@ -11,6 +13,7 @@ idea.module {
 
 eclipse.classpath.file.whenMerged {
   entries.find { it.path == 'src/jmh/java' }.entryAttributes['test'] = 'true'
+  entries.removeIf { (it instanceof Library) && (it.moduleVersion?.name == 'slf4j-nop') }
 }
 
 configurations {