diff --git a/apollo-cache-interceptor/src/commonMain/kotlin/com/apollographql/apollo/interceptor/cache/CacheExecutionContext.kt b/apollo-cache-interceptor/src/commonMain/kotlin/com/apollographql/apollo/interceptor/cache/CacheExecutionContext.kt index 64af5f6bc30..e083b05abec 100644 --- a/apollo-cache-interceptor/src/commonMain/kotlin/com/apollographql/apollo/interceptor/cache/CacheExecutionContext.kt +++ b/apollo-cache-interceptor/src/commonMain/kotlin/com/apollographql/apollo/interceptor/cache/CacheExecutionContext.kt @@ -76,4 +76,4 @@ val Response.fromCache @ApolloExperimental fun ApolloClient.Builder.normalizedCache(normalizedCache: NormalizedCache): ApolloClient.Builder { return addInterceptor(ApolloCacheInterceptor(ApolloStore(normalizedCache))) -} \ No newline at end of file +} diff --git a/apollo-normalized-cache/build.gradle.kts b/apollo-normalized-cache/build.gradle.kts index af668f55e5e..a5057f7bfdf 100644 --- a/apollo-normalized-cache/build.gradle.kts +++ b/apollo-normalized-cache/build.gradle.kts @@ -1,6 +1,7 @@ plugins { `java-library` kotlin("multiplatform") + id("kotlinx-atomicfu") } kotlin { @@ -27,16 +28,23 @@ kotlin { dependencies { api(project(":apollo-api")) api(project(":apollo-normalized-cache-api")) + implementation(groovy.util.Eval.x(project, "x.dep.kotlin.atomic")) } } val jvmMain by getting { - dependsOn(commonMain) dependencies { implementation(groovy.util.Eval.x(project, "x.dep.cache")) } } + val iosMain by getting { + } + + val iosSimMain by getting { + dependsOn(iosMain) + } + val commonTest by getting { dependencies { implementation(kotlin("test-common")) @@ -45,17 +53,11 @@ kotlin { } val jvmTest by getting { - dependsOn(jvmMain) dependencies { implementation(kotlin("test-junit")) implementation(groovy.util.Eval.x(project, "x.dep.junit")) implementation(groovy.util.Eval.x(project, "x.dep.truth")) } } - } } - -metalava { - hiddenPackages += setOf("com.apollographql.apollo.cache.normalized.internal") -} diff --git 
package com.apollographql.apollo.cache.normalized

import com.apollographql.apollo.api.internal.json.JsonReader
import com.apollographql.apollo.cache.ApolloCacheHeaders
import com.apollographql.apollo.cache.CacheHeaders
import com.apollographql.apollo.cache.normalized.internal.LruCache
import com.apollographql.apollo.cache.normalized.internal.MapJsonReader
import com.apollographql.apollo.cache.normalized.internal.Platform
import okio.internal.commonAsUtf8ToByteArray
import kotlin.reflect.KClass

/**
 * Memory (multiplatform) cache implementation based on recently used property (LRU).
 *
 * [maxSizeBytes] - the maximum size in bytes the cache may occupy.
 * [expireAfterMillis] - timeout after which an entry in the cache is treated as expired.
 * A negative value (the default) disables expiration.
 *
 * Expired entries are evicted lazily, only when a [loadRecord] call hits them; no background
 * clean-up (GC) is performed.
 */
class MemoryCache(
    private val maxSizeBytes: Int,
    private val expireAfterMillis: Long = -1,
) : NormalizedCache() {
  // Weight of an entry = UTF-8 byte size of its key + estimated byte size of the cached record.
  private val lruCache = LruCache<String, CacheEntry>(maxSize = maxSizeBytes) { key, entry ->
    key.commonAsUtf8ToByteArray().size + (entry?.sizeInBytes ?: 0)
  }

  // Current total weight (bytes) of this cache level only, excluding chained caches.
  val size: Int
    get() = lruCache.size()

  override fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? {
    val entry = lruCache[key] ?: return nextCache?.loadRecord(key, cacheHeaders)

    if (entry.isExpired) {
      // Lazy expiration: drop the stale entry and fall through to the next cache in the chain.
      lruCache.remove(key)
      return nextCache?.loadRecord(key, cacheHeaders)
    }

    if (cacheHeaders.hasHeader(ApolloCacheHeaders.EVICT_AFTER_READ)) {
      lruCache.remove(key)
    }
    return entry.record
  }

  override fun clearAll() {
    lruCache.clear()
    nextCache?.clearAll()
  }

  override fun remove(cacheKey: CacheKey, cascade: Boolean): Boolean {
    val removedEntry = lruCache.remove(cacheKey.key)

    if (cascade && removedEntry != null) {
      // Recursively remove every record referenced by the removed one.
      removedEntry.record.referencedFields().forEach { reference ->
        remove(CacheKey(reference.key), true)
      }
    }

    val removedFromNext = nextCache?.remove(cacheKey, cascade) ?: false
    return removedEntry != null || removedFromNext
  }

  override fun performMerge(apolloRecord: Record, oldRecord: Record?, cacheHeaders: CacheHeaders): Set<String> {
    if (oldRecord == null) {
      lruCache[apolloRecord.key] = CacheEntry(
          record = apolloRecord,
          expireAfterMillis = expireAfterMillis
      )
      return apolloRecord.keys()
    }

    val changedKeys = oldRecord.mergeWith(apolloRecord)
    // Re-insert the (mutated) old record so the LRU recomputes its weight.
    lruCache[apolloRecord.key] = CacheEntry(
        record = oldRecord,
        expireAfterMillis = expireAfterMillis
    )
    return changedKeys
  }

  override fun dump(): Map<KClass<*>, Map<String, Record>> {
    val result = mutableMapOf<KClass<*>, Map<String, Record>>()
    result[this::class] = lruCache.dump().mapValues { (_, entry) -> entry.record }
    nextCache?.dump()?.let(result::putAll)
    return result
  }

  // Clears only this cache level, leaving chained caches untouched.
  internal fun clearCurrentCache() {
    lruCache.clear()
  }

  override fun stream(key: String, cacheHeaders: CacheHeaders): JsonReader? {
    return loadRecord(key, cacheHeaders)?.let(::MapJsonReader)
  }

  private class CacheEntry(
      val record: Record,
      val expireAfterMillis: Long,
  ) {
    val cachedAtMillis: Long = Platform.currentTimeMillis()

    // Estimated record size plus 8 bytes for the stored timestamp.
    val sizeInBytes: Int = record.sizeEstimateBytes() + 8

    // Never expires when expireAfterMillis is negative (the default).
    val isExpired: Boolean
      get() = expireAfterMillis >= 0 && Platform.currentTimeMillis() - cachedAtMillis >= expireAfterMillis
  }
}
package com.apollographql.apollo.cache.normalized.internal

import kotlinx.atomicfu.locks.reentrantLock
import kotlinx.atomicfu.locks.withLock

internal typealias Weigher<Key, Value> = (Key, Value?) -> Int

/**
 * Multiplatform LRU cache implementation.
 *
 * Implementation is based on usage of [LinkedHashMap] as a container for the cache and a custom
 * doubly linked queue to track the LRU property.
 *
 * [maxSize] - maximum size of the cache; can be anything: bytes, number of entries etc. By default it is the number of entries.
 * [weigher] - called to calculate the estimated size (weight) of the cache entry defined by its [Key] and [Value].
 * By default it returns 1.
 *
 * This implementation is thread safe, guaranteed by a global lock used for both read / write operations.
 *
 * Cache trimming is performed only on new entry insertion.
 */
internal class LruCache<Key, Value>(
    private val maxSize: Int,
    private val weigher: Weigher<Key, Value> = { _, _ -> 1 }
) {
  private val cache = LinkedHashMap<Key, Node<Key, Value>>(0, 0.75f)
  private var headNode: Node<Key, Value>? = null
  private var tailNode: Node<Key, Value>? = null
  private val lock = reentrantLock()
  private var size: Int = 0

  /** Returns the value for [key], marking it most recently used, or `null` on a miss. */
  operator fun get(key: Key): Value? {
    return lock.withLock {
      val node = cache[key]
      if (node != null) {
        moveNodeToHead(node)
      }
      node?.value
    }
  }

  /** Inserts or updates [key], marks it most recently used and trims the cache if over [maxSize]. */
  operator fun set(key: Key, value: Value) {
    lock.withLock {
      val node = cache[key]
      if (node == null) {
        cache[key] = addNode(key, value)
      } else {
        // BUGFIX: adjust the tracked weight when an existing entry is replaced. Previously the
        // old weight was kept, so the cache size drifted and trimming used stale numbers
        // (MemoryCache re-inserts entries precisely to trigger a weight recalculation).
        size += weigher(key, value) - weigher(key, node.value)
        node.value = value
        moveNodeToHead(node)
      }

      trim()
    }
  }

  /** Removes [key] and returns its value, or `null` if absent. */
  fun remove(key: Key): Value? {
    return lock.withLock {
      val nodeToRemove = cache.remove(key)
      val value = nodeToRemove?.value
      if (nodeToRemove != null) {
        unlinkNode(nodeToRemove)
      }
      value
    }
  }

  /** Removes all entries and resets the tracked size to 0. */
  fun clear() {
    lock.withLock {
      cache.clear()
      headNode = null
      tailNode = null
      size = 0
    }
  }

  /** Current total weight of the cache as computed by [weigher]. */
  fun size(): Int {
    return lock.withLock {
      size
    }
  }

  /** Snapshot of the cache contents in insertion order. */
  fun dump(): Map<Key, Value> {
    return lock.withLock {
      cache.mapValues { (_, node) -> node.value as Value }
    }
  }

  // Evicts least recently used entries (from the tail) until size fits maxSize.
  private fun trim() {
    var nodeToRemove = tailNode
    while (nodeToRemove != null && size > maxSize) {
      cache.remove(nodeToRemove.key)
      unlinkNode(nodeToRemove)
      nodeToRemove = tailNode
    }
  }

  // Links a brand new node in front of the current head and accounts for its weight.
  private fun addNode(key: Key, value: Value?): Node<Key, Value> {
    val node = Node(
        key = key,
        value = value,
        next = headNode,
        prev = null,
    )

    headNode = node

    if (node.next == null) {
      // First node: it is both head and tail.
      tailNode = headNode
    } else {
      node.next?.prev = headNode
    }

    size += weigher(key, value)

    return node
  }

  // Moves an already-linked node to the head of the queue (most recently used position).
  private fun moveNodeToHead(node: Node<Key, Value>) {
    if (node.prev == null) {
      // Already the head.
      return
    }

    // Unlink the node from its current position.
    node.prev?.next = node.next
    if (node.next == null) {
      // BUGFIX: the node was the tail, so its predecessor becomes the new tail. Previously
      // tailNode kept pointing at the moved node, corrupting eviction order.
      tailNode = node.prev
    } else {
      node.next?.prev = node.prev
    }

    // Re-link it in front of the current head.
    // BUGFIX: link to the current head itself, not to headNode?.next — the old code skipped
    // the previous head, orphaning it from the forward chain.
    node.prev = null
    node.next = headNode
    headNode?.prev = node
    headNode = node
  }

  // Unlinks a node from the queue, subtracts its weight and clears its fields to help GC.
  private fun unlinkNode(node: Node<Key, Value>) {
    if (node.prev == null) {
      this.headNode = node.next
    } else {
      node.prev?.next = node.next
    }

    if (node.next == null) {
      this.tailNode = node.prev
    } else {
      node.next?.prev = node.prev
    }

    size -= weigher(node.key!!, node.value)

    node.key = null
    node.value = null
    node.next = null
    node.prev = null
  }

  private class Node<Key, Value>(
      var key: Key?,
      var value: Value?,
      var next: Node<Key, Value>?,
      var prev: Node<Key, Value>?,
  )
}
{ - val record = nextCache?.loadRecord(key, cacheHeaders) - if (record != null) { - return record - } - - return map.get(key) - } - - override fun stream(key: String, cacheHeaders: CacheHeaders): JsonReader? { - return map.get(key)?.let { MapJsonReader(it) } - } - - override fun performMerge(apolloRecord: Record, oldRecord: Record?, cacheHeaders: CacheHeaders): Set { - return map.getOrPut(apolloRecord.key, {apolloRecord}) - .mergeWith(apolloRecord) - } - - override fun clearAll() { - nextCache?.clearAll() - map.clear() - } - - override fun remove(cacheKey: CacheKey, cascade: Boolean): Boolean { - var result: Boolean = nextCache?.remove(cacheKey, cascade) ?: false - - val record = map.get(cacheKey.key) - if (record != null) { - map.remove(cacheKey.key) - result = true - if (cascade) { - for (cacheReference in record.referencedFields()) { - result = result && remove(CacheKey(cacheReference.key), true) - } - } - } - return result - } -} \ No newline at end of file diff --git a/apollo-normalized-cache/src/commonTest/kotlin/com/apollographql/apollo/cache/normalized/MemoryCacheTest.kt b/apollo-normalized-cache/src/commonTest/kotlin/com/apollographql/apollo/cache/normalized/MemoryCacheTest.kt new file mode 100644 index 00000000000..b19e899d400 --- /dev/null +++ b/apollo-normalized-cache/src/commonTest/kotlin/com/apollographql/apollo/cache/normalized/MemoryCacheTest.kt @@ -0,0 +1,284 @@ +package com.apollographql.apollo.cache.normalized + +import com.apollographql.apollo.cache.ApolloCacheHeaders +import com.apollographql.apollo.cache.CacheHeaders +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertNotNull +import kotlin.test.assertNull +import kotlin.test.assertTrue + +class MemoryCacheTest { + @Test + fun testSaveAndLoad_singleRecord() { + val lruCache = createCache() + val testRecord = createTestRecord("1") + lruCache.merge(testRecord, CacheHeaders.NONE) + + assertTestRecordPresentAndAccurate(testRecord, lruCache) + } + + @Test + fun 
testSaveAndLoad_multipleRecord_readSingle() { + val lruCache = createCache() + val testRecord1 = createTestRecord("1") + val testRecord2 = createTestRecord("2") + val testRecord3 = createTestRecord("3") + val records = listOf(testRecord1, testRecord2, testRecord3) + lruCache.merge(records, CacheHeaders.NONE) + + assertTestRecordPresentAndAccurate(testRecord1, lruCache) + assertTestRecordPresentAndAccurate(testRecord2, lruCache) + assertTestRecordPresentAndAccurate(testRecord3, lruCache) + } + + @Test + fun testSaveAndLoad_multipleRecord_readMultiple() { + val lruCache = createCache() + val testRecord1 = createTestRecord("1") + val testRecord2 = createTestRecord("2") + val testRecord3 = createTestRecord("3") + val records = listOf(testRecord1, testRecord2, testRecord3) + lruCache.merge(records, CacheHeaders.NONE) + + val readRecords = lruCache.loadRecords(listOf("key1", "key2", "key3"), CacheHeaders.NONE) + assertTrue(readRecords.containsAll(records)) + } + + @Test + fun testLoad_recordNotPresent() { + val lruCache = createCache() + val record = lruCache.loadRecord("key1", CacheHeaders.NONE) + assertNull(record) + } + + @Test + fun testEviction() { + val testRecord1 = createTestRecord("1") + val testRecord2 = createTestRecord("2") + val testRecord3 = createTestRecord("3") + + val lruCache = createCache( + // all records won't fit as there is timestamp that stored with each record + maxSizeBytes = 200 + ) + + val records = listOf(testRecord1, testRecord2, testRecord3) + lruCache.merge(records, CacheHeaders.NONE) + + //Cache does not reveal exactly how it handles eviction, but appears + //to evict more than is strictly necessary. Regardless, any sane eviction + //strategy should leave the third record in this test case, and evict the first record. 
+ assertNull(lruCache.loadRecord(testRecord1.key, CacheHeaders.NONE)) + assertNotNull(lruCache.loadRecord(testRecord3.key, CacheHeaders.NONE)) + } + + @Test + fun testEviction_recordChange() { + val testRecord1 = createTestRecord("1") + val testRecord2 = createTestRecord("2") + val testRecord3 = createTestRecord("3") + + val lruCache = createCache( + maxSizeBytes = 240 + ) + + val records = listOf(testRecord1, testRecord2, testRecord3) + lruCache.merge(records, CacheHeaders.NONE) + + assertNotNull(lruCache.loadRecord(testRecord1.key, CacheHeaders.NONE)) + assertNotNull(lruCache.loadRecord(testRecord2.key, CacheHeaders.NONE)) + assertNotNull(lruCache.loadRecord(testRecord3.key, CacheHeaders.NONE)) + + val updatedRestRecord1 = testRecord1.toBuilder().addField("field3", "value3").build() + lruCache.merge(updatedRestRecord1, CacheHeaders.NONE) + + assertNotNull(lruCache.loadRecord(testRecord1.key, CacheHeaders.NONE)) + assertNotNull(lruCache.loadRecord(testRecord2.key, CacheHeaders.NONE)) + assertNotNull(lruCache.loadRecord(testRecord3.key, CacheHeaders.NONE)) + } + + @Test + fun testExpiration() { + val testRecord = createTestRecord("") + val lruCache = createCache(expireAfterMillis = 0) + lruCache.merge(testRecord, CacheHeaders.NONE) + + assertNull(lruCache.loadRecord(testRecord.key, CacheHeaders.NONE)) + } + + @Test + fun testDualCacheSingleRecord() { + val secondaryCache = createCache() + val primaryCache = createCache().chain(secondaryCache) + + val mockRecord = createTestRecord("") + primaryCache.merge(mockRecord, CacheHeaders.NONE) + + //verify write through behavior + assertEquals(mockRecord.fields, primaryCache.loadRecord(mockRecord.key, CacheHeaders.NONE)?.fields) + assertEquals(mockRecord.fields, secondaryCache.loadRecord(mockRecord.key, CacheHeaders.NONE)?.fields) + } + + @Test + fun testDualCacheMultipleRecord() { + val secondaryCache = createCache() + val primaryCache = createCache().chain(secondaryCache) + + val testRecord1 = createTestRecord("1") + val 
testRecord2 = createTestRecord("2") + val testRecord3 = createTestRecord("3") + val records = listOf(testRecord1, testRecord2, testRecord3) + primaryCache.merge(records, CacheHeaders.NONE) + + val keys = listOf(testRecord1.key, testRecord2.key, testRecord3.key) + assertEquals(3, primaryCache.loadRecords(keys, CacheHeaders.NONE).size) + assertEquals(3, secondaryCache.loadRecords(keys, CacheHeaders.NONE).size) + } + + @Test + fun testDualCache_recordNotPresent() { + val secondaryCache = createCache() + val primaryCache = createCache().chain(secondaryCache) + assertNull(primaryCache.loadRecord("key", CacheHeaders.NONE)) + } + + + @Test + fun testDualCache_clearAll() { + val secondaryCache = createCache() + val primaryCache = createCache().chain(secondaryCache) as MemoryCache + + val testRecord1 = createTestRecord("1") + val testRecord2 = createTestRecord("2") + val testRecord3 = createTestRecord("3") + val records = listOf(testRecord1, testRecord2, testRecord3) + primaryCache.merge(records, CacheHeaders.NONE) + + primaryCache.clearAll() + + assertEquals(0, primaryCache.size) + assertEquals(0, secondaryCache.size) + } + + @Test + fun testDualCache_readFromNext() { + val secondaryCache = createCache() + val primaryCache = createCache().chain(secondaryCache) as MemoryCache + + val testRecord = createTestRecord("") + primaryCache.merge(testRecord, CacheHeaders.NONE) + + primaryCache.clearCurrentCache() + + assertEquals(testRecord.fields, primaryCache.loadRecord(testRecord.key, CacheHeaders.NONE)?.fields) + } + + + // Tests for StandardCacheHeader compliance. 
+ @Test + fun testHeader_evictAfterRead() { + val lruCache = createCache() + val testRecord = createTestRecord("1") + + lruCache.merge(testRecord, CacheHeaders.NONE) + + val headers = CacheHeaders.builder().addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() + + assertNotNull(lruCache.loadRecord(testRecord.key, headers)) + assertNull(lruCache.loadRecord(testRecord.key, headers)) + } + + @Test + fun testHeader_noCache() { + val lruCache = createCache() + val testRecord = createTestRecord("1") + + val headers = CacheHeaders.builder().addHeader(ApolloCacheHeaders.DO_NOT_STORE, "true").build() + + lruCache.merge(testRecord, headers) + + assertNull(lruCache.loadRecord(testRecord.key, headers)) + } + + @Test + fun testDump() { + val lruCache = createCache() + + val testRecord1 = createTestRecord("1") + val testRecord2 = createTestRecord("2") + val testRecord3 = createTestRecord("3") + val records = listOf(testRecord1, testRecord2, testRecord3) + lruCache.merge(records, CacheHeaders.NONE) + + with(lruCache.dump()) { + val cache = this[MemoryCache::class]!! 
+ + assertTrue(cache.keys.containsAll(records.map { it.key })) + assertEquals(testRecord1, cache[testRecord1.key]) + assertEquals(testRecord2, cache[testRecord2.key]) + assertEquals(testRecord3, cache[testRecord3.key]) + } + } + + + @Test + fun testRemove_cascadeFalse() { + val lruCache = createCache() + + val record1 = Record.builder("id_1") + .addField("a", "stringValueA") + .addField("b", "stringValueB") + .build() + + val record2 = Record.builder("id_2") + .addField("a", CacheReference("id_1")) + .build() + + val records = listOf(record1, record2) + lruCache.merge(records, CacheHeaders.NONE) + + assertTrue(lruCache.remove(CacheKey(record2.key), cascade = false)) + assertNotNull(lruCache.loadRecord(record1.key, CacheHeaders.NONE)) + } + + @Test + fun testRemove_cascadeTrue() { + val lruCache = createCache() + + val record1 = Record.builder("id_1") + .addField("a", "stringValueA") + .addField("b", "stringValueB") + .build() + + val record2 = Record.builder("id_2") + .addField("a", CacheReference("id_1")) + .build() + + val records = listOf(record1, record2) + lruCache.merge(records, CacheHeaders.NONE) + + assertTrue(lruCache.remove(CacheKey(record2.key), cascade = true)) + assertNull(lruCache.loadRecord(record1.key, CacheHeaders.NONE)) + } + + private fun createCache( + maxSizeBytes: Int = 10 * 1024, + expireAfterMillis: Long = -1, + ): MemoryCache { + return MemoryCache(maxSizeBytes = maxSizeBytes, expireAfterMillis = expireAfterMillis) + } + + private fun assertTestRecordPresentAndAccurate(testRecord: Record, store: NormalizedCache) { + val cacheRecord = checkNotNull(store.loadRecord(testRecord.key, CacheHeaders.NONE)) + assertEquals(testRecord.key, cacheRecord.key) + assertEquals(testRecord.fields, cacheRecord.fields) + } + + private fun createTestRecord(id: String): Record { + return Record.builder("key$id") + .addField("field1", "stringValueA$id") + .addField("field2", "stringValueB$id") + .build() + } +} diff --git 
a/apollo-normalized-cache/src/commonTest/kotlin/com/apollographql/apollo/cache/normalized/internal/LruCacheTest.kt b/apollo-normalized-cache/src/commonTest/kotlin/com/apollographql/apollo/cache/normalized/internal/LruCacheTest.kt new file mode 100644 index 00000000000..4d87a18acd2 --- /dev/null +++ b/apollo-normalized-cache/src/commonTest/kotlin/com/apollographql/apollo/cache/normalized/internal/LruCacheTest.kt @@ -0,0 +1,182 @@ +package com.apollographql.apollo.cache.normalized.internal + +import kotlin.test.Test +import kotlin.test.assertEquals + +class LruCacheTest { + @Test + fun emptyCache() { + val cache = LruCache(10) + + assertEquals(0, cache.size()) + assertEquals(null, cache["key"]) + assertEquals(null, cache.remove("key")) + assertEquals(mapOf(), cache.dump()) + + cache.clear() + assertEquals(0, cache.size()) + } + + @Test + fun addNewItemsToCache() { + val cache = LruCache(10) + + val expectedEntries = mapOf( + "key1" to "value1", + "key2" to "value2", + "key3" to null + ) + + expectedEntries.forEach { (key, value) -> + cache[key] = value + } + + assertEquals(3, cache.size()) + assertEquals(expectedEntries, cache.dump()) + } + + @Test + fun removeItemsFromCache() { + val cache = LruCache(10) + + mapOf( + "key1" to "value1", + "key2" to "value2", + "key3" to null + ).forEach { (key, value) -> + cache[key] = value + } + + assertEquals(null, cache.remove("key")) + assertEquals("value1", cache.remove("key1")) + assertEquals(null, cache.remove("key3")) + + assertEquals(1, cache.size()) + assertEquals(mapOf("key2" to "value2"), cache.dump()) + } + + @Test + fun clearCache() { + val cache = LruCache(10) + + mapOf( + "key1" to "value1", + "key2" to "value2", + "key3" to null + ).forEach { (key, value) -> + cache[key] = value + } + + cache.clear() + + assertEquals(0, cache.size()) + assertEquals(mapOf(), cache.dump()) + } + + @Test + fun trimCache() { + val cache = LruCache(2) + + mapOf( + "key1" to "value1", + "key2" to "value2", + "key3" to null + ).forEach { 
(key, value) -> + cache[key] = value + } + + assertEquals(2, cache.size()) + assertEquals( + mapOf( + "key2" to "value2", + "key3" to null + ), + cache.dump() + ) + } + + @Test + fun addItemToCacheWithCustomWeigher() { + val cache = LruCache(100) { key, value -> + key.length + (value?.length ?: 0) + } + + mapOf( + "key1" to "value1", + "key2" to "value2", + "key3" to null + ).forEach { (key, value) -> + cache[key] = value + } + + assertEquals(24, cache.size()) + } + + @Test + fun removeItemFromCacheWithCustomWeigher() { + val cache = LruCache(100) { key, value -> + key.length + (value?.length ?: 0) + } + + mapOf( + "key1" to "value1", + "key2" to "value2", + "key3" to null + ).forEach { (key, value) -> + cache[key] = value + } + + cache.remove("key2") + cache.remove("key3") + + assertEquals(10, cache.size()) + } + + @Test + fun trimCacheWithCustomWeigher() { + val cache = LruCache(12) { key, value -> + key.length + (value?.length ?: 0) + } + + mapOf( + "key1" to "value1", + "key2" to "value2", + "key3" to null + ).forEach { (key, value) -> + cache[key] = value + } + + assertEquals(4, cache.size()) + assertEquals( + mapOf( + "key3" to null + ), + cache.dump() + ) + } + + @Test + fun recentUsedItem() { + val cache = LruCache(10) + + mapOf( + "key1" to "value1", + "key2" to "value2", + "key3" to null + ).forEach { (key, value) -> + cache[key] = value + } + + cache["key3"] + cache["key2"] + + assertEquals( + mapOf( + "key2" to "value2", + "key3" to null, + "key1" to "value1", + ), + cache.dump() + ) + } +} diff --git a/apollo-normalized-cache/src/iosMain/kotlin/com/apollographql/apollo/cache/normalized/internal/Platform.kt b/apollo-normalized-cache/src/iosMain/kotlin/com/apollographql/apollo/cache/normalized/internal/Platform.kt new file mode 100644 index 00000000000..f2613f2c7dc --- /dev/null +++ b/apollo-normalized-cache/src/iosMain/kotlin/com/apollographql/apollo/cache/normalized/internal/Platform.kt @@ -0,0 +1,12 @@ +package 
com.apollographql.apollo.cache.normalized.internal + +import kotlinx.cinterop.convert +import platform.darwin.DISPATCH_TIME_NOW +import platform.darwin.dispatch_time + +actual object Platform { + actual fun currentTimeMillis(): Long { + val nanoseconds: Long = dispatch_time(DISPATCH_TIME_NOW, 0).convert() + return nanoseconds / 1_000_000L + } +} diff --git a/apollo-normalized-cache/src/jvmMain/kotlin/com/apollographql/apollo/cache/normalized/internal/Platform.kt b/apollo-normalized-cache/src/jvmMain/kotlin/com/apollographql/apollo/cache/normalized/internal/Platform.kt new file mode 100644 index 00000000000..0091beacf72 --- /dev/null +++ b/apollo-normalized-cache/src/jvmMain/kotlin/com/apollographql/apollo/cache/normalized/internal/Platform.kt @@ -0,0 +1,7 @@ +package com.apollographql.apollo.cache.normalized.internal + +actual object Platform { + actual fun currentTimeMillis(): Long { + return System.currentTimeMillis() + } +} diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts index 5e65cb14596..b736c718491 100644 --- a/buildSrc/build.gradle.kts +++ b/buildSrc/build.gradle.kts @@ -28,4 +28,6 @@ dependencies { implementation(groovy.util.Eval.x(project, "x.dep.benManesVersions")) // this plugin is added to the classpath but never applied, it is only used for the closeAndRelease code implementation(groovy.util.Eval.x(project, "x.dep.vanniktechPlugin")) -} \ No newline at end of file + + implementation(groovy.util.Eval.x(project, "x.dep.kotlin.atomicGradle")) +} diff --git a/buildSrc/src/main/kotlin/ApiCompatibility.kt b/buildSrc/src/main/kotlin/ApiCompatibility.kt index 382d72da7ff..38322f45304 100644 --- a/buildSrc/src/main/kotlin/ApiCompatibility.kt +++ b/buildSrc/src/main/kotlin/ApiCompatibility.kt @@ -14,6 +14,10 @@ object ApiCompatibility { // apollo-runtime-kotlin is still under development. Include the check once it is stable enough. return@subprojects } + "apollo-normalized-cache" -> { + // is still under development.
Include the check once it is stable enough. + return@subprojects + } else -> { it.configureJapiCmp() MetalavaPlugin().apply(it) diff --git a/composite/apollo-integration-kotlin/src/commonTest/kotlin/com/apollographql/apollo/integration/CacheInterceptorTest.kt b/composite/apollo-integration-kotlin/src/commonTest/kotlin/com/apollographql/apollo/integration/CacheInterceptorTest.kt index 01e755208d8..fe8110d3905 100644 --- a/composite/apollo-integration-kotlin/src/commonTest/kotlin/com/apollographql/apollo/integration/CacheInterceptorTest.kt +++ b/composite/apollo-integration-kotlin/src/commonTest/kotlin/com/apollographql/apollo/integration/CacheInterceptorTest.kt @@ -3,7 +3,7 @@ package com.apollographql.apollo.integration import HeroNameQuery import com.apollographql.apollo.ApolloClient import com.apollographql.apollo.cache.normalized.NormalizedCache -import com.apollographql.apollo.cache.normalized.simple.MapNormalizedCache +import com.apollographql.apollo.cache.normalized.MemoryCache import com.apollographql.apollo.ApolloQueryRequest import com.apollographql.apollo.interceptor.cache.FetchPolicy import com.apollographql.apollo.interceptor.cache.fromCache @@ -30,7 +30,7 @@ class CacheInterceptorTest { @BeforeTest fun setUp() { - cache = MapNormalizedCache() + cache = MemoryCache(maxSizeBytes = Int.MAX_VALUE) networkTransport = MockNetworkTransport() apolloClient = ApolloClient.Builder() .networkTransport(networkTransport) diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle index a8e359eddea..6e113dbd204 100644 --- a/gradle/dependencies.gradle +++ b/gradle/dependencies.gradle @@ -12,6 +12,7 @@ def versions = [ jetbrainsAnnotations : '13.0', junit : '4.13.1', kotlin : '1.4.20', + kotlinAtomic : '0.15.1', kotlinCoroutines : '1.4.2', kotlinPoet : '1.6.0', moshi : '1.11.0', @@ -57,6 +58,8 @@ ext.dep = [ jetbrainsAnnotations : "org.jetbrains:annotations:$versions.jetbrainsAnnotations", junit : "junit:junit:$versions.junit", kotlin : [ + atomic : 
"org.jetbrains.kotlinx:atomicfu:$versions.kotlinAtomic", + atomicGradle : "org.jetbrains.kotlinx:atomicfu-gradle-plugin:$versions.kotlinAtomic", plugin : "org.jetbrains.kotlin:kotlin-gradle-plugin:$versions.kotlin", coroutines : "org.jetbrains.kotlinx:kotlinx-coroutines-core:$versions.kotlinCoroutines", coroutinesAndroid : "org.jetbrains.kotlinx:kotlinx-coroutines-android:$versions.kotlinCoroutines",