From 434dc0ed2b7fdbb5167f842e72fc17d7ee1aac43 Mon Sep 17 00:00:00 2001 From: Peter Alfonsi Date: Thu, 12 Dec 2024 15:36:41 -0800 Subject: [PATCH] add all caffeine Signed-off-by: Peter Alfonsi --- plugins/cache-caffeine/build.gradle | 37 ++ .../licenses/caffeine-3.1.8.jar.sha1 | 1 + .../licenses/caffeine-LICENSE.txt | 202 ++++++++ .../licenses/caffeine-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 + .../licenses/slf4j-api-LICENSE.txt | 21 + .../licenses/slf4j-api-NOTICE.txt | 0 .../opensearch/cache/CaffeineCachePlugin.java | 56 ++ .../cache/CaffeineHeapCacheSettings.java | 83 +++ .../org/opensearch/cache/package-info.java | 12 + .../cache/store/CaffeineHeapCache.java | 285 ++++++++++ .../opensearch/cache/store/package-info.java | 9 + .../plugin-metadata/plugin-security.policy | 15 + .../opensearch/cache/CaffeinePluginTests.java | 26 + .../cache/store/CaffeineHeapCacheTests.java | 489 ++++++++++++++++++ 15 files changed, 1237 insertions(+) create mode 100644 plugins/cache-caffeine/build.gradle create mode 100644 plugins/cache-caffeine/licenses/caffeine-3.1.8.jar.sha1 create mode 100644 plugins/cache-caffeine/licenses/caffeine-LICENSE.txt create mode 100644 plugins/cache-caffeine/licenses/caffeine-NOTICE.txt create mode 100644 plugins/cache-caffeine/licenses/slf4j-api-1.7.36.jar.sha1 create mode 100644 plugins/cache-caffeine/licenses/slf4j-api-LICENSE.txt create mode 100644 plugins/cache-caffeine/licenses/slf4j-api-NOTICE.txt create mode 100644 plugins/cache-caffeine/src/main/java/org/opensearch/cache/CaffeineCachePlugin.java create mode 100644 plugins/cache-caffeine/src/main/java/org/opensearch/cache/CaffeineHeapCacheSettings.java create mode 100644 plugins/cache-caffeine/src/main/java/org/opensearch/cache/package-info.java create mode 100644 plugins/cache-caffeine/src/main/java/org/opensearch/cache/store/CaffeineHeapCache.java create mode 100644 plugins/cache-caffeine/src/main/java/org/opensearch/cache/store/package-info.java create mode 100644 plugins/cache-caffeine/src/main/plugin-metadata/plugin-security.policy create mode 100644 plugins/cache-caffeine/src/test/java/org/opensearch/cache/CaffeinePluginTests.java create mode 100644 plugins/cache-caffeine/src/test/java/org/opensearch/cache/store/CaffeineHeapCacheTests.java diff --git a/plugins/cache-caffeine/build.gradle b/plugins/cache-caffeine/build.gradle new file mode 100644 index 0000000000000..6a11cd1a14c19 --- /dev/null +++ b/plugins/cache-caffeine/build.gradle @@ -0,0 +1,37 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +apply plugin: 'opensearch.internal-cluster-test' + +opensearchplugin { + description 'Caffeine based cache implementation.' 
+ classname 'org.opensearch.cache.CaffeineCachePlugin' +} + +versions << [ + 'caffeine' : '3.1.8' +] + +dependencies { + api "com.github.ben-manes.caffeine:caffeine:${versions.caffeine}" + api "org.slf4j:slf4j-api:${versions.slf4j}" +} + +thirdPartyAudit { + ignoreMissingClasses( + 'org.slf4j.impl.StaticLoggerBinder', + 'org.slf4j.impl.StaticMDCBinder', + 'org.slf4j.impl.StaticMarkerBinder' + ) +} + +tasks.named("bundlePlugin").configure { + from('config/cache-caffeine') { + into 'config' + } +} diff --git a/plugins/cache-caffeine/licenses/caffeine-3.1.8.jar.sha1 b/plugins/cache-caffeine/licenses/caffeine-3.1.8.jar.sha1 new file mode 100644 index 0000000000000..d9747af25a16c --- /dev/null +++ b/plugins/cache-caffeine/licenses/caffeine-3.1.8.jar.sha1 @@ -0,0 +1 @@ +24795585df8afaf70a2cd534786904ea5889c047 \ No newline at end of file diff --git a/plugins/cache-caffeine/licenses/caffeine-LICENSE.txt b/plugins/cache-caffeine/licenses/caffeine-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/cache-caffeine/licenses/caffeine-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/cache-caffeine/licenses/caffeine-NOTICE.txt b/plugins/cache-caffeine/licenses/caffeine-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/cache-caffeine/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/cache-caffeine/licenses/slf4j-api-1.7.36.jar.sha1 new file mode 100644 index 0000000000000..77b9917528382 --- /dev/null +++ b/plugins/cache-caffeine/licenses/slf4j-api-1.7.36.jar.sha1 @@ -0,0 +1 @@ +6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/cache-caffeine/licenses/slf4j-api-LICENSE.txt b/plugins/cache-caffeine/licenses/slf4j-api-LICENSE.txt new file mode 100644 index 0000000000000..54512cc08d16b --- /dev/null +++ b/plugins/cache-caffeine/licenses/slf4j-api-LICENSE.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2022 QOS.ch +All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/plugins/cache-caffeine/licenses/slf4j-api-NOTICE.txt b/plugins/cache-caffeine/licenses/slf4j-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/cache-caffeine/src/main/java/org/opensearch/cache/CaffeineCachePlugin.java b/plugins/cache-caffeine/src/main/java/org/opensearch/cache/CaffeineCachePlugin.java new file mode 100644 index 0000000000000..b3753934fd268 --- /dev/null +++ b/plugins/cache-caffeine/src/main/java/org/opensearch/cache/CaffeineCachePlugin.java @@ -0,0 +1,56 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cache; + +import org.opensearch.cache.store.CaffeineHeapCache; +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.settings.Setting; +import org.opensearch.plugins.CachePlugin; +import org.opensearch.plugins.Plugin; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.opensearch.cache.CaffeineHeapCacheSettings.CACHE_TYPE_MAP; + +/** + * Caffeine based cache plugin. + */ +public class CaffeineCachePlugin extends Plugin implements CachePlugin { + + private static final String CAFFEINE_CACHE_PLUGIN = "CaffeinePlugin"; + + /** + * Default constructor to avoid javadoc related failures. 
+ */ + public CaffeineCachePlugin() {} + + @Override + public Map getCacheFactoryMap() { + return Map.of(CaffeineHeapCache.CaffeineHeapCacheFactory.NAME, new CaffeineHeapCache.CaffeineHeapCacheFactory()); + } + + @Override + public List> getSettings() { + List> settingList = new ArrayList<>(); + for (Map.Entry>> entry : CACHE_TYPE_MAP.entrySet()) { + for (Map.Entry> entry1 : entry.getValue().entrySet()) { + settingList.add(entry1.getValue()); + } + } + return settingList; + } + + @Override + public String getName() { + return CAFFEINE_CACHE_PLUGIN; + } +} diff --git a/plugins/cache-caffeine/src/main/java/org/opensearch/cache/CaffeineHeapCacheSettings.java b/plugins/cache-caffeine/src/main/java/org/opensearch/cache/CaffeineHeapCacheSettings.java new file mode 100644 index 0000000000000..08bd7a918742d --- /dev/null +++ b/plugins/cache-caffeine/src/main/java/org/opensearch/cache/CaffeineHeapCacheSettings.java @@ -0,0 +1,83 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cache; + +import org.opensearch.cache.store.CaffeineHeapCache; +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.common.unit.ByteSizeValue; + +import java.util.HashMap; +import java.util.Map; + +import static org.opensearch.common.settings.Setting.Property.NodeScope; + +public class CaffeineHeapCacheSettings { + + /** + * Setting to define maximum size for the cache as 1% of heap memory available. + * + * Setting pattern: {cache_type}.caffeine_heap.maximum_size_in_bytes + */ + public static final Setting.AffixSetting MAXIMUM_SIZE_IN_BYTES_SETTING = Setting.suffixKeySetting( + CaffeineHeapCache.CaffeineHeapCacheFactory.NAME + ".maximum_size_in_bytes", + (key) -> Setting.memorySizeSetting(key, "1%", NodeScope) + ); + + /** + * Setting to define expire after access. 
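+ * Defaults to TimeValue.MAX_VALUE, so entries do not expire by access time unless this setting is overridden (for the request cache the concrete key is expected to resolve to something like indices.requests.cache.caffeine_heap.expire_after_access).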
+ * + * Setting pattern: {cache_type}.caffeine_heap.expire_after_access + */ + public static final Setting.AffixSetting EXPIRE_AFTER_ACCESS_SETTING = Setting.suffixKeySetting( + CaffeineHeapCache.CaffeineHeapCacheFactory.NAME + ".expire_after_access", + (key) -> Setting.positiveTimeSetting(key, TimeValue.MAX_VALUE, Setting.Property.NodeScope) + ); + + public static final String MAXIMUM_SIZE_IN_BYTES_KEY = "maximum_size_in_bytes"; + public static final String EXPIRE_AFTER_ACCESS_KEY = "expire_after_access"; + + private static final Map> KEY_SETTING_MAP = Map.of( + MAXIMUM_SIZE_IN_BYTES_KEY, + MAXIMUM_SIZE_IN_BYTES_SETTING, + EXPIRE_AFTER_ACCESS_KEY, + EXPIRE_AFTER_ACCESS_SETTING + ); + + public static final Map>> CACHE_TYPE_MAP = getCacheTypeMap(); + + private static Map>> getCacheTypeMap() { + Map>> cacheTypeMap = new HashMap<>(); + for (CacheType cacheType : CacheType.values()) { + Map> settingMap = new HashMap<>(); + for (Map.Entry> entry : KEY_SETTING_MAP.entrySet()) { + settingMap.put(entry.getKey(), entry.getValue().getConcreteSettingForNamespace(cacheType.getSettingPrefix())); + } + cacheTypeMap.put(cacheType, settingMap); + } + return cacheTypeMap; + } + + public static Map> getSettingListForCacheType(CacheType cacheType) { + Map> cacheTypeSettings = CACHE_TYPE_MAP.get(cacheType); + if (cacheTypeSettings == null) { + throw new IllegalArgumentException( + "No settings exist for cache store name: " + + CaffeineHeapCache.CaffeineHeapCacheFactory.NAME + + "associated with " + + "cache type: " + + cacheType + ); + } + return cacheTypeSettings; + } + + public CaffeineHeapCacheSettings() {} +} diff --git a/plugins/cache-caffeine/src/main/java/org/opensearch/cache/package-info.java b/plugins/cache-caffeine/src/main/java/org/opensearch/cache/package-info.java new file mode 100644 index 0000000000000..8b71e2f68648f --- /dev/null +++ b/plugins/cache-caffeine/src/main/java/org/opensearch/cache/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Base package for cache plugin + */ +package org.opensearch.cache; diff --git a/plugins/cache-caffeine/src/main/java/org/opensearch/cache/store/CaffeineHeapCache.java b/plugins/cache-caffeine/src/main/java/org/opensearch/cache/store/CaffeineHeapCache.java new file mode 100644 index 0000000000000..634137786deaf --- /dev/null +++ b/plugins/cache-caffeine/src/main/java/org/opensearch/cache/store/CaffeineHeapCache.java @@ -0,0 +1,285 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.cache.store; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.RemovalCause; +import com.github.benmanes.caffeine.cache.RemovalListener; +import com.github.benmanes.caffeine.cache.Weigher; + +import org.opensearch.OpenSearchException; +import org.opensearch.cache.CaffeineHeapCacheSettings; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.ICacheKey; +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.RemovalReason; +import org.opensearch.common.cache.settings.CacheSettings; +import org.opensearch.common.cache.stats.CacheStatsHolder; +import org.opensearch.common.cache.stats.DefaultCacheStatsHolder; +import org.opensearch.common.cache.stats.ImmutableCacheStatsHolder; +import org.opensearch.common.cache.stats.NoopCacheStatsHolder; +import org.opensearch.common.cache.store.builders.ICacheBuilder; +import org.opensearch.common.cache.store.config.CacheConfig; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.core.common.unit.ByteSizeValue; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.Executor; +import java.util.concurrent.ForkJoinPool; +import java.util.function.Function; +import java.util.function.ToLongBiFunction; + +@ExperimentalApi +public class CaffeineHeapCache implements ICache { + + private final Cache, V> cache; + private final CacheStatsHolder cacheStatsHolder; + private final ToLongBiFunction, V> weigher; + private final CaffeineRemovalListener caffeineRemovalListener; + + private CaffeineHeapCache(Builder builder) { + List dimensionNames = Objects.requireNonNull(builder.dimensionNames, "Dimension names can't be null"); + if (builder.getStatsTrackingEnabled()) { + // If this cache is being used, FeatureFlags.PLUGGABLE_CACHE is already on, so we can always use the DefaultCacheStatsHolder + // unless statsTrackingEnabled is explicitly set to false in CacheConfig. 
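+ // When stats tracking is disabled, the else branch below falls back to NoopCacheStatsHolder so stats calls become cheap no-ops.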
+ this.cacheStatsHolder = new DefaultCacheStatsHolder(dimensionNames, CaffeineHeapCacheFactory.NAME); + } else { + this.cacheStatsHolder = NoopCacheStatsHolder.getInstance(); + } + this.weigher = Objects.requireNonNull(builder.getWeigher(), "Weigher can't be null"); + this.caffeineRemovalListener = new CaffeineRemovalListener( + Objects.requireNonNull(builder.getRemovalListener(), "Removal listener can't be null") + ); + + cache = AccessController.doPrivileged( + (PrivilegedAction, V>>) () -> Caffeine.newBuilder() + .removalListener(this.caffeineRemovalListener) + .maximumWeight(builder.getMaxWeightInBytes()) + .expireAfterAccess(builder.getExpireAfterAcess().duration(), builder.getExpireAfterAcess().timeUnit()) + .weigher(new CaffeineWeigher()) + .executor(Runnable::run) + .build() + ); + } + + /** + * Wrapper over ICache weigher to be used by Caffeine + */ + private class CaffeineWeigher implements Weigher, V> { + @Override + public int weigh(ICacheKey key, V value) { + return (int) weigher.applyAsLong(key, value); + } + } + + private class CaffeineRemovalListener implements RemovalListener, V> { + private final org.opensearch.common.cache.RemovalListener, V> removalListener; + + CaffeineRemovalListener(org.opensearch.common.cache.RemovalListener, V> removalListener) { + this.removalListener = removalListener; + } + + @Override + public void onRemoval(ICacheKey key, V value, RemovalCause removalCause) { + switch (removalCause) { + case SIZE: + removalListener.onRemoval(new RemovalNotification<>(key, value, RemovalReason.EVICTED)); + cacheStatsHolder.incrementEvictions(key.dimensions); + break; + case EXPIRED: + removalListener.onRemoval(new RemovalNotification<>(key, value, RemovalReason.INVALIDATED)); + cacheStatsHolder.incrementEvictions(key.dimensions); + break; + case EXPLICIT: + removalListener.onRemoval(new RemovalNotification<>(key, value, RemovalReason.EXPLICIT)); + break; + case REPLACED: + removalListener.onRemoval(new RemovalNotification<>(key, value, RemovalReason.REPLACED)); + break; + } + cacheStatsHolder.decrementItems(key.dimensions); + cacheStatsHolder.decrementSizeInBytes(key.dimensions, weigher.applyAsLong(key, value)); + } + } + + @Override + public V get(ICacheKey key) { + if (key == null) { + throw new IllegalArgumentException("Key passed to caffeine heap cache was null."); + } + V value = cache.getIfPresent(key); + if (value != null) { + cacheStatsHolder.incrementHits(key.dimensions); + } else { + cacheStatsHolder.incrementMisses(key.dimensions); + } + return value; + } + + @Override + public void put(ICacheKey key, V value) { + if (key == null) { + throw new IllegalArgumentException("Key passed to caffeine heap cache was null."); + } + if (value == null) { + throw new IllegalArgumentException("Value passed to caffeine heap cache was null."); + } + if (cache.getIfPresent(key) != null) { + cache.invalidate(key); + } + cache.put(key, value); + cacheStatsHolder.incrementItems(key.dimensions); + cacheStatsHolder.incrementSizeInBytes(key.dimensions, weigher.applyAsLong(key, value)); + } + + @Override + public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V> loader) { + Function, V> mappingFunction = k -> { + V loadedValue; + try { + loadedValue = loader.load(k); + } catch (Exception ex) { + throw new OpenSearchException("Exception occurred while getting value from cache loader."); + } + return loadedValue; + }; + V value = cache.get(key, mappingFunction); + if (!loader.isLoaded()) { + cacheStatsHolder.incrementHits(key.dimensions); + } else { + 
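// The loader ran, so this was a miss; record it and account for the newly inserted entry's count and weight. +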
cacheStatsHolder.incrementMisses(key.dimensions); + cacheStatsHolder.incrementItems(key.dimensions); + cacheStatsHolder.incrementSizeInBytes(key.dimensions, weigher.applyAsLong(key, value)); + } + return value; + } + + @Override + public void invalidate(ICacheKey key) { + if (key == null) { + throw new IllegalArgumentException("Key passed to caffeine heap cache was null."); + } + if (key.getDropStatsForDimensions()) { + cacheStatsHolder.removeDimensions(key.dimensions); + } + if (key.key != null) { + cache.invalidate(key); + } + } + + @Override + public void invalidateAll() { + cache.invalidateAll(); + cacheStatsHolder.reset(); + } + + @Override + public Iterable> keys() { + ConcurrentMap, V> map = cache.asMap(); + return map.keySet(); + } + + @Override + public long count() { + return cacheStatsHolder.count(); + } + + @Override + public void refresh() { + // Left empty, as ehcache doesn't provide a refresh method either. + } + + @Override + public ImmutableCacheStatsHolder stats(String[] levels) { + return cacheStatsHolder.getImmutableCacheStatsHolder(levels); + } + + @Override + public void close() {} + + public static class CaffeineHeapCacheFactory implements ICache.Factory { + + public static final String NAME = "caffeine_heap"; + + @Override + public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) { + Map> settingList = CaffeineHeapCacheSettings.getSettingListForCacheType(cacheType); + Settings settings = config.getSettings(); + boolean statsTrackingEnabled = statsTrackingEnabled(config.getSettings(), config.getStatsTrackingEnabled()); + ICacheBuilder builder = new CaffeineHeapCache.Builder().setDimensionNames(config.getDimensionNames()) + .setStatsTrackingEnabled(statsTrackingEnabled) + .setMaximumWeightInBytes(((ByteSizeValue) settingList.get(CaffeineHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY).get(settings)).getBytes()) + .setExpireAfterAccess(((TimeValue) settingList.get(CaffeineHeapCacheSettings.EXPIRE_AFTER_ACCESS_KEY).get(settings))) + .setWeigher(config.getWeigher()) + .setRemovalListener(config.getRemovalListener()); + if (config.getMaxSizeInBytes() != 0) { + builder.setMaximumWeightInBytes(config.getMaxSizeInBytes()); + } + if (config.getExpireAfterAccess() != null) { + builder.setExpireAfterAccess(config.getExpireAfterAccess()); + } + return builder.build(); + } + + @Override + public String getCacheName() { + return NAME; + } + + private boolean statsTrackingEnabled(Settings settings, boolean statsTrackingEnabledConfig) { + // Don't track stats when pluggable caching is off, or when explicitly set to false in the CacheConfig + return FeatureFlags.PLUGGABLE_CACHE_SETTING.get(settings) && statsTrackingEnabledConfig; + } + } + + public static class Builder extends ICacheBuilder { + private List dimensionNames; + private Executor executor = ForkJoinPool.commonPool(); + + public Builder() {} + + public Builder setDimensionNames(List dimensionNames) { + this.dimensionNames = dimensionNames; + return this; + } + + public Builder setExecutor(Executor executor) { + this.executor = executor; + return this; + } + + public Executor getExecutor() { + return executor; + } + + public CaffeineHeapCache build() { + return new CaffeineHeapCache<>(this); + } + } + + /** + * Manually performs Caffeine maintenance cycle, which includes removing expired entries from the cache. + * Used for testing. 
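+ * Caffeine may defer eviction and expiration work to an internal maintenance cycle; calling this forces that work to complete so assertions see the final state.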
+ */ + void cleanUp() { + cache.cleanUp(); + } +} diff --git a/plugins/cache-caffeine/src/main/java/org/opensearch/cache/store/package-info.java b/plugins/cache-caffeine/src/main/java/org/opensearch/cache/store/package-info.java new file mode 100644 index 0000000000000..1e8252d2a1527 --- /dev/null +++ b/plugins/cache-caffeine/src/main/java/org/opensearch/cache/store/package-info.java @@ -0,0 +1,9 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cache.store; diff --git a/plugins/cache-caffeine/src/main/plugin-metadata/plugin-security.policy b/plugins/cache-caffeine/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 0000000000000..bce865c917d5b --- /dev/null +++ b/plugins/cache-caffeine/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,15 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +grant { + permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; + permission java.lang.RuntimePermission "createClassLoader"; + permission java.lang.RuntimePermission "accessDeclaredMembers"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + permission java.lang.RuntimePermission "getClassLoader"; +}; diff --git a/plugins/cache-caffeine/src/test/java/org/opensearch/cache/CaffeinePluginTests.java b/plugins/cache-caffeine/src/test/java/org/opensearch/cache/CaffeinePluginTests.java new file mode 100644 index 0000000000000..a0006188fbf98 --- /dev/null +++ b/plugins/cache-caffeine/src/test/java/org/opensearch/cache/CaffeinePluginTests.java @@ -0,0 +1,26 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cache; + +import org.opensearch.cache.store.CaffeineHeapCache; +import org.opensearch.common.cache.ICache; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Map; + +public class CaffeinePluginTests extends OpenSearchTestCase { + + private CaffeineCachePlugin caffeineCachePlugin = new CaffeineCachePlugin(); + + public void testGetCacheStoreTypeMap() { + Map factoryMap = caffeineCachePlugin.getCacheFactoryMap(); + assertNotNull(factoryMap); + assertNotNull(factoryMap.get(CaffeineHeapCache.CaffeineHeapCacheFactory.NAME)); + } +} diff --git a/plugins/cache-caffeine/src/test/java/org/opensearch/cache/store/CaffeineHeapCacheTests.java b/plugins/cache-caffeine/src/test/java/org/opensearch/cache/store/CaffeineHeapCacheTests.java new file mode 100644 index 0000000000000..316460bcbcf15 --- /dev/null +++ b/plugins/cache-caffeine/src/test/java/org/opensearch/cache/store/CaffeineHeapCacheTests.java @@ -0,0 +1,489 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.cache.store; + +import org.opensearch.common.Randomness; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.ICacheKey; +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.stats.ImmutableCacheStats; +import org.opensearch.common.metrics.CounterMetric; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.UUID; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Phaser; +import java.util.function.ToLongBiFunction; + +public class CaffeineHeapCacheTests extends OpenSearchTestCase { + + private final String dimensionName = "shardId"; + private static final int CACHE_SIZE_IN_BYTES = 1024 * 101; + private static final int MOCK_WEIGHT = 10; + + public void testBasicGetAndPut() throws IOException { + ToLongBiFunction, String> weigher = getMockWeigher(false); + MockRemovalListener removalListener = new MockRemovalListener<>(); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(List.of(dimensionName)) + .setExecutor(Runnable::run) // Specify direct (same thread) executor for testing purposes. + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + int randomKeys = randomIntBetween(10, 100); + Map keyValueMap = new HashMap<>(); + for (int i = 0; i < randomKeys; i++) { + keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); + } + for (Map.Entry entry : keyValueMap.entrySet()) { + ICacheKey iCacheKey = getICacheKey(entry.getKey()); + caffeineTest.put(iCacheKey, entry.getValue()); + } + for (Map.Entry entry : keyValueMap.entrySet()) { + String value = caffeineTest.get(getICacheKey(entry.getKey())); + assertEquals(entry.getValue(), value); + } + assertEquals(randomKeys, caffeineTest.stats().getTotalItems()); + assertEquals(randomKeys, caffeineTest.stats().getTotalHits()); + assertEquals(randomKeys, caffeineTest.count()); + + // Validate misses + int expectedNumberOfMisses = randomIntBetween(10, 200); + for (int i = 0; i < expectedNumberOfMisses; i++) { + caffeineTest.get(getICacheKey(UUID.randomUUID().toString())); + } + assertEquals(expectedNumberOfMisses, caffeineTest.stats().getTotalMisses()); + } + + public void testConcurrentGet() throws Exception { + ToLongBiFunction, String> weigher = getMockWeigher(false); + MockRemovalListener removalListener = new MockRemovalListener<>(); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(List.of(dimensionName)) + .setExecutor(Runnable::run) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + int randomKeys = randomIntBetween(20, 100); + Thread[] threads = new Thread[randomKeys]; + Phaser phaser = new Phaser(randomKeys + 1); + CountDownLatch countDownLatch = new CountDownLatch(randomKeys); + Map, String> keyValueMap = new HashMap<>(); + int j = 0; + for (int i = 0; i < randomKeys; i++) { + 
keyValueMap.put(getICacheKey(UUID.randomUUID().toString()), UUID.randomUUID().toString()); + } + for (Map.Entry, String> entry : keyValueMap.entrySet()) { + caffeineTest.put(entry.getKey(), entry.getValue()); + } + + for (Map.Entry, String> entry : keyValueMap.entrySet()) { + threads[j] = new Thread(() -> { + phaser.arriveAndAwaitAdvance(); + assertEquals(entry.getValue(), caffeineTest.get(entry.getKey())); + countDownLatch.countDown(); + }); + threads[j].start(); + j++; + } + phaser.arriveAndAwaitAdvance(); // Will trigger parallel gets above. + countDownLatch.await(); // Wait for all threads to finish + assertEquals(randomKeys, caffeineTest.stats().getTotalHits()); + } + + public void testConcurrentPut() throws Exception { + ToLongBiFunction, String> weigher = getMockWeigher(false); + MockRemovalListener removalListener = new MockRemovalListener<>(); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(List.of(dimensionName)) + .setExecutor(Runnable::run) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + int randomKeys = randomIntBetween(20, 100); + Thread[] threads = new Thread[randomKeys]; + Phaser phaser = new Phaser(randomKeys + 1); + CountDownLatch countDownLatch = new CountDownLatch(randomKeys); + Map, String> keyValueMap = new HashMap<>(); + int j = 0; + for (int i = 0; i < randomKeys; i++) { + keyValueMap.put(getICacheKey(UUID.randomUUID().toString()), UUID.randomUUID().toString()); + } + for (Map.Entry, String> entry : keyValueMap.entrySet()) { + threads[j] = new Thread(() -> { + phaser.arriveAndAwaitAdvance(); + caffeineTest.put(entry.getKey(), entry.getValue()); + countDownLatch.countDown(); + }); + threads[j].start(); + j++; + } + phaser.arriveAndAwaitAdvance(); // Will trigger parallel puts above. + countDownLatch.await(); // Wait for all threads to finish + for (Map.Entry, String> entry : keyValueMap.entrySet()) { + String value = caffeineTest.get(entry.getKey()); + assertEquals(entry.getValue(), value); + } + assertEquals(randomKeys, caffeineTest.count()); + assertEquals(randomKeys, caffeineTest.stats().getTotalItems()); + } + + // Modified from EhCacheDiskCacheTests.java + public void testComputeIfAbsentConcurrently() throws Exception { + ToLongBiFunction, String> weigher = getMockWeigher(false); + MockRemovalListener removalListener = new MockRemovalListener<>(); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(List.of(dimensionName)) + .setExecutor(Runnable::run) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + int numberOfRequest = randomIntBetween(200, 400); + String key = UUID.randomUUID().toString(); + String value = "dummy"; + Thread[] threads = new Thread[numberOfRequest]; + Phaser phaser = new Phaser(numberOfRequest + 1); + CountDownLatch countDownLatch = new CountDownLatch(numberOfRequest); + + List, String>> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>(); + + // Try to hit different request with the same key concurrently. Verify value is only loaded once. 
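+ // Caffeine's Cache.get(key, mappingFunction) applies the mapping function at most once per key and blocks concurrent callers for that key, so exactly one loader is expected to report isLoaded() == true.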
+ for (int i = 0; i < numberOfRequest; i++) { + threads[i] = new Thread(() -> { + LoadAwareCacheLoader, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() { + boolean isLoaded; + + @Override + public boolean isLoaded() { + return isLoaded; + } + + @Override + public String load(ICacheKey key) { + isLoaded = true; + return value; + } + }; + loadAwareCacheLoaderList.add(loadAwareCacheLoader); + phaser.arriveAndAwaitAdvance(); + try { + assertEquals(value, caffeineTest.computeIfAbsent(getICacheKey(key), loadAwareCacheLoader)); + } catch (Exception e) { + throw new RuntimeException(e); + } + countDownLatch.countDown(); + }); + threads[i].start(); + } + phaser.arriveAndAwaitAdvance(); + countDownLatch.await(); + int numberOfTimesValueLoaded = 0; + for (int i = 0; i < numberOfRequest; i++) { + if (loadAwareCacheLoaderList.get(i).isLoaded()) { + numberOfTimesValueLoaded++; + } + } + assertEquals(1, numberOfTimesValueLoaded); + assertEquals(1, caffeineTest.stats().getTotalMisses()); + assertEquals(1, caffeineTest.stats().getTotalItems()); + assertEquals(numberOfRequest - 1, caffeineTest.stats().getTotalHits()); + assertEquals(1, caffeineTest.count()); + caffeineTest.close(); + } + + public void testInvalidateAll() throws Exception { + ToLongBiFunction, String> weigher = getMockWeigher(false); + MockRemovalListener removalListener = new MockRemovalListener<>(); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(List.of(dimensionName)) + .setExecutor(Runnable::run) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + int randomKeys = randomIntBetween(10, 100); + Map, String> keyValueMap = new HashMap<>(); + for (int i = 0; i < randomKeys; i++) { + ICacheKey key = getICacheKey(UUID.randomUUID().toString()); + String value = UUID.randomUUID().toString(); + keyValueMap.put(key, value); + caffeineTest.put(key, value); + } + caffeineTest.invalidateAll(); // Clear all the entries. + for (Map.Entry, String> entry : keyValueMap.entrySet()) { + // Verify that value is null for a removed entry. 
+ assertNull(caffeineTest.get(entry.getKey())); + } + assertEquals(0, caffeineTest.count()); + assertEquals(0, caffeineTest.stats().getTotalSizeInBytes()); + } + + // Modified from OpenSearchOnHeapCache.java + public void testInvalidateWithDropDimensions() throws Exception { + ToLongBiFunction, String> weigher = getMockWeigher(false); + MockRemovalListener removalListener = new MockRemovalListener<>(); + List dimensionNames = List.of("dim1", "dim2"); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(dimensionNames) + .setExecutor(Runnable::run) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + List> keysAdded = new ArrayList<>(); + + for (int i = 0; i < 20; i++) { + ICacheKey key = new ICacheKey<>(UUID.randomUUID().toString(), getRandomDimensions(dimensionNames)); + keysAdded.add(key); + caffeineTest.put(key, UUID.randomUUID().toString()); + } + + ICacheKey keyToDrop = keysAdded.get(0); + + String[] levels = dimensionNames.toArray(new String[0]); + ImmutableCacheStats snapshot = caffeineTest.stats(levels).getStatsForDimensionValues(keyToDrop.dimensions); + assertNotNull(snapshot); + + keyToDrop.setDropStatsForDimensions(true); + caffeineTest.invalidate(keyToDrop); + + // Now assert the stats are gone for any key that has this combination of dimensions, but still there otherwise + for (ICacheKey keyAdded : keysAdded) { + snapshot = caffeineTest.stats(levels).getStatsForDimensionValues(keyAdded.dimensions); + if (keyAdded.dimensions.equals(keyToDrop.dimensions)) { + assertNull(snapshot); + } else { + assertNotNull(snapshot); + } + } + } + + public void testInvalidateConcurrently() throws Exception { + ToLongBiFunction, String> weigher = getMockWeigher(false); + MockRemovalListener removalListener = new MockRemovalListener<>(); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(List.of(dimensionName)) + .setExecutor(Runnable::run) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + int randomKeys = randomIntBetween(10, 100); + Map, String> keyValueMap = new HashMap<>(); + for (int i = 0; i < randomKeys; i++) { + keyValueMap.put(getICacheKey(UUID.randomUUID().toString()), UUID.randomUUID().toString()); + } + for (Map.Entry, String> entry : keyValueMap.entrySet()) { + caffeineTest.put(entry.getKey(), entry.getValue()); + } + assertEquals(keyValueMap.size(), caffeineTest.count()); + List> removedKeyList = new ArrayList<>(); + Thread[] threads = new Thread[randomKeys]; + Phaser phaser = new Phaser(randomKeys + 1); + CountDownLatch countDownLatch = new CountDownLatch(randomKeys); + int j = 0; + for (Map.Entry, String> entry : keyValueMap.entrySet()) { + threads[j] = new Thread(() -> { + phaser.arriveAndAwaitAdvance(); + if (randomBoolean()) { + removedKeyList.add(entry.getKey()); + caffeineTest.invalidate(entry.getKey()); + } + countDownLatch.countDown(); + }); + threads[j].start(); + j++; + } + phaser.arriveAndAwaitAdvance(); // Will trigger parallel invalidations above. 
+ countDownLatch.await(); + + for (ICacheKey removedKey : removedKeyList) { + assertNull(caffeineTest.get(removedKey)); + } + assertEquals(keyValueMap.size() - removedKeyList.size(), caffeineTest.count()); + assertEquals((keyValueMap.size() - removedKeyList.size()) * 10, caffeineTest.stats().getTotalSizeInBytes()); + } + + public void testEvictions() throws Exception { + int MAX_CACHE_SIZE = 100; // Restrict cache size in order to test size-based eviction. + ToLongBiFunction, String> weigher = getMockWeigher(false); + MockRemovalListener removalListener = new MockRemovalListener<>(); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(getMockDimensions()) + .setExecutor(Runnable::run) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(MAX_CACHE_SIZE) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + int randomKeys = randomIntBetween(MAX_CACHE_SIZE / MOCK_WEIGHT + 1, 100); + for (int i = 0; i < randomKeys; i++) { + caffeineTest.put(getICacheKey(UUID.randomUUID().toString()), UUID.randomUUID().toString()); + } + assertEquals(randomKeys - (MAX_CACHE_SIZE / MOCK_WEIGHT), caffeineTest.stats().getTotalEvictions()); + assertEquals(randomKeys - (MAX_CACHE_SIZE / MOCK_WEIGHT), removalListener.evictionMetric.count()); + assertEquals(MAX_CACHE_SIZE, caffeineTest.stats().getTotalSizeInBytes()); + } + + public void testConcurrentEvictions() throws Exception { + int MAX_CACHE_SIZE = 100; // Restrict cache size in order to test size-based eviction. + ToLongBiFunction, String> weigher = getMockWeigher(false); + MockRemovalListener removalListener = new MockRemovalListener<>(); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(List.of(dimensionName)) + .setExecutor(Runnable::run) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(MAX_CACHE_SIZE) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + int randomKeys = randomIntBetween(MAX_CACHE_SIZE / MOCK_WEIGHT + 1, 100); + Thread[] threads = new Thread[randomKeys]; + Phaser phaser = new Phaser(randomKeys + 1); + CountDownLatch countDownLatch = new CountDownLatch(randomKeys); + Map, String> keyValueMap = new HashMap<>(); + int j = 0; + for (int i = 0; i < randomKeys; i++) { + keyValueMap.put(getICacheKey(UUID.randomUUID().toString()), UUID.randomUUID().toString()); + } + for (Map.Entry, String> entry : keyValueMap.entrySet()) { + threads[j] = new Thread(() -> { + phaser.arriveAndAwaitAdvance(); + caffeineTest.put(entry.getKey(), entry.getValue()); + countDownLatch.countDown(); + }); + threads[j].start(); + j++; + } + phaser.arriveAndAwaitAdvance(); // Will trigger parallel puts above. + countDownLatch.await(); // Wait for all threads to finish + + ((CaffeineHeapCache) caffeineTest).cleanUp(); // Manually perform maintenance cycle, which includes removing + // expired entries. 
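+ // Without cleanUp(), size-based evictions triggered by the concurrent puts may still be pending when the assertions below run.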
+ assertEquals(randomKeys - (MAX_CACHE_SIZE / MOCK_WEIGHT), caffeineTest.stats().getTotalEvictions()); + assertEquals(randomKeys - (MAX_CACHE_SIZE / MOCK_WEIGHT), removalListener.evictionMetric.count()); + assertEquals(MAX_CACHE_SIZE, caffeineTest.stats().getTotalSizeInBytes()); + } + + public void testReplace() throws Exception { + ToLongBiFunction, String> weigher = getMockWeigher(true); + MockRemovalListener removalListener = new MockRemovalListener<>(); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(List.of(dimensionName)) + .setExecutor(Runnable::run) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + + Map, String> keyValueMap = new HashMap<>(); + int randomKeys = randomIntBetween(10, 100); + for (int i = 0; i < randomKeys; i++) { + ICacheKey key = getICacheKey(UUID.randomUUID().toString()); + String value = Integer.toString(randomIntBetween(1, 10)); + keyValueMap.put(key, value); + caffeineTest.put(key, value); + } + + // Replace old values with new, differently-sized values. Ensure that size changes accordingly. + for (Map.Entry, String> entry : keyValueMap.entrySet()) { + long current_size = caffeineTest.stats().getTotalItems(); + long current_size_in_bytes = caffeineTest.stats().getTotalSizeInBytes(); + String old_value = entry.getValue(); + ICacheKey key = entry.getKey(); + String new_value = Integer.toString(randomIntBetween(1, 10)); + caffeineTest.put(key, new_value); + keyValueMap.put(key, new_value); + assertEquals(current_size, caffeineTest.stats().getTotalItems()); + assertEquals(current_size_in_bytes - Integer.parseInt(old_value) + Integer.parseInt(new_value), caffeineTest.stats().getTotalSizeInBytes()); + } + } + + public void testIteratorRemove() throws Exception { + ToLongBiFunction, String> weigher = getMockWeigher(false); + MockRemovalListener removalListener = new MockRemovalListener<>(); + ICache caffeineTest = new CaffeineHeapCache.Builder().setDimensionNames(List.of(dimensionName)) + .setExecutor(Runnable::run) + .setExpireAfterAccess(TimeValue.MAX_VALUE) + .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) + .setWeigher(weigher) + .setRemovalListener(removalListener) + .build(); + + Map, String> keyValueMap = new HashMap<>(); + int randomKeys = randomIntBetween(10, 100); + for (int i = 0; i < randomKeys; i++) { + ICacheKey key = getICacheKey(UUID.randomUUID().toString()); + String value = UUID.randomUUID().toString(); + keyValueMap.put(key, value); + caffeineTest.put(key, value); + } + + // Ensures that calling remove() on the result of caffeineTest.keys().iterator() removes key from underlying cache. + Iterator> caffeineTestIterator = caffeineTest.keys().iterator(); + while (caffeineTestIterator.hasNext()) { + ICacheKey next = caffeineTestIterator.next(); + assertEquals(keyValueMap.get(next), caffeineTest.get(next)); + caffeineTestIterator.remove(); + assertNull(caffeineTest.get(next)); + } + assertEquals(0, caffeineTest.count()); + assertEquals(0, caffeineTest.stats().getTotalSizeInBytes()); + assertEquals(0, caffeineTest.stats().getTotalEvictions()); // This shouldn't increment number of evictions. 
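+ // Iterator.remove() surfaces in CaffeineHeapCache's removal listener as RemovalCause.EXPLICIT, which is mapped to RemovalReason.EXPLICIT rather than counted as an eviction.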
+ } + + private List getMockDimensions() { + return List.of("0"); + } + + private ICacheKey getICacheKey(String key) { + return new ICacheKey<>(key, getMockDimensions()); + } + + private List getRandomDimensions(List dimensionNames) { + Random rand = Randomness.get(); + int bound = 3; + List result = new ArrayList<>(); + for (String dimName : dimensionNames) { + result.add(String.valueOf(rand.nextInt(bound))); + } + return result; + } + + private ToLongBiFunction, String> getMockWeigher(boolean variedWeights) { + if (!variedWeights) { + return (iCacheKey, value) -> { return MOCK_WEIGHT; }; + } + + // Used for testing replace. + return (iCacheKey, value) -> { return Long.parseLong(value); }; + } + + static class MockRemovalListener implements RemovalListener, V> { + CounterMetric evictionMetric = new CounterMetric(); + + @Override + public void onRemoval(RemovalNotification, V> notification) { + evictionMetric.inc(); + } + } +}
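For context, a minimal usage sketch of the Builder API added above. This is not part of the patch: the cache size, expiry, weigher, and no-op removal listener are illustrative placeholders, and it assumes the same OpenSearch imports the tests use.

    import java.util.List;

    import org.opensearch.cache.store.CaffeineHeapCache;
    import org.opensearch.common.cache.ICache;
    import org.opensearch.common.cache.ICacheKey;
    import org.opensearch.common.unit.TimeValue;

    // Build a small caffeine_heap cache directly through the Builder, mirroring what
    // CaffeineHeapCacheFactory.create(...) does after reading CacheConfig and settings.
    ICache<String, String> cache = new CaffeineHeapCache.Builder<String, String>()
        .setDimensionNames(List.of("shardId"))
        .setExecutor(Runnable::run)                            // direct executor, as the tests do
        .setExpireAfterAccess(TimeValue.timeValueMinutes(5))   // illustrative expiry
        .setMaximumWeightInBytes(1024 * 1024)                  // illustrative 1 MiB cap
        .setWeigher((k, v) -> k.key.length() + v.length())     // illustrative per-entry weight
        .setRemovalListener(notification -> {})                // no-op listener for the sketch
        .build();

    ICacheKey<String> key = new ICacheKey<>("some-key", List.of("0"));
    cache.put(key, "some-value");
    assert "some-value".equals(cache.get(key));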