diff --git a/107/build.gradle b/107/build.gradle index aae8a5ad3f..0c4ace8679 100644 --- a/107/build.gradle +++ b/107/build.gradle @@ -14,6 +14,8 @@ * limitations under the License. */ +import org.gradle.internal.jvm.Jvm + apply plugin: EhDeploy configurations { @@ -30,23 +32,23 @@ sourceSets { } dependencies { - compile project(':impl'), project(':xml'), "javax.cache:cache-api:$parent.jcacheVersion" + compile project(':impl'), project(':xml') + provided "javax.cache:cache-api:$parent.jcacheVersion" tckTestRuntime 'javax.cache:cache-tests:1.0.1' tckTestClasses('javax.cache:cache-tests:1.0.1:tests') { transitive = false } } -javadoc { - exclude '**/tck/**' +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] } -def tckTestOnlyIfJava7 = { - JavaVersion.current().isJava7Compatible() +javadoc { + exclude '**/tck/**' } task unpackTckTests(type: Copy) { - onlyIf tckTestOnlyIfJava7 from { configurations.tckTestClasses.collect {zipTree(it)} } @@ -54,7 +56,7 @@ task unpackTckTests(type: Copy) { } task tckTest(type: Test, dependsOn: unpackTckTests) { - onlyIf tckTestOnlyIfJava7 + executable = Jvm.current().javaExecutable testClassesDir = sourceSets.tckTest.output.classesDir classpath += sourceSets.tckTest.runtimeClasspath diff --git a/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java b/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java index eef5c3f319..f143ad89e1 100644 --- a/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java +++ b/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java @@ -165,12 +165,12 @@ private CacheConfigurationBuilder handleStoreByValue(Eh107CompleteC if (defaults.containsKey(jsr107Configuration.getKeyType())) { matchingDefault = true; } else { - builder = builder.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)); + builder = builder.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); 
} if (defaults.containsKey(jsr107Configuration.getValueType())) { matchingDefault = true; } else { - builder = builder.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)); + builder = builder.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } if (matchingDefault) { LOG.info("CacheManager level copier configuration overwriting JSR-107 by-value semantics for cache {}", cacheName); @@ -187,6 +187,7 @@ private CacheConfigurationBuilder handleStoreByValue(Eh107CompleteC return builder; } + @SuppressWarnings("unchecked") private static CacheConfigurationBuilder addDefaultCopiers(CacheConfigurationBuilder builder, Class keyType, Class valueType ) { Set immutableTypes = new HashSet(); immutableTypes.add(String.class); @@ -198,13 +199,13 @@ private static CacheConfigurationBuilder addDefaultCopiers(CacheCon if (immutableTypes.contains(keyType)) { builder = builder.add(new DefaultCopierConfiguration((Class)Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.KEY)); } else { - builder = builder.add(new DefaultCopierConfiguration((Class)SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)); + builder = builder.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); } if (immutableTypes.contains(valueType)) { builder = builder.add(new DefaultCopierConfiguration((Class)Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.VALUE)); } else { - builder = builder.add(new DefaultCopierConfiguration((Class)SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)); + builder = builder.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } return builder; } @@ -218,7 +219,8 @@ private static void handleCopierDefaultsforImmutableTypes(Map, ClassIns addIdentityCopierIfNoneRegistered(defaults, Character.class); } - private static void 
addIdentityCopierIfNoneRegistered(Map, ClassInstanceConfiguration>> defaults, Class clazz) { + @SuppressWarnings("unchecked") + private static void addIdentityCopierIfNoneRegistered(Map, ClassInstanceConfiguration>> defaults, Class clazz) { if (!defaults.containsKey(clazz)) { defaults.put(clazz, new DefaultCopierConfiguration(Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.VALUE)); } diff --git a/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java b/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java index 4528ace604..5267f646c9 100644 --- a/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java +++ b/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java @@ -24,6 +24,7 @@ */ class DefaultJsr107SerializationProvider extends DefaultSerializationProvider { + @SuppressWarnings("unchecked") DefaultJsr107SerializationProvider() { super(new DefaultSerializationProviderConfiguration() .addSerializerFor(Object.class, (Class) PlainJavaSerializer.class)); diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java b/107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java index 98fe478d2d..85d1725031 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java +++ b/107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java @@ -27,11 +27,11 @@ abstract class Eh107CacheEntryEvent extends CacheEntryEvent { private static final long serialVersionUID = 8460535666272347345L; - private final CacheEvent ehEvent; + private final CacheEvent ehEvent; private final boolean hasOldValue; - Eh107CacheEntryEvent(Cache source, EventType eventType, CacheEvent ehEvent, + Eh107CacheEntryEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { super(source, eventType); this.ehEvent = ehEvent; @@ -63,7 +63,7 @@ public boolean isOldValueAvailable() { static class NormalEvent extends Eh107CacheEntryEvent { - public 
NormalEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { + public NormalEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { super(source, eventType, ehEvent, hasOldValue); } @@ -75,7 +75,7 @@ public V getValue() { static class RemovingEvent extends Eh107CacheEntryEvent { - public RemovingEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { + public RemovingEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { super(source, eventType, ehEvent, hasOldValue); } @@ -85,4 +85,4 @@ public V getValue() { } } -} \ No newline at end of file +} diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java b/107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java index 4c5bf1ee5c..d9c3c13078 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java +++ b/107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java @@ -92,6 +92,7 @@ private void refreshAllCaches() { for (Map.Entry> namedCacheEntry : caches.entrySet()) { Eh107Cache cache = namedCacheEntry.getValue(); if (!cache.isClosed()) { + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); if (configuration.isManagementEnabled()) { enableManagement(cache, true); @@ -114,7 +115,7 @@ private Eh107Cache wrapEhcacheCache(String alias, InternalCache serviceConfiguration : cache.getRuntimeConfiguration().getServiceConfigurations()) { if (serviceConfiguration instanceof DefaultCopierConfiguration) { - DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; + DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration) serviceConfiguration; if(!copierConfig.getClazz().isAssignableFrom(IdentityCopier.class)) storeByValueOnHeap = true; break; diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java 
b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java index 3965fbec02..661e5392b3 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java +++ b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java @@ -22,11 +22,9 @@ import org.ehcache.core.statistics.BulkOps; import org.terracotta.context.ContextManager; import org.terracotta.context.TreeNode; -import org.terracotta.context.query.Matcher; import org.terracotta.context.query.Matchers; import org.terracotta.context.query.Query; import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.StatisticsManager; import org.terracotta.statistics.derived.LatencySampling; import org.terracotta.statistics.derived.MinMaxAverage; import org.terracotta.statistics.jsr166e.LongAdder; @@ -55,7 +53,7 @@ class Eh107CacheStatisticsMXBean extends Eh107MXBean implements javax.cache.mana private final OperationStatistic putIfAbsent; private final OperationStatistic replace; private final OperationStatistic conditionalRemove; - private final OperationStatistic authorityEviction; + private final OperationStatistic lowestTierEviction; private final Map bulkMethodEntries; private final LatencyMonitor averageGetTime; private final LatencyMonitor averagePutTime; @@ -71,7 +69,7 @@ class Eh107CacheStatisticsMXBean extends Eh107MXBean implements javax.cache.mana putIfAbsent = findCacheStatistic(cache, CacheOperationOutcomes.PutIfAbsentOutcome.class, "putIfAbsent"); replace = findCacheStatistic(cache, CacheOperationOutcomes.ReplaceOutcome.class, "replace"); conditionalRemove = findCacheStatistic(cache, CacheOperationOutcomes.ConditionalRemoveOutcome.class, "conditionalRemove"); - authorityEviction = findAuthoritativeTierStatistic(cache, StoreOperationOutcomes.EvictionOutcome.class, "eviction"); + lowestTierEviction = findLowestTierStatistic(cache, StoreOperationOutcomes.EvictionOutcome.class, "eviction"); averageGetTime = new 
LatencyMonitor(allOf(CacheOperationOutcomes.GetOutcome.class)); get.addDerivedStatistic(averageGetTime); @@ -139,7 +137,7 @@ public long getCacheRemovals() { @Override public long getCacheEvictions() { - return normalize(authorityEviction.sum(EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)) - compensatingCounters.cacheEvictions); + return normalize(lowestTierEviction.sum(EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)) - compensatingCounters.cacheEvictions); } @Override @@ -159,7 +157,7 @@ public float getAverageRemoveTime() { private long getMisses() { return getBulkCount(BulkOps.GET_ALL_MISS) + - get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)) + + get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)) + putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)) + replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_NOT_PRESENT)) + conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_MISSING)); @@ -167,8 +165,8 @@ private long getMisses() { private long getHits() { return getBulkCount(BulkOps.GET_ALL_HITS) + - get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)) + - putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)) + + get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)) + + putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.HIT)) + replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT, CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)) + conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS, CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); } @@ -189,6 +187,7 @@ private static float normalize(float value) { } static > OperationStatistic findCacheStatistic(Cache cache, Class type, String statName) { + 
@SuppressWarnings("unchecked") Query query = queryBuilder() .children() .filter(context(attributes(Matchers.>allOf(hasAttribute("name", statName), hasAttribute("type", type))))) @@ -201,45 +200,49 @@ static > OperationStatistic findCacheStatistic(Cache if (result.isEmpty()) { throw new RuntimeException("result must not be null"); } - return (OperationStatistic) result.iterator().next().getContext().attributes().get("this"); + @SuppressWarnings("unchecked") + OperationStatistic statistic = (OperationStatistic) result.iterator().next().getContext().attributes().get("this"); + return statistic; } - > OperationStatistic findAuthoritativeTierStatistic(Cache cache, Class type, String statName) { - Query storeQuery = queryBuilder() - .children() - .children() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.containsAll(Collections.singleton("store")); - } - }))))) - .build(); - - Set storeResult = storeQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); - if (storeResult.size() > 1) { - throw new RuntimeException("store result must be unique"); - } - if (storeResult.isEmpty()) { - throw new RuntimeException("store result must not be null"); - } - Object authoritativeTier = storeResult.iterator().next().getContext().attributes().get("authoritativeTier"); + > OperationStatistic findLowestTierStatistic(Cache cache, Class type, String statName) { + @SuppressWarnings("unchecked") Query statQuery = queryBuilder() - .children() + .descendants() .filter(context(attributes(Matchers.>allOf(hasAttribute("name", statName), hasAttribute("type", type))))) .build(); - Set statResult = statQuery.execute(Collections.singleton(StatisticsManager.nodeFor(authoritativeTier))); - if (statResult.size() > 1) { - throw new RuntimeException("stat result must be unique"); + Set statResult = statQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); + + 
if(statResult.size() < 1) { + throw new RuntimeException("Failed to find lowest tier statistic: " + statName + " , valid result Set sizes must 1 or more. Found result Set size of: " + statResult.size()); + } + + //if only 1 store then you don't need to find the lowest tier + if(statResult.size() == 1) { + @SuppressWarnings("unchecked") + OperationStatistic statistic = (OperationStatistic) statResult.iterator().next().getContext().attributes().get("this"); + return statistic; } - if (statResult.isEmpty()) { - throw new RuntimeException("stat result must not be null"); + + String lowestStoreType = "onheap"; + TreeNode lowestTierNode = null; + for(TreeNode treeNode : statResult) { + if(((Set)treeNode.getContext().attributes().get("tags")).size() != 1) { + throw new RuntimeException("Failed to find lowest tier statistic. \"tags\" set must be size 1"); + } + + String storeType = treeNode.getContext().attributes().get("tags").toString(); + if(storeType.compareToIgnoreCase(lowestStoreType) < 0) { + lowestStoreType = treeNode.getContext().attributes().get("tags").toString(); + lowestTierNode = treeNode; + } } - return (OperationStatistic) statResult.iterator().next().getContext().attributes().get("this"); + @SuppressWarnings("unchecked") + OperationStatistic statistic = (OperationStatistic) lowestTierNode.getContext().attributes().get("this"); + return statistic; } class CompensatingCounters { diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java b/107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java index 3d9dbc75e3..c65380d101 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java +++ b/107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java @@ -113,7 +113,7 @@ private static boolean isStoreByValue(Configuration config, CacheCo Collection> serviceConfigurations = ehcacheConfig.getServiceConfigurations(); for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { if 
(serviceConfiguration instanceof DefaultCopierConfiguration) { - DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; + DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; if(copierConfig.getType().equals(DefaultCopierConfiguration.Type.VALUE)) { if(copierConfig.getClazz().isAssignableFrom(IdentityCopier.class)) { return false; diff --git a/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java b/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java index f9b679424e..379027a635 100644 --- a/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java +++ b/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java @@ -76,18 +76,7 @@ public CacheManager getCacheManager(URI uri, ClassLoader classLoader, Properties } } - Configuration config; - try { - if (URI_DEFAULT.equals(uri)) { - config = new DefaultConfiguration(classLoader); - } else { - config = new XmlConfiguration(uri.toURL(), classLoader); - } - } catch (Exception e) { - throw new javax.cache.CacheException(e); - } - - return getCacheManager(uri, config, properties); + return getCacheManager(new ConfigSupplier(uri, classLoader), properties); } /** @@ -98,8 +87,8 @@ public CacheManager getCacheManager(URI uri, ClassLoader classLoader, Properties * * @return a cache manager */ - public Eh107CacheManager getCacheManager(URI uri, Configuration config) { - return getCacheManager(uri, config, new Properties()); + public CacheManager getCacheManager(URI uri, Configuration config) { + return getCacheManager(new ConfigSupplier(uri, config), new Properties()); } /** @@ -112,10 +101,15 @@ public Eh107CacheManager getCacheManager(URI uri, Configuration config) { * * @return a cache manager */ - public Eh107CacheManager getCacheManager(URI uri, Configuration config, Properties properties) { + public CacheManager getCacheManager(URI uri, Configuration config, Properties properties) { + return getCacheManager(new 
ConfigSupplier(uri, config), properties); + } + + Eh107CacheManager getCacheManager(ConfigSupplier configSupplier, Properties properties) { Eh107CacheManager cacheManager; ConcurrentMap byURI; - ClassLoader classLoader = config.getClassLoader(); + final ClassLoader classLoader = configSupplier.getClassLoader(); + final URI uri = configSupplier.getUri(); synchronized (cacheManagers) { byURI = cacheManagers.get(classLoader); @@ -131,7 +125,7 @@ public Eh107CacheManager getCacheManager(URI uri, Configuration config, Properti byURI.remove(uri, cacheManager); } - cacheManager = createCacheManager(uri, config, properties); + cacheManager = createCacheManager(uri, configSupplier.getConfiguration(), properties); byURI.put(uri, cacheManager); } } @@ -296,4 +290,45 @@ private static Properties cloneProperties(Properties properties) { return clone; } + static class ConfigSupplier { + private final URI uri; + private final ClassLoader classLoader; + private Configuration configuration; + + public ConfigSupplier(URI uri, ClassLoader classLoader) { + this.uri = uri; + this.classLoader = classLoader; + this.configuration = null; + } + + public ConfigSupplier(URI uri, Configuration configuration) { + this.uri = uri; + this.classLoader = configuration.getClassLoader(); + this.configuration = configuration; + } + + public URI getUri() { + return uri; + } + + public ClassLoader getClassLoader() { + return classLoader; + } + + public Configuration getConfiguration() { + if(configuration == null) { + try { + if (URI_DEFAULT.equals(uri)) { + configuration = new DefaultConfiguration(classLoader); + } else { + configuration = new XmlConfiguration(uri.toURL(), classLoader); + } + } catch (Exception e) { + throw new javax.cache.CacheException(e); + } + } + return configuration; + } + } + } diff --git a/107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java b/107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java index 770904798d..d3b80baec8 100644 --- 
a/107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java +++ b/107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java @@ -95,7 +95,7 @@ org.ehcache.event.EventType getEhcacheEventType() { @SuppressWarnings("unchecked") @Override - public void onEvent(org.ehcache.event.CacheEvent ehEvent) { + public void onEvent(org.ehcache.event.CacheEvent ehEvent) { Eh107CacheEntryEvent event = new Eh107CacheEntryEvent.NormalEvent(source, EventType.UPDATED, ehEvent, requestsOld); if (filter.evaluate(event)) { Set events = Collections.singleton(event); @@ -121,7 +121,7 @@ org.ehcache.event.EventType getEhcacheEventType() { @SuppressWarnings("unchecked") @Override - public void onEvent(org.ehcache.event.CacheEvent ehEvent) { + public void onEvent(org.ehcache.event.CacheEvent ehEvent) { Eh107CacheEntryEvent event = new Eh107CacheEntryEvent.RemovingEvent(source, EventType.REMOVED, ehEvent, requestsOld); if (filter.evaluate(event)) { Set events = Collections.singleton(event); @@ -147,7 +147,7 @@ org.ehcache.event.EventType getEhcacheEventType() { @SuppressWarnings("unchecked") @Override - public void onEvent(org.ehcache.event.CacheEvent ehEvent) { + public void onEvent(org.ehcache.event.CacheEvent ehEvent) { Eh107CacheEntryEvent event = new Eh107CacheEntryEvent.RemovingEvent(source, EventType.EXPIRED, ehEvent, requestsOld); if (filter.evaluate(event)) { Set events = Collections.singleton(event); @@ -173,7 +173,7 @@ org.ehcache.event.EventType getEhcacheEventType() { @SuppressWarnings("unchecked") @Override - public void onEvent(org.ehcache.event.CacheEvent ehEvent) { + public void onEvent(org.ehcache.event.CacheEvent ehEvent) { Eh107CacheEntryEvent event = new Eh107CacheEntryEvent.NormalEvent(source, EventType.CREATED, ehEvent, requestsOld); if (filter.evaluate(event)) { Set events = Collections.singleton(event); diff --git a/107/src/test/java/com/pany/ehcache/TestCacheEventListener.java b/107/src/test/java/com/pany/ehcache/TestCacheEventListener.java index 
c64d3db509..ca1e85e784 100644 --- a/107/src/test/java/com/pany/ehcache/TestCacheEventListener.java +++ b/107/src/test/java/com/pany/ehcache/TestCacheEventListener.java @@ -26,10 +26,10 @@ * TestCacheEventListener */ public class TestCacheEventListener implements CacheEventListener { - public static List> seen = new ArrayList>(); + public static List> seen = new ArrayList>(); @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { seen.add(event); } } diff --git a/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java b/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java index bc4ef947b8..77ed29f550 100644 --- a/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java +++ b/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java @@ -21,17 +21,23 @@ import org.ehcache.config.CacheRuntimeConfiguration; import org.ehcache.config.ResourceType; import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.core.config.DefaultConfiguration; import org.ehcache.core.internal.util.ValueSuppliers; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.jsr107.Eh107Configuration; +import org.ehcache.jsr107.EhcacheCachingProvider; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pany.domain.Client; import com.pany.domain.Product; +import java.io.File; import java.util.Random; import java.util.concurrent.TimeUnit; @@ -66,6 +72,8 @@ public class EhCache107ConfigurationIntegrationDocTest { private CacheManager cacheManager; private CachingProvider cachingProvider; + @Rule + public TemporaryFolder tempFolder = new TemporaryFolder(); @Before public void setUp() throws Exception { @@ -101,6 +109,7 @@ public void basicConfiguration() 
throws Exception { } @Test + @SuppressWarnings("unchecked") public void testGettingToEhcacheConfiguration() { // tag::mutableConfigurationExample[] MutableConfiguration configuration = new MutableConfiguration(); @@ -129,6 +138,7 @@ public void testGettingToEhcacheConfiguration() { } @Test + @SuppressWarnings("unchecked") public void testUsingEhcacheConfiguration() throws Exception { // tag::ehcacheBasedConfigurationExample[] CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, @@ -164,6 +174,7 @@ public void testWithoutEhcacheExplicitDependencyCanSpecifyXML() throws Exception } @Test + @SuppressWarnings("unchecked") public void testWithoutEhcacheExplicitDependencyAndNoCodeChanges() throws Exception { CacheManager manager = cachingProvider.getCacheManager( getClass().getResource("/org/ehcache/docs/ehcache-jsr107-template-override.xml").toURI(), @@ -208,6 +219,7 @@ public void testWithoutEhcacheExplicitDependencyAndNoCodeChanges() throws Except } @Test + @SuppressWarnings("unchecked") public void testTemplateOverridingStoreByValue() throws Exception { cacheManager = cachingProvider.getCacheManager( getClass().getResource("/org/ehcache/docs/ehcache-jsr107-template-override.xml").toURI(), @@ -267,4 +279,23 @@ public void testCacheThroughAtomicsXMLValid() throws Exception { getClass().getResource("/org/ehcache/docs/ehcache-jsr107-cache-through.xml").toURI(), getClass().getClassLoader()); } + + @Test + public void testCacheManagerLevelConfiguration() throws Exception { + // tag::ehcacheCacheManagerConfigurationExample[] + CachingProvider cachingProvider = Caching.getCachingProvider(); + EhcacheCachingProvider ehcacheProvider = (EhcacheCachingProvider) cachingProvider; // <1> + + DefaultConfiguration configuration = new DefaultConfiguration(ehcacheProvider.getDefaultClassLoader(), + new DefaultPersistenceConfiguration(getPersistenceDirectory())); // <2> + + CacheManager cacheManager = 
ehcacheProvider.getCacheManager(ehcacheProvider.getDefaultURI(), configuration); // <3> + // end::ehcacheCacheManagerConfigurationExample[] + + assertThat(cacheManager, notNullValue()); + } + + private File getPersistenceDirectory() { + return tempFolder.getRoot(); + } } diff --git a/107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java b/107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java index 8fb9693502..6f4c288a51 100644 --- a/107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java +++ b/107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java @@ -57,6 +57,7 @@ public void testEnabledAtCacheLevel() throws Exception { .toURI(), provider.getDefaultClassLoader()); Cache cache = cacheManager.getCache("stringCache", String.class, String.class); + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(configuration.isManagementEnabled(), is(true)); @@ -69,6 +70,7 @@ public void testEnabledAtCacheManagerLevel() throws Exception { .toURI(), provider.getDefaultClassLoader()); Cache cache = cacheManager.getCache("stringCache", String.class, String.class); + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(configuration.isManagementEnabled(), is(true)); @@ -81,6 +83,7 @@ public void testCacheLevelOverridesCacheManagerLevel() throws Exception { .toURI(), provider.getDefaultClassLoader()); Cache cache = cacheManager.getCache("overrideCache", String.class, String.class); + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(configuration.isManagementEnabled(), is(false)); @@ -93,6 +96,7 @@ public void testCacheLevelOnlyOneOverridesCacheManagerLevel() throws Exception { .toURI(), provider.getDefaultClassLoader()); Cache cache = 
cacheManager.getCache("overrideOneCache", String.class, String.class); + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(configuration.isManagementEnabled(), is(true)); @@ -107,6 +111,7 @@ public void testEnableCacheLevelProgrammatic() throws Exception { .add(new Jsr107CacheConfiguration(ConfigurationElementState.ENABLED, ConfigurationElementState.ENABLED)); Cache cache = cacheManager.createCache("test", Eh107Configuration.fromEhcacheCacheConfiguration(configurationBuilder)); + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(configuration.isManagementEnabled(), is(true)); assertThat(configuration.isStatisticsEnabled(), is(true)); @@ -125,6 +130,7 @@ public void testManagementDisabledOverriddenFromTemplate() throws Exception { Cache cache = cacheManager.createCache("enables-mbeans", configuration); + @SuppressWarnings("unchecked") Eh107Configuration eh107Configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(eh107Configuration.isManagementEnabled(), is(true)); assertThat(eh107Configuration.isStatisticsEnabled(), is(true)); @@ -143,6 +149,7 @@ public void testManagementEnabledOverriddenFromTemplate() throws Exception { Cache cache = cacheManager.createCache("disables-mbeans", configuration); + @SuppressWarnings("unchecked") Eh107Configuration eh107Configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(eh107Configuration.isManagementEnabled(), is(false)); assertThat(eh107Configuration.isStatisticsEnabled(), is(false)); @@ -158,6 +165,7 @@ public void basicJsr107StillWorks() throws Exception { configuration.setStatisticsEnabled(true); Cache cache = cacheManager.createCache("cache", configuration); + @SuppressWarnings("unchecked") Eh107Configuration eh107Configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(eh107Configuration.isManagementEnabled(), 
is(true)); diff --git a/107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java b/107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java index 68e1cd7007..d787eef9f8 100644 --- a/107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java +++ b/107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java @@ -31,6 +31,7 @@ public class Eh107CacheTypeTest { @Test + @SuppressWarnings("unchecked") public void testCompileTimeTypeSafety() throws Exception { CachingProvider provider = Caching.getCachingProvider(); javax.cache.CacheManager cacheManager = @@ -70,10 +71,11 @@ public void testRunTimeTypeSafety() throws Exception { cache1Conf.setTypes(Long.class, String.class); javax.cache.Cache cache = cacheManager.createCache("cache1", cache1Conf); - Configuration cache1CompleteConf = cache.getConfiguration(Configuration.class); + @SuppressWarnings("unchecked") + Configuration cache1CompleteConf = cache.getConfiguration(Configuration.class); - assertThat((Class)cache1CompleteConf.getKeyType(), is(equalTo(Long.class))); - assertThat((Class)cache1CompleteConf.getValueType(), is(equalTo(String.class))); + assertThat(cache1CompleteConf.getKeyType(), is(equalTo(Long.class))); + assertThat(cache1CompleteConf.getValueType(), is(equalTo(String.class))); try { cacheManager.getCache("cache1"); @@ -94,9 +96,10 @@ public void testTypeOverriding() throws Exception { MutableConfiguration cache1Conf = new MutableConfiguration(); cache1Conf.setTypes(Long.class, String.class); javax.cache.Cache cache = cacheManager.createCache("defaultCache", cache1Conf); - Configuration cache1CompleteConf = cache.getConfiguration(Configuration.class); - assertThat((Class)cache1CompleteConf.getKeyType(), is(equalTo(Long.class))); - assertThat((Class)cache1CompleteConf.getValueType(), is(equalTo(String.class))); + @SuppressWarnings("unchecked") + Configuration cache1CompleteConf = cache.getConfiguration(Configuration.class); + assertThat(cache1CompleteConf.getKeyType(), is(equalTo(Long.class))); + 
assertThat(cache1CompleteConf.getValueType(), is(equalTo(String.class))); } @Test diff --git a/107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java b/107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java index b773960a6d..9898665083 100644 --- a/107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java +++ b/107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java @@ -92,6 +92,7 @@ public boolean isStoreByValue() { } }); + @SuppressWarnings("unchecked") CompleteConfiguration configuration = cache.getConfiguration(CompleteConfiguration.class); assertThat(configuration, notNullValue()); assertThat(configuration.isStoreByValue(), is(true)); diff --git a/107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java b/107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java index 1d1b296452..64b22daf32 100644 --- a/107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java +++ b/107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java @@ -31,6 +31,7 @@ import javax.cache.configuration.MutableConfiguration; import javax.cache.spi.CachingProvider; +import org.ehcache.config.Configuration; import org.junit.Test; import com.pany.domain.Customer; @@ -77,6 +78,24 @@ public void testCacheUsesCacheManagerClassLoaderForDefaultURI() { } } + @Test + public void testClassLoadCount() throws Exception { + EhcacheCachingProvider cachingProvider = (EhcacheCachingProvider)Caching.getCachingProvider(); + URI uri = cachingProvider.getDefaultURI(); + ClassLoader classLoader = cachingProvider.getDefaultClassLoader(); + CountingConfigSupplier configSupplier = new CountingConfigSupplier(uri, classLoader); + + assertEquals(configSupplier.configCount, 0); + + cachingProvider.getCacheManager(configSupplier, new Properties()); + + assertEquals(configSupplier.configCount, 1); + + cachingProvider.getCacheManager(configSupplier, new Properties()); + + assertEquals(configSupplier.configCount, 1); + } + private class LimitedClassLoader 
extends ClassLoader { private final ClassLoader delegate; @@ -94,4 +113,17 @@ public Class loadClass(String name) throws ClassNotFoundException { } } + private static class CountingConfigSupplier extends EhcacheCachingProvider.ConfigSupplier { + private int configCount = 0; + + public CountingConfigSupplier(URI uri, ClassLoader classLoader) { + super(uri, classLoader); + } + + @Override + public Configuration getConfiguration() { + configCount++; + return super.getConfiguration(); + } + } } diff --git a/107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java b/107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java index bddba5a508..70b3a6e5dc 100644 --- a/107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java +++ b/107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java @@ -20,6 +20,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import javax.cache.Cache; import javax.cache.CacheManager; @@ -41,15 +43,16 @@ * LoadAtomicsWith107Test */ public class LoadAtomicsWith107Test { + @Mock private CacheLoader cacheLoader; + @Mock private CacheWriter cacheWriter; private Cache testCache; private CacheManager cacheManager; @Before public void setUp() throws Exception { - cacheLoader = mock(CacheLoader.class); - cacheWriter = mock(CacheWriter.class); + MockitoAnnotations.initMocks(this); CachingProvider provider = Caching.getCachingProvider(); cacheManager = provider.getCacheManager(this.getClass().getResource("/ehcache-loader-writer-107-load-atomics.xml").toURI(), getClass().getClassLoader()); diff --git a/107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java b/107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java index f54d6ce074..968e927869 100644 --- a/107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java +++ b/107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java @@ -19,6 +19,8 @@ import 
org.junit.After; import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import java.util.Set; @@ -42,10 +44,15 @@ */ public class LoaderWriterConfigTest { + @Mock + private CacheLoader cacheLoader; + @Mock + private CacheWriter cacheWriter; private CachingProvider cachingProvider; @Before public void setUp() { + MockitoAnnotations.initMocks(this); cachingProvider = Caching.getCachingProvider(); } @@ -55,10 +62,8 @@ public void tearDown() { } @Test + @SuppressWarnings("unchecked") public void enablingWriteThroughDoesNotForceReadThrough() throws Exception { - final CacheLoader cacheLoader = mock(CacheLoader.class); - final CacheWriter cacheWriter = mock(CacheWriter.class); - MutableConfiguration config = getConfiguration(false, cacheLoader, true, cacheWriter); Cache cache = cachingProvider.getCacheManager().createCache("writingCache", config); @@ -75,9 +80,6 @@ public void enablingWriteThroughDoesNotForceReadThrough() throws Exception { @Test public void enablingReadThroughDoesNotForceWriteThrough() throws Exception { - final CacheLoader cacheLoader = mock(CacheLoader.class); - final CacheWriter cacheWriter = mock(CacheWriter.class); - MutableConfiguration config = getConfiguration(true, cacheLoader, false, cacheWriter); Cache cache = cachingProvider.getCacheManager().createCache("writingCache", config); diff --git a/107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java b/107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java index 5c00587331..56f46ab81a 100644 --- a/107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java +++ b/107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java @@ -20,6 +20,9 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -35,8 +38,6 @@ import static 
org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; @@ -47,15 +48,16 @@ */ public class LoaderWriterTest { + @Mock private CacheLoader cacheLoader; + @Mock private CacheWriter cacheWriter; private Cache testCache; private CacheManager cacheManager; @Before public void setUp() throws Exception { - cacheLoader = mock(CacheLoader.class); - cacheWriter = mock(CacheWriter.class); + MockitoAnnotations.initMocks(this); CachingProvider provider = Caching.getCachingProvider(); cacheManager = provider.getCacheManager(this.getClass().getResource("/ehcache-loader-writer-107.xml").toURI(), getClass().getClassLoader()); @@ -273,4 +275,8 @@ public void testSimpleRemove2ArgsWithLoaderAndWriter_existsInStore_notEquals() t verifyZeroInteractions(cacheWriter); } + private void reset(Object mock) { + Mockito.reset(mock); + } + } diff --git a/README.adoc b/README.adoc index 01b173a7ce..589c601756 100644 --- a/README.adoc +++ b/README.adoc @@ -12,16 +12,17 @@ For samples, documentation, and usage information, please see http://ehcache.org == Current release -We released 3.1.1 on July 18th 2016. +We released 3.2.0 on December 12th 2016. -The https://github.com/ehcache/ehcache3/releases/tag/v3.1.1[release notes] contain the links to the artifacts and the documentation to help you get started. +The https://github.com/ehcache/ehcache3/releases/tag/v3.2.0[release notes] contain the links to the artifacts and the documentation to help you get started. -You should consider upgrading to 3.1.x as it does all 3.0.x does and more with a fully compatible API. -The only thing to note is that transactional support has been moved to a separate jar. 
+You should consider upgrading to 3.2.x as it does all 3.0.x and 3.1.x do and more with a fully compatible API. +The only thing to note compared to 3.0.x is that transactional support has been moved to a separate jar. == Current development & next release -We are now working on the missing features of the clustering tier of Ehcache 3 which will be included in upcoming 3.1.x releases. -We may still do a last 3.0.x release to include all fixes that have been made on it, but this is now less a priority. +We are still working on the missing features of the clustering tier of Ehcache 3 which will be included in upcoming releases. +We may still do 3.1.x release to include all fixes that have been made on it, but this is now less a priority. +There is no longer any plan for a 3.0.x release. See the https://github.com/ehcache/ehcache3/milestones[milestones on GitHub] for more details on the current status. diff --git a/api/build.gradle b/api/build.gradle index dcd161b0fa..8119b013a6 100644 --- a/api/build.gradle +++ b/api/build.gradle @@ -20,3 +20,6 @@ checkstyle { configFile = file("$projectDir/config/checkstyle.xml") } +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git a/api/src/main/java/org/ehcache/config/Eviction.java b/api/src/main/java/org/ehcache/config/Eviction.java index 8a3c9109c4..64dd5f7869 100644 --- a/api/src/main/java/org/ehcache/config/Eviction.java +++ b/api/src/main/java/org/ehcache/config/Eviction.java @@ -21,7 +21,7 @@ */ public final class Eviction { - private static final EvictionAdvisor NO_ADVICE = new EvictionAdvisor() { + private static final EvictionAdvisor NO_ADVICE = new EvictionAdvisor() { @Override public boolean adviseAgainstEviction(Object key, Object value) { return false; @@ -31,12 +31,10 @@ public boolean adviseAgainstEviction(Object key, Object value) { /** * Returns an {@link EvictionAdvisor} where no mappings are advised against eviction. 
* - * @param the key type for the advisor - * @param the value type for the advisor * @return an advisor where no mappings are advised against eviction */ - public static EvictionAdvisor noAdvice() { - return (EvictionAdvisor) NO_ADVICE; + public static EvictionAdvisor noAdvice() { + return NO_ADVICE; } } diff --git a/api/src/main/java/org/ehcache/event/CacheEventListener.java b/api/src/main/java/org/ehcache/event/CacheEventListener.java index f12f1658f7..4cc8a38475 100644 --- a/api/src/main/java/org/ehcache/event/CacheEventListener.java +++ b/api/src/main/java/org/ehcache/event/CacheEventListener.java @@ -38,6 +38,6 @@ public interface CacheEventListener { * * @param event the actual {@code CacheEvent} */ - void onEvent(CacheEvent event); + void onEvent(CacheEvent event); } diff --git a/api/src/main/java/org/ehcache/spi/persistence/StateHolder.java b/api/src/main/java/org/ehcache/spi/persistence/StateHolder.java new file mode 100644 index 0000000000..70d3c94dc9 --- /dev/null +++ b/api/src/main/java/org/ehcache/spi/persistence/StateHolder.java @@ -0,0 +1,57 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.spi.persistence; + +import java.util.Map; +import java.util.Set; + +/** + * A {@code Map} like structure that can hold key value mappings. 
+ * + * @param type of Keys + * @param type of Values + */ +public interface StateHolder { + + /** + * If the specified key is not already associated with a value (or is mapped + * to {@code null}) associates it with the given value and returns + * {@code null}, else returns the current value. + * + * @param key a key + * @param value a value + * @return the previous value associated with the specified key, or + * {@code null} if there was no mapping for the key. + */ + V putIfAbsent(K key, V value); + + /** + * Retrieves the value mapped to the given {@code key} + * + * @param key a key + * @return the value mapped to the key + */ + V get(K key); + + /** + * Retrieves all the entries in the {@code StateHolder} as a {@code Set} of {@code Map.Entry} instances. + * + * @return the set of this {@code StateHolder} mappings + */ + Set> entrySet(); + +} diff --git a/api/src/main/java/org/ehcache/spi/persistence/StateRepository.java b/api/src/main/java/org/ehcache/spi/persistence/StateRepository.java index 898a89a8ba..25ea836c67 100644 --- a/api/src/main/java/org/ehcache/spi/persistence/StateRepository.java +++ b/api/src/main/java/org/ehcache/spi/persistence/StateRepository.java @@ -17,7 +17,6 @@ package org.ehcache.spi.persistence; import java.io.Serializable; -import java.util.concurrent.ConcurrentMap; /** * A repository allowing to preserve state in the context of a {@link org.ehcache.Cache}. @@ -25,17 +24,17 @@ public interface StateRepository { /** - * Gets a named persistent map rooted in the current {@code StateRepository}. + * Gets a named state holder rooted in the current {@code StateRepository}. *

- * If the map existed already, it is returned with its content fully available. + * If the state holder existed already, it is returned with its content fully available. *

* - * @param name the map name - * @param keyClass concrete map key type - * @param valueClass concrete map value type - * @param the map key type, must be {@code Serializable} - * @param the map value type, must be {@code Serializable} - * @return a map + * @param name the state holder name + * @param keyClass concrete key type + * @param valueClass concrete value type + * @param the key type, must be {@code Serializable} + * @param the value type, must be {@code Serializable} + * @return a state holder */ - ConcurrentMap getPersistentConcurrentMap(String name, Class keyClass, Class valueClass); + StateHolder getPersistentStateHolder(String name, Class keyClass, Class valueClass); } diff --git a/api/src/main/java/org/ehcache/spi/serialization/Serializer.java b/api/src/main/java/org/ehcache/spi/serialization/Serializer.java index 96b42c33ca..90beca481f 100644 --- a/api/src/main/java/org/ehcache/spi/serialization/Serializer.java +++ b/api/src/main/java/org/ehcache/spi/serialization/Serializer.java @@ -23,15 +23,8 @@ * Implementations must be thread-safe. *

*

- * When used within the default serialization provider, there are additional requirements. - * The implementations must define either or both of the two constructors: - *

- *
Serializer(ClassLoader loader) - *
This constructor is used to initialize the serializer for transient caches. - *
Serializer(ClassLoader loader, org.ehcache.spi.persistence.StateRepository stateRepository) - *
This constructor is used to initialize the serializer for persistent caches and allows them to store any relevant - * state in the provided repository. - *
+ * When used within the default serialization provider, there is an additional requirement. + * The implementations must define a constructor that takes in a {@code ClassLoader}. * The {@code ClassLoader} value may be {@code null}. If not {@code null}, the class loader * instance provided should be used during deserialization to load classes needed by the deserialized objects. *

diff --git a/api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java b/api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java new file mode 100644 index 0000000000..3b3d0e0ff0 --- /dev/null +++ b/api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java @@ -0,0 +1,47 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.spi.serialization; + +import org.ehcache.spi.persistence.StateRepository; + +/** + * Implementations of this interface can have their state maintained in a {@code StateRepository}. + * The state will be maintained by the authoritative tier of the cache for which this is configured. + *

+ * Implementations must be thread-safe. + *

+ *

+ * When used within the default serialization provider, there is an additional constructor requirement. + * The implementations must define a constructor that takes in a {@code ClassLoader}. + * Post instantiation, the state repository will be injected with the {@code init} method invocation. + * This is guaranteed to happen before any serialization/deserialization interaction. + *

+ * + * @param the type of the instances to serialize + * + * @see Serializer + */ +public interface StatefulSerializer extends Serializer { + + /** + * This method is used to inject a {@code StateRepository} to the serializer + * by the authoritative tier of a cache during the cache initialization. + * The passed in state repository will have the persistent properties of the injecting tier. + * + * @param stateRepository the state repository + */ + void init(StateRepository stateRepository); +} diff --git a/api/src/main/java/org/ehcache/spi/service/MaintainableService.java b/api/src/main/java/org/ehcache/spi/service/MaintainableService.java index d5fc4adce7..a9d3e76adf 100644 --- a/api/src/main/java/org/ehcache/spi/service/MaintainableService.java +++ b/api/src/main/java/org/ehcache/spi/service/MaintainableService.java @@ -22,11 +22,23 @@ */ @PluralService public interface MaintainableService extends Service { + + /** + * Defines Maintenance scope + */ + enum MaintenanceScope { + /** Will impact the cache manager */ + CACHE_MANAGER, + /** Will impact one or many caches */ + CACHE + } + /** * Start this service for maintenance, based on its default configuration. - * * @param serviceProvider enables to depend on other maintainable services + * @param maintenanceScope the scope of the maintenance + * */ - void startForMaintenance(ServiceProvider serviceProvider); + void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope); } diff --git a/build.gradle b/build.gradle index b850391e31..c5aafb303e 100644 --- a/build.gradle +++ b/build.gradle @@ -14,32 +14,93 @@ * limitations under the License. 
*/ import scripts.* +import org.gradle.internal.jvm.Jvm + +buildscript { + repositories { + mavenCentral() + } + dependencies { + classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.5.3" + } +} + +// This adds tasks to auto close or release nexus staging repos +// see https://github.com/Codearte/gradle-nexus-staging-plugin/ +project.plugins.apply 'io.codearte.nexus-staging' +project.nexusStaging { + username = project.sonatypeUser + password = project.sonatypePwd + packageGroup = 'org.ehcache' +} + +// Disable automatic promotion for added safety +closeAndPromoteRepository.enabled = false + ext { - baseVersion = '3.1.2-SNAPSHOT' + + baseVersion = findProperty('overrideVersion') ?: '3.2.1-SNAPSHOT' // Third parties - offheapVersion = '2.2.2' - statisticVersion = '1.1.0' + offheapVersion = '2.3.2' + statisticVersion = '1.4.1' jcacheVersion = '1.0.0' slf4jVersion = '1.7.7' sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.6.beta5' + terracottaPlatformVersion = '5.1.1-pre3' managementVersion = terracottaPlatformVersion - terracottaApisVersion = '1.0.6.beta' - terracottaCoreVersion = '5.0.6-beta2' + terracottaApisVersion = '1.1.0' + terracottaCoreVersion = '5.1.1-pre2' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.6.beta2' + terracottaPassthroughTestingVersion = '1.1.1-pre2' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.6-beta2' + galvanVersion = '1.1.1-pre2' + + // Tools + findbugsVersion = '3.0.1' utils = new Utils(baseVersion, logger) isReleaseVersion = !baseVersion.endsWith('SNAPSHOT') isCloudbees = System.getenv('JENKINS_URL')?.contains('cloudbees') - logger.info("Is cloudbees? 
$isCloudbees") +} + +if (deployUrl.contains('nexus')) { + ext { + deployUser = tcDeployUser + deployPwd = tcDeployPassword + } +} else { + ext { + deployUser = sonatypeUser + deployPwd = sonatypePwd + } +} + +// Java 6 build setup +def java6Error = 'Set the property \'java6Home\' in your $HOME/.gradle/gradle.properties pointing to a Java 6 installation' +assert (JavaVersion.current().isJava8Compatible()) : 'The Ehcache 3 build requires Java 8 to run and a configured Java 6 installation\n' + java6Error +assert hasProperty('java6Home') : 'The Ehcache 3 build requires a configured Java 6 installation\n' + java6Error +def java6HomeLocation = new File(java6Home) +def testJavaHomeLocation = java6HomeLocation + +if (hasProperty('testVM')) { + switch (testVM) { + case '6': + break + case '7': + assert hasProperty('java7Home') : 'Set the property \'java7Home\' in your $HOME/.gradle/gradle.properties pointing to a Java 7 installation' + testJavaHomeLocation = new File(java7Home) + break + case '8': + testJavaHomeLocation = Jvm.current().javaHome + break + default: + throw new AssertionError("Unrecognized 'testVM' value $testVM - Accepted values are 6, 7 or 8") + } } subprojects { @@ -58,6 +119,9 @@ subprojects { targetCompatibility = 1.6 repositories { + if (project.hasProperty('mvnlocal')) { + mavenLocal() + } mavenCentral() maven { url "http://repo.terracotta.org/maven2" } } @@ -72,12 +136,12 @@ subprojects { } dependencies { - if (JavaVersion.current().compareTo(JavaVersion.VERSION_1_7) >= 0) { - compileOnly 'com.google.code.findbugs:annotations:3.0.0' - } else { - compileOnly 'com.google.code.findbugs:annotations:2.0.3' + compileOnly "com.google.code.findbugs:annotations:$parent.findbugsVersion" + testCompileOnly "com.google.code.findbugs:annotations:$parent.findbugsVersion" + testCompile 'junit:junit:4.12', 'org.assertj:assertj-core:1.7.1', 'org.hamcrest:hamcrest-library:1.3' + testCompile('org.mockito:mockito-core:1.9.5') { + exclude group:'org.hamcrest', 
module:'hamcrest-core' } - testCompile 'junit:junit:4.11', 'org.hamcrest:hamcrest-library:1.3', 'org.mockito:mockito-core:1.9.5' testRuntime "org.slf4j:slf4j-simple:$parent.slf4jVersion" } @@ -87,7 +151,7 @@ subprojects { } test { - maxHeapSize = "512m" + maxHeapSize = "1408m" systemProperty 'java.awt.headless', 'true' if (parent.isCloudbees) { systemProperty 'disable.concurrent.tests', 'true' @@ -113,14 +177,6 @@ subprojects { exclude '**/internal/**' } - if (JavaVersion.current().isJava8Compatible()) { - allprojects { - tasks.withType(Javadoc) { - options.addStringOption('Xdoclint:none', '-quiet') - } - } - } - task javadocJar(type: Jar, dependsOn: javadoc) { from javadoc.destinationDir classifier = 'javadoc' @@ -136,17 +192,13 @@ subprojects { checkstyle { configFile = file("$rootDir/config/checkstyle.xml") configProperties = ['projectDir':projectDir, 'rootDir':rootDir] - toolVersion = '5.7' + toolVersion = '5.9' } findbugs { ignoreFailures = false sourceSets = [sourceSets.main] - if (JavaVersion.current().compareTo(JavaVersion.VERSION_1_7) >= 0) { - findbugs.toolVersion = '3.0.1' - } else { - findbugs.toolVersion = '2.0.3' - } + findbugs.toolVersion = parent.findbugsVersion } jacoco { @@ -159,6 +211,28 @@ subprojects { csv.enabled false } } + + tasks.withType(AbstractCompile) { + options.with { + fork = true + forkOptions.executable = utils.executables(java6HomeLocation).javac + } + } + tasks.withType(Test) { + executable = utils.executables(testJavaHomeLocation).java + } + tasks.withType(JavaExec) { + executable = utils.executables(testJavaHomeLocation).java + } + tasks.withType(Javadoc) { + options.addStringOption('Xdoclint:none', '-quiet') + } + + configurations.all { + resolutionStrategy { + failOnVersionConflict() + } + } } allprojects { diff --git a/buildSrc/src/main/groovy/EhDeploy.groovy b/buildSrc/src/main/groovy/EhDeploy.groovy index 282c40b8e1..6c280a54b8 100644 --- a/buildSrc/src/main/groovy/EhDeploy.groovy +++ 
b/buildSrc/src/main/groovy/EhDeploy.groovy @@ -1,6 +1,8 @@ import org.gradle.api.Plugin import org.gradle.api.Project +import org.gradle.api.artifacts.maven.Conf2ScopeMappingContainer import org.gradle.api.artifacts.maven.MavenDeployment +import org.gradle.api.plugins.MavenPlugin import org.gradle.plugins.signing.Sign import scripts.Utils @@ -32,35 +34,56 @@ class EhDeploy implements Plugin { project.plugins.apply 'signing' project.plugins.apply 'maven' + project.configurations { + provided + } + + project.sourceSets { + main { + compileClasspath += project.configurations.provided + } + test { + compileClasspath += project.configurations.provided + runtimeClasspath += project.configurations.provided + } + } + project.signing { required { project.isReleaseVersion && project.gradle.taskGraph.hasTask("uploadArchives") } sign project.configurations.getByName('archives') } + def artifactFiltering = { + pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.provided, Conf2ScopeMappingContainer.PROVIDED) + + utils.pomFiller(pom, project.subPomName, project.subPomDesc) + + } + + project.install { + repositories.mavenInstaller artifactFiltering + } + project.uploadArchives { repositories { - mavenDeployer { + mavenDeployer ({ beforeDeployment { MavenDeployment deployment -> project.signing.signPom(deployment)} if (project.isReleaseVersion) { - repository(id: 'sonatype-nexus-staging', url: 'https://oss.sonatype.org/service/local/staging/deploy/maven2/') { - authentication(userName: project.sonatypeUser, password: project.sonatypePwd) + repository(url: project.deployUrl) { + authentication(userName: project.deployUser, password: project.deployPwd) } } else { repository(id: 'sonatype-nexus-snapshot', url: 'https://oss.sonatype.org/content/repositories/snapshots') { authentication(userName: project.sonatypeUser, password: project.sonatypePwd) } } - } + } << artifactFiltering) } } def installer = project.install.repositories.mavenInstaller def deployer 
= project.uploadArchives.repositories.mavenDeployer - [installer, deployer]*.pom*.whenConfigured {pom -> - utils.pomFiller(pom, project.subPomName, project.subPomDesc) - } - } } diff --git a/buildSrc/src/main/groovy/EhDistribute.groovy b/buildSrc/src/main/groovy/EhDistribute.groovy index af43ca6361..a815108ba5 100644 --- a/buildSrc/src/main/groovy/EhDistribute.groovy +++ b/buildSrc/src/main/groovy/EhDistribute.groovy @@ -40,6 +40,7 @@ class EhDistribute implements Plugin { def OSGI_OVERRIDE_KEYS = ['Import-Package', 'Export-Package', 'Private-Package', 'Tool', 'Bnd-LastModified', 'Created-By', 'Require-Capability'] project.configurations { + shadowCompile shadowProvided } diff --git a/buildSrc/src/main/groovy/EhOsgi.groovy b/buildSrc/src/main/groovy/EhOsgi.groovy index 1704876074..521d58f400 100644 --- a/buildSrc/src/main/groovy/EhOsgi.groovy +++ b/buildSrc/src/main/groovy/EhOsgi.groovy @@ -54,7 +54,7 @@ class EhOsgi implements Plugin { if (project.hasProperty('shadowJar')) { classesDir = project.shadowJar.archivePath - classpath = project.files(project.configurations.shadow, project.configurations.shadowProvided) + classpath = project.files(project.configurations.shadowCompile, project.configurations.shadowProvided) } else { classesDir = new File(project.buildDir, 'classes/main') //can't figure out where to get this value classpath = project.sourceSets.main.compileClasspath diff --git a/buildSrc/src/main/groovy/EhPomMangle.groovy b/buildSrc/src/main/groovy/EhPomMangle.groovy index a20274e57e..271271ab3d 100644 --- a/buildSrc/src/main/groovy/EhPomMangle.groovy +++ b/buildSrc/src/main/groovy/EhPomMangle.groovy @@ -27,7 +27,7 @@ import scripts.Utils * Removes all implicit dependencies from the pom * and adds only what is specified in (from shadowJar) * - * project.configurations.shadow (as compile) + * project.configurations.shadowCompile (as compile) * project.configurations.shadowProvided (as provided) * * as well as (these do not affect shadow) @@ -49,7 +49,7 @@ 
class EhPomMangle implements Plugin { project.plugins.apply 'signing' project.configurations { - shadow + shadowCompile shadowProvided pomOnlyCompile pomOnlyProvided @@ -60,14 +60,14 @@ class EhPomMangle implements Plugin { pom.scopeMappings.mappings.remove(project.configurations.runtime) pom.scopeMappings.mappings.remove(project.configurations.testCompile) pom.scopeMappings.mappings.remove(project.configurations.testRuntime) - pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.shadow, Conf2ScopeMappingContainer.COMPILE) + pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.shadowCompile, Conf2ScopeMappingContainer.COMPILE) pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.shadowProvided, Conf2ScopeMappingContainer.PROVIDED) //Anything extra to add to pom that isn't in the shadowed jar or compilation pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.pomOnlyCompile, Conf2ScopeMappingContainer.COMPILE) pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.pomOnlyProvided, Conf2ScopeMappingContainer.PROVIDED) - utils.pomFiller(pom, 'Ehcache', 'Ehcache single jar, containing all modules') + utils.pomFiller(pom, project.subPomName, project.subPomDesc) } @@ -81,8 +81,8 @@ class EhPomMangle implements Plugin { beforeDeployment { MavenDeployment deployment -> project.signing.signPom(deployment)} if (project.isReleaseVersion) { - repository(id: 'sonatype-nexus-staging', url: 'https://oss.sonatype.org/service/local/staging/deploy/maven2/') { - authentication(userName: project.sonatypeUser, password: project.sonatypePwd) + repository(url: project.deployUrl) { + authentication(userName: project.deployUser, password: project.deployPwd) } } else { repository(id: 'sonatype-nexus-snapshot', url: 'https://oss.sonatype.org/content/repositories/snapshots') { diff --git a/buildSrc/src/main/groovy/MavenToolchain.groovy 
b/buildSrc/src/main/groovy/MavenToolchain.groovy deleted file mode 100644 index af5b930343..0000000000 --- a/buildSrc/src/main/groovy/MavenToolchain.groovy +++ /dev/null @@ -1,50 +0,0 @@ -import org.gradle.api.JavaVersion -import org.gradle.api.resources.MissingResourceException; -import org.gradle.internal.os.OperatingSystem; - -/** - * Emulates maven toolchains support by looking at the user's - * ~/.m2/toolchains.xml - * - * Throws if this file is not found - * - * Provides a closure to use to find the correct jvm's executable, eg: - * MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') - */ -class MavenToolchain { - - static def mavenToolchainDefinitions = { - String userHome = System.getProperty("user.home"); - File toolchain = new File(userHome, ".m2" + File.separator + "toolchains.xml") - if (toolchain.isFile()) { - def xmlSlurper = new XmlSlurper() - return new XmlSlurper().parse(toolchain) - } else { - throw new MissingResourceException("toolchain file not found at ${toolchain}" ); - } - } - - static def toolchains; - static { - def xml = mavenToolchainDefinitions() - if (xml == null) { - toolchains = [:] - } else { - toolchains = xml.toolchain.findAll({ it.type.text() == 'jdk' }).collectEntries{[JavaVersion.toVersion(it.provides.version.text()), it.configuration.jdkHome.text()]} - } - } - - private static def exe = OperatingSystem.current().isWindows() ? 
'.exe' : '' - - static def javaHome = { v -> - def jdk = toolchains.get(v); - if (jdk == null) { - - throw new MissingResourceException("JDK $v not available - check your toolchains.xml") - } else { - return jdk; - } - } - - static def javaExecutable = { v, exec -> MavenToolchain.javaHome(v) + ['', 'bin', exec].join(File.separator) + exe } -} diff --git a/buildSrc/src/main/groovy/scripts/Utils.groovy b/buildSrc/src/main/groovy/scripts/Utils.groovy index 1388470e90..40a88267d3 100644 --- a/buildSrc/src/main/groovy/scripts/Utils.groovy +++ b/buildSrc/src/main/groovy/scripts/Utils.groovy @@ -14,12 +14,15 @@ * limitations under the License. */ -package scripts; +package scripts + +import org.gradle.internal.os.OperatingSystem class Utils { String version String revision + Map> executablesPath = [:] Utils(version, logger) { this.version = version @@ -85,4 +88,18 @@ class Utils { } } } + + def executables(path) { + def execMap = executablesPath.get(path) + if (execMap == null) { + execMap = [:].withDefault { execName -> + def extension = OperatingSystem.current().isWindows() ? ".exe" : "" + def executable = new File(path, 'bin' + File.separator + execName + extension) + assert executable.exists(): "There is no ${execName} executable in ${path}" + executable + } + executablesPath.put(path, execMap) + } + execMap + } } diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 9e1c16d5d3..fd4dc8a556 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -14,13 +14,16 @@ * limitations under the License. 
*/ +import org.gradle.internal.jvm.Jvm + apply plugin: EhDeploy dependencies { compileOnly project(':api') compileOnly project(':xml') - compile project(':clustered:common') - compile "org.terracotta:entity-client-api:$parent.entityApiVersion" + compile project(':clustered:common'), "org.slf4j:slf4j-api:$parent.slf4jVersion" + provided "org.terracotta:entity-client-api:$parent.entityApiVersion" + provided "org.terracotta:runnel:$parent.terracottaPlatformVersion" testCompile project(':api') testCompile project(':xml') @@ -31,3 +34,17 @@ dependencies { testCompile "org.terracotta:entity-test-lib:$parent.entityTestLibVersion" testCompile "org.terracotta:passthrough-server:$parent.terracottaPassthroughTestingVersion" } + +compileTestJava { + options.forkOptions.executable = Jvm.current().javacExecutable + sourceCompatibility = 1.8 + targetCompatibility = 1.8 +} + +test { + executable = Jvm.current().javaExecutable +} + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index 7fd43c4d1a..803109f3df 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -27,15 +27,17 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.ClusterException; +import org.ehcache.clustered.common.internal.exceptions.InvalidClientIdException; import org.ehcache.clustered.common.internal.exceptions.ResourceBusyException; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import 
org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Failure; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Type; +import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage; -import org.ehcache.clustered.common.internal.messages.ReconnectDataCodec; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp; +import org.ehcache.clustered.common.internal.messages.ReconnectMessage; +import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.connection.entity.Entity; @@ -46,8 +48,9 @@ import org.terracotta.entity.MessageCodecException; import org.terracotta.exception.EntityException; +import java.util.ArrayList; +import java.util.Collections; import java.util.EnumSet; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -56,9 +59,7 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; - -import static org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp.GET; -import static org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp.getServerStoreOp; +import java.util.concurrent.atomic.AtomicLong; /** * The client-side {@link Entity} through which clustered cache operations are performed. 
@@ -69,9 +70,6 @@ public class EhcacheClientEntity implements Entity { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheClientEntity.class); - private Set reconnectData = new HashSet(); - private int reconnectDatalen = 0; - public interface ResponseListener { void onResponse(T response); } @@ -80,12 +78,22 @@ public interface DisconnectionListener { void onDisconnection(); } + public interface ReconnectListener { + void onHandleReconnect(ReconnectMessage reconnectMessage); + } + + private final AtomicLong sequenceGenerator = new AtomicLong(0L); + private final EntityClientEndpoint endpoint; private final LifeCycleMessageFactory messageFactory; private final Map, List>> responseListeners = new ConcurrentHashMap, List>>(); private final List disconnectionListeners = new CopyOnWriteArrayList(); - private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); + private final List reconnectListeners = new ArrayList(); + private final ReconnectMessageCodec reconnectMessageCodec = new ReconnectMessageCodec(); private volatile boolean connected = true; + private final Set caches = Collections.newSetFromMap(new ConcurrentHashMap()); + private final Object lock = new Object(); + private volatile UUID clientId; private Timeouts timeouts = Timeouts.builder().build(); @@ -95,6 +103,7 @@ public EhcacheClientEntity(EntityClientEndpoint> responseListeners = this.responseListeners.get(response.getClass()); + private void fireResponseEvent(T response) { + @SuppressWarnings("unchecked") + List> responseListeners = (List) this.responseListeners.get(response.getClass()); if (responseListeners == null) { + LOGGER.warn("Ignoring the response {} as no registered response listener could be found.", response); return; } LOGGER.debug("{} registered response listener(s) for {}", responseListeners.size(), response.getClass()); - for (ResponseListener responseListener : responseListeners) { + for (ResponseListener responseListener : responseListeners) { 
responseListener.onResponse(response); } } + public UUID getClientId() { + if (clientId == null) { + throw new IllegalStateException("Client Id cannot be null"); + } + return this.clientId; + } + public boolean isConnected() { return connected; } @@ -146,6 +170,30 @@ public void addDisconnectionListener(DisconnectionListener listener) { disconnectionListeners.add(listener); } + public void removeDisconnectionListener(DisconnectionListener listener) { + disconnectionListeners.remove(listener); + } + + public List getDisconnectionListeners() { + return Collections.unmodifiableList(disconnectionListeners); + } + + public void addReconnectListener(ReconnectListener listener) { + synchronized (lock) { + reconnectListeners.add(listener); + } + } + + public void removeReconnectListener(ReconnectListener listener) { + synchronized (lock) { + reconnectListeners.remove(listener); + } + } + + public List getReconnectListeners() { + return Collections.unmodifiableList(reconnectListeners); + } + public void addResponseListener(Class responseType, ResponseListener responseListener) { List> responseListeners = this.responseListeners.get(responseType); if (responseListeners == null) { @@ -155,6 +203,13 @@ public void addResponseListener(Class respo responseListeners.add(responseListener); } + public void removeResponseListener(Class responseType, ResponseListener responseListener) { + List> responseListeners = this.responseListeners.get(responseType); + if (responseListeners != null) { + responseListeners.remove(responseListener); + } + } + public UUID identity() { return ClusteredEhcacheIdentity.deserialize(endpoint.getEntityConfiguration()); } @@ -162,11 +217,23 @@ public UUID identity() { @Override public void close() { endpoint.close(); + this.responseListeners.clear(); + this.disconnectionListeners.clear(); + this.reconnectListeners.clear(); } public void validate(ServerSideConfiguration config) throws ClusteredTierManagerValidationException, TimeoutException { try { - 
invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.validateStoreManager(config), false); + while (true) { + try { + clientId = UUID.randomUUID(); + this.messageFactory.setClientId(clientId); + invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.validateStoreManager(config), false); + break; + } catch (InvalidClientIdException e) { + //nothing to do - loop again since the earlier generated UUID is being already tracked by the server + } + } } catch (ClusterException e) { throw new ClusteredTierManagerValidationException("Error validating server clustered tier manager", e); } @@ -174,6 +241,8 @@ public void validate(ServerSideConfiguration config) throws ClusteredTierManager public void configure(ServerSideConfiguration config) throws ClusteredTierManagerConfigurationException, TimeoutException { try { + clientId = UUID.randomUUID(); + this.messageFactory.setClientId(clientId); invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.configureStoreManager(config), true); } catch (ClusterException e) { throw new ClusteredTierManagerConfigurationException("Error configuring clustered tier manager", e); @@ -184,7 +253,7 @@ public void createCache(String name, ServerStoreConfiguration serverStoreConfigu throws ClusteredTierCreationException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.createServerStore(name, serverStoreConfiguration), true); - addReconnectData(name); + caches.add(name); } catch (ClusterException e) { throw new ClusteredTierCreationException("Error creating clustered tier '" + name + "'", e); } @@ -194,7 +263,7 @@ public void validateCache(String name, ServerStoreConfiguration serverStoreConfi throws ClusteredTierValidationException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.validateServerStore(name , serverStoreConfiguration), false); - addReconnectData(name); + caches.add(name); } catch 
(ClusterException e) { throw new ClusteredTierValidationException("Error validating clustered tier '" + name + "'", e); } @@ -203,7 +272,7 @@ public void validateCache(String name, ServerStoreConfiguration serverStoreConfi public void releaseCache(String name) throws ClusteredTierReleaseException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.releaseServerStore(name), false); - removeReconnectData(name); + caches.remove(name); } catch (ClusterException e) { throw new ClusteredTierReleaseException("Error releasing clustered tier '" + name + "'", e); } @@ -212,7 +281,6 @@ public void releaseCache(String name) throws ClusteredTierReleaseException, Time public void destroyCache(String name) throws ClusteredTierDestructionException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.destroyServerStore(name), true); - removeReconnectData(name); } catch (ResourceBusyException e) { throw new ClusteredTierDestructionException(e.getMessage(), e); } catch (ClusterException e) { @@ -220,18 +288,6 @@ public void destroyCache(String name) throws ClusteredTierDestructionException, } } - private void addReconnectData(String name) { - reconnectData.add(name); - reconnectDatalen += name.length(); - } - - private void removeReconnectData(String name) { - if (!reconnectData.contains(name)) { - reconnectData.remove(name); - reconnectDatalen -= name.length(); - } - } - /** * Sends a message to the {@code EhcacheActiveEntity} associated with this {@code EhcacheClientEntity} and * awaits a response. 
@@ -246,24 +302,23 @@ private void removeReconnectData(String name) { */ public EhcacheEntityResponse invoke(EhcacheEntityMessage message, boolean replicate) throws ClusterException, TimeoutException { - TimeoutDuration timeLimit; - if (message.getType() == EhcacheEntityMessage.Type.SERVER_STORE_OP - && GET_STORE_OPS.contains(getServerStoreOp(message.getOpCode()))) { - timeLimit = timeouts.getReadOperationTimeout(); - } else { - timeLimit = timeouts.getMutativeOperationTimeout(); + TimeoutDuration timeLimit = timeouts.getMutativeOperationTimeout(); + if (message instanceof EhcacheOperationMessage) { + if (GET_STORE_OPS.contains(((EhcacheOperationMessage) message).getMessageType())) { + timeLimit = timeouts.getReadOperationTimeout(); + } } return invokeInternal(timeLimit, message, replicate); } - private static final Set GET_STORE_OPS = EnumSet.of(GET); + private static final Set GET_STORE_OPS = EnumSet.of(EhcacheMessageType.GET_STORE); private EhcacheEntityResponse invokeInternal(TimeoutDuration timeLimit, EhcacheEntityMessage message, boolean replicate) throws ClusterException, TimeoutException { try { EhcacheEntityResponse response = waitFor(timeLimit, invokeAsync(message, replicate)); - if (Type.FAILURE.equals(response.getType())) { + if (EhcacheResponseType.FAILURE.equals(response.getResponseType())) { throw ((Failure)response).getCause(); } else { return response; @@ -283,12 +338,11 @@ private EhcacheEntityResponse invokeInternal(TimeoutDuration timeLimit, EhcacheE public InvokeFuture invokeAsync(EhcacheEntityMessage message, boolean replicate) throws MessageCodecException { + getClientId(); if (replicate) { - return endpoint.beginInvoke().message(message).replicate(true).invoke(); //TODO: remove replicate call once - //https://github.com/Terracotta-OSS/terracotta-apis/issues/139 is fixed - } else { - return endpoint.beginInvoke().message(message).replicate(false).invoke(); + message.setId(sequenceGenerator.getAndIncrement()); } + return 
endpoint.beginInvoke().message(message).replicate(replicate).invoke(); } private static T waitFor(TimeoutDuration timeLimit, InvokeFuture future) @@ -317,7 +371,7 @@ private static T waitFor(TimeoutDuration timeLimit, InvokeFuture future) */ public static final class Timeouts { - public static final TimeoutDuration DEFAULT_READ_OPERATION_TIMEOUT = TimeoutDuration.of(5, TimeUnit.SECONDS); + public static final TimeoutDuration DEFAULT_READ_OPERATION_TIMEOUT = TimeoutDuration.of(20, TimeUnit.SECONDS); private final TimeoutDuration readOperationTimeout; private final TimeoutDuration mutativeOperationTimeout; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java index 3391af2f1f..0b91b03c86 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java @@ -27,9 +27,11 @@ import org.terracotta.connection.Connection; import org.terracotta.connection.entity.EntityRef; import org.terracotta.exception.EntityAlreadyExistsException; +import org.terracotta.exception.EntityConfigurationException; import org.terracotta.exception.EntityNotFoundException; import org.terracotta.exception.EntityNotProvidedException; import org.terracotta.exception.EntityVersionMismatchException; +import org.terracotta.exception.PermanentEntityException; import java.util.Map; import java.util.UUID; @@ -91,7 +93,7 @@ public void abandonLeadership(String entityIdentifier) { * lifecycle operation timeout */ public void create(final String identifier, final ServerSideConfiguration config) - throws EntityAlreadyExistsException, EhcacheEntityCreationException, EntityBusyException, TimeoutException { + throws EntityAlreadyExistsException, EhcacheEntityCreationException, EntityBusyException, 
TimeoutException { Hold existingMaintenance = maintenanceHolds.get(identifier); Hold localMaintenance = null; if (existingMaintenance == null) { @@ -99,43 +101,60 @@ public void create(final String identifier, final ServerSideConfiguration config } if (existingMaintenance == null && localMaintenance == null) { throw new EntityBusyException("Unable to create clustered tier manager for id " - + identifier + ": another client owns the maintenance lease"); - } else { + + identifier + ": another client owns the maintenance lease"); + } + + boolean finished = false; + + try { + EntityRef ref = getEntityRef(identifier); try { - EntityRef ref = getEntityRef(identifier); - try { - while (true) { - ref.create(UUID.randomUUID()); + while (true) { + ref.create(UUID.randomUUID()); + try { + EhcacheClientEntity entity = ref.fetchEntity(); try { - EhcacheClientEntity entity = ref.fetchEntity(); - try { - entity.setTimeouts(entityTimeouts); - entity.configure(config); - return; - } finally { + entity.setTimeouts(entityTimeouts); + entity.configure(config); + finished = true; + return; + } finally { + if (finished) { entity.close(); + } else { + silentlyClose(entity, identifier); } - } catch (ClusteredTierManagerConfigurationException e) { - try { - ref.destroy(); - } catch (EntityNotFoundException f) { - //ignore - } - throw new EhcacheEntityCreationException("Unable to configure clustered tier manager for id " + identifier, e); - } catch (EntityNotFoundException e) { - //continue; } + } catch (ClusteredTierManagerConfigurationException e) { + try { + ref.destroy(); + } catch (EntityNotFoundException f) { + //ignore + } + throw new EhcacheEntityCreationException("Unable to configure clustered tier manager for id " + identifier, e); + } catch (EntityNotFoundException e) { + //continue; } - } catch (EntityNotProvidedException e) { - LOGGER.error("Unable to create clustered tier manager for id {}", identifier, e); - throw new AssertionError(e); - } catch 
(EntityVersionMismatchException e) { - LOGGER.error("Unable to create clustered tier manager for id {}", identifier, e); - throw new AssertionError(e); } - } finally { - if (localMaintenance != null) { + } catch (EntityNotProvidedException e) { + LOGGER.error("Unable to create clustered tier manager for id {}", identifier, e); + throw new AssertionError(e); + } catch (EntityVersionMismatchException e) { + LOGGER.error("Unable to create clustered tier manager for id {}", identifier, e); + throw new AssertionError(e); + } catch (PermanentEntityException e) { + LOGGER.error("Unable to create entity - server indicates it is permanent", e); + throw new AssertionError(e); + } catch (EntityConfigurationException e) { + LOGGER.error("Unable to create entity - configuration exception", e); + throw new AssertionError(e); + } + } finally { + if (localMaintenance != null) { + if (finished) { localMaintenance.unlock(); + } else { + silentlyUnlock(localMaintenance, identifier); } } } @@ -156,64 +175,97 @@ public void create(final String identifier, final ServerSideConfiguration config * lifecycle operation timeout */ public EhcacheClientEntity retrieve(String identifier, ServerSideConfiguration config) - throws EntityNotFoundException, EhcacheEntityValidationException, TimeoutException { + throws EntityNotFoundException, EhcacheEntityValidationException, TimeoutException { + + Hold fetchHold = createAccessLockFor(identifier).readLock(); + + EhcacheClientEntity entity; try { - Hold fetchHold = createAccessLockFor(identifier).readLock(); - EhcacheClientEntity entity = getEntityRef(identifier).fetchEntity(); - /* - * Currently entities are never closed as doing so can stall the client - * when the server is dead. Instead the connection is forcibly closed, - * which suits our purposes since that will unlock the fetchHold too. 
- */ - boolean validated = false; - try { - entity.setTimeouts(entityTimeouts); - entity.validate(config); - validated = true; - return entity; - } catch (ClusteredTierManagerValidationException e) { - throw new EhcacheEntityValidationException("Unable to validate clustered tier manager for id " + identifier, e); - } finally { - if (!validated) { - entity.close(); - fetchHold.unlock(); - } - } + entity = getEntityRef(identifier).fetchEntity(); } catch (EntityVersionMismatchException e) { LOGGER.error("Unable to retrieve clustered tier manager for id {}", identifier, e); + silentlyUnlock(fetchHold, identifier); throw new AssertionError(e); } + + /* + * Currently entities are never closed as doing so can stall the client + * when the server is dead. Instead the connection is forcibly closed, + * which suits our purposes since that will unlock the fetchHold too. + */ + boolean validated = false; + try { + entity.setTimeouts(entityTimeouts); + entity.validate(config); + validated = true; + return entity; + } catch (ClusteredTierManagerValidationException e) { + throw new EhcacheEntityValidationException("Unable to validate clustered tier manager for id " + identifier, e); + } finally { + if (!validated) { + silentlyClose(entity, identifier); + silentlyUnlock(fetchHold, identifier); + } + } } public void destroy(final String identifier) throws EhcacheEntityNotFoundException, EntityBusyException { Hold existingMaintenance = maintenanceHolds.get(identifier); Hold localMaintenance = null; + if (existingMaintenance == null) { localMaintenance = createAccessLockFor(identifier).tryWriteLock(); } + if (existingMaintenance == null && localMaintenance == null) { throw new EntityBusyException("Destroy operation failed; " + identifier + " clustered tier's maintenance lease held"); - } else { + } + + boolean finished = false; + + try { + EntityRef ref = getEntityRef(identifier); try { - EntityRef ref = getEntityRef(identifier); - try { - if (!ref.destroy()) { - throw new 
EntityBusyException("Destroy operation failed; " + identifier + " clustered tier in use by other clients"); - } - } catch (EntityNotProvidedException e) { - LOGGER.error("Unable to delete clustered tier manager for id {}", identifier, e); - throw new AssertionError(e); - } catch (EntityNotFoundException e) { - throw new EhcacheEntityNotFoundException(e); + if (!ref.destroy()) { + throw new EntityBusyException("Destroy operation failed; " + identifier + " clustered tier in use by other clients"); } - } finally { - if (localMaintenance != null) { + finished = true; + } catch (EntityNotProvidedException e) { + LOGGER.error("Unable to delete clustered tier manager for id {}", identifier, e); + throw new AssertionError(e); + } catch (EntityNotFoundException e) { + throw new EhcacheEntityNotFoundException(e); + } catch (PermanentEntityException e) { + LOGGER.error("Unable to destroy entity - server says it is permanent", e); + throw new AssertionError(e); + } + } finally { + if (localMaintenance != null) { + if (finished) { localMaintenance.unlock(); + } else { + silentlyUnlock(localMaintenance, identifier); } } } } + private void silentlyClose(EhcacheClientEntity entity, String identifier) { + try { + entity.close(); + } catch (Exception e) { + LOGGER.error("Failed to close entity {}", identifier, e); + } + } + + private void silentlyUnlock(Hold localMaintenance, String identifier) { + try { + localMaintenance.unlock(); + } catch(Exception e) { + LOGGER.error("Failed to unlock for id {}", identifier, e); + } + } + private VoltronReadWriteLock createAccessLockFor(String entityIdentifier) { return new VoltronReadWriteLock(connection, "EhcacheClientEntityFactory-AccessLock-" + entityIdentifier); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java index bc7c03e6c5..b45d486f70 100644 --- 
a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java @@ -19,10 +19,15 @@ import java.util.UUID; import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; +import org.ehcache.clustered.common.internal.messages.CommonConfigCodec; import org.ehcache.clustered.common.internal.messages.EhcacheCodec; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.LifeCycleMessageCodec; +import org.ehcache.clustered.common.internal.messages.ResponseCodec; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpCodec; +import org.ehcache.clustered.common.internal.messages.StateRepositoryOpCodec; import org.terracotta.entity.EntityClientEndpoint; import org.terracotta.entity.EntityClientService; import org.terracotta.entity.MessageCodec; @@ -45,12 +50,13 @@ public UUID deserializeConfiguration(byte[] configuration) { } @Override - public EhcacheClientEntity create(EntityClientEndpoint endpoint) { + public EhcacheClientEntity create(EntityClientEndpoint endpoint) { return new EhcacheClientEntity(endpoint); } @Override public MessageCodec getMessageCodec() { - return EhcacheCodec.messageCodec(); + return new EhcacheCodec(new ServerStoreOpCodec(), new LifeCycleMessageCodec(new CommonConfigCodec()), + new StateRepositoryOpCodec(), new ResponseCodec()); } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java index 8c74da7ead..9d1da5223a 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java +++ 
b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java @@ -50,7 +50,7 @@ public boolean isPersistent() { @Override public void validateUpdate(ResourcePool newPool) { - super.validateUpdate(newPool); + throw new UnsupportedOperationException("Updating CLUSTERED resource is not supported"); } @Override diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java index f5ea3c82f3..e49d2b99b0 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java @@ -18,6 +18,7 @@ import org.ehcache.clustered.client.config.ClusteredResourceType; import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.config.ResourcePool; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.config.SizedResourcePoolImpl; @@ -64,6 +65,11 @@ public PoolAllocation getPoolAllocation() { return new PoolAllocation.Dedicated(this.getFromResource(), this.getUnit().toBytes(this.getSize())); } + @Override + public void validateUpdate(final ResourcePool newPool) { + throw new UnsupportedOperationException("Updating CLUSTERED resource is not supported"); + } + @Override public String toString() { final StringBuilder sb = new StringBuilder("Pool {"); diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java index 7d394beea5..647f133654 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java +++ 
b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java @@ -19,6 +19,7 @@ import org.ehcache.clustered.client.config.ClusteredResourceType; import org.ehcache.clustered.client.config.SharedClusteredResourcePool; import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.config.ResourcePool; import org.ehcache.core.config.AbstractResourcePool; /** @@ -60,6 +61,11 @@ public PoolAllocation getPoolAllocation() { return new PoolAllocation.Shared(this.getSharedResourcePool()); } + @Override + public void validateUpdate(final ResourcePool newPool) { + throw new UnsupportedOperationException("Updating CLUSTERED resource is not supported"); + } + @Override public String toString() { return "Pool {" diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java index 153bc111a5..b6d9d6e991 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java @@ -19,15 +19,21 @@ import java.io.Closeable; import org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.terracotta.connection.Connection; import org.terracotta.connection.entity.EntityRef; import org.terracotta.exception.EntityAlreadyExistsException; +import org.terracotta.exception.EntityConfigurationException; import org.terracotta.exception.EntityNotFoundException; import org.terracotta.exception.EntityNotProvidedException; import org.terracotta.exception.EntityVersionMismatchException; +import org.terracotta.exception.PermanentEntityException; public class VoltronReadWriteLock { + private static final Logger LOGGER = 
LoggerFactory.getLogger(VoltronReadWriteLock.class); + private final EntityRef reference; public VoltronReadWriteLock(Connection connection, String id) { @@ -71,13 +77,19 @@ private Hold tryLock(final HoldType type) { } } - private boolean tryDestroy() { + private void tryDestroy() { try { - return reference.destroy(); + boolean destroyed = reference.destroy(); + if (destroyed) { + LOGGER.debug("Destroyed lock entity " + reference.getName()); + } } catch (EntityNotProvidedException e) { throw new AssertionError(e); } catch (EntityNotFoundException e) { - return false; + // Nothing to do + } catch (PermanentEntityException e) { + LOGGER.error("Failed to destroy lock entity - server says it is permanent", e); + throw new AssertionError(e); } } @@ -109,7 +121,9 @@ public void close() { public void unlock() { client.unlock(type); client.close(); - tryDestroy(); + if (type == HoldType.WRITE) { + tryDestroy(); + } } } @@ -118,8 +132,12 @@ private VoltronReadWriteLockClient createClientEntity() { while (true) { try { reference.create(null); + LOGGER.debug("Created lock entity " + reference.getName()); } catch (EntityAlreadyExistsException f) { //ignore + } catch (EntityConfigurationException e) { + LOGGER.error("Error creating lock entity - configuration exception", e); + throw new AssertionError(e); } try { return reference.fetchEntity(); diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java index 82733fdab0..b07093a170 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java @@ -23,9 +23,11 @@ import org.terracotta.connection.entity.Entity; import org.terracotta.connection.entity.EntityRef; import 
org.terracotta.exception.EntityAlreadyExistsException; +import org.terracotta.exception.EntityConfigurationException; import org.terracotta.exception.EntityNotFoundException; import org.terracotta.exception.EntityNotProvidedException; import org.terracotta.exception.EntityVersionMismatchException; +import org.terracotta.exception.PermanentEntityException; abstract class AbstractClientEntityFactory implements ClientEntityFactory { @@ -86,6 +88,9 @@ public void create() throws EntityAlreadyExistsException { } catch (EntityVersionMismatchException e) { LOGGER.error("Unable to create entity {} for id {}", entityType.getName(), entityIdentifier, e); throw new AssertionError(e); + } catch (EntityConfigurationException e) { + LOGGER.error("Unable to create entity - configuration exception", e); + throw new AssertionError(e); } } @@ -109,6 +114,9 @@ public void destroy() throws EntityNotFoundException, EntityBusyException { } catch (EntityNotProvidedException e) { LOGGER.error("Unable to destroy entity {} for id {}", entityType.getName(), entityIdentifier, e); throw new AssertionError(e); + } catch (PermanentEntityException e) { + LOGGER.error("Unable to destroy entity - server says it is permanent", e); + throw new AssertionError(e); } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ConcurrentClusteredMap.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java similarity index 58% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ConcurrentClusteredMap.java rename to clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java index 3d1f96569c..d0693a7458 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ConcurrentClusteredMap.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java @@ -21,18 +21,17 
@@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.StateRepositoryMessageFactory; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.spi.persistence.StateHolder; import java.util.AbstractMap; -import java.util.Collection; import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeoutException; import static org.ehcache.clustered.client.internal.service.ValueCodecFactory.getCodecForClass; -public class ConcurrentClusteredMap implements ConcurrentMap { +public class ClusteredStateHolder implements StateHolder { private final StateRepositoryMessageFactory messageFactory; private final EhcacheClientEntity entity; @@ -40,35 +39,16 @@ public class ConcurrentClusteredMap implements ConcurrentMap { private final ValueCodec keyCodec; private final ValueCodec valueCodec; - public ConcurrentClusteredMap(final String cacheId, final String mapId, final EhcacheClientEntity entity, Class keyClass, Class valueClass) { + public ClusteredStateHolder(final String cacheId, final String mapId, final EhcacheClientEntity entity, Class keyClass, Class valueClass) { this.keyClass = keyClass; this.keyCodec = getCodecForClass(keyClass); this.valueCodec = getCodecForClass(valueClass); - this.messageFactory = new StateRepositoryMessageFactory(cacheId, mapId); + this.messageFactory = new StateRepositoryMessageFactory(cacheId, mapId, entity.getClientId()); this.entity = entity; } @Override - public int size() { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public boolean isEmpty() { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public boolean containsKey(final Object key) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public boolean containsValue(final Object value) { - throw new 
UnsupportedOperationException("TODO"); - } - - @Override + @SuppressWarnings("unchecked") public V get(final Object key) { if (!keyClass.isAssignableFrom(key.getClass())) { return null; @@ -90,41 +70,12 @@ private Object getResponse(StateRepositoryOpMessage message) { } @Override - public V put(final K key, final V value) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public V remove(final Object key) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public void putAll(final Map m) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public void clear() { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public Set keySet() { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public Collection values() { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public Set> entrySet() { + @SuppressWarnings("unchecked") + public Set> entrySet() { @SuppressWarnings("unchecked") - Set> response = (Set>) getResponse(messageFactory.entrySetMessage()); - Set> entries = new HashSet>(); - for (Entry objectEntry : response) { + Set> response = (Set>) getResponse(messageFactory.entrySetMessage()); + Set> entries = new HashSet>(); + for (Map.Entry objectEntry : response) { entries.add(new AbstractMap.SimpleEntry(keyCodec.decode(objectEntry.getKey()), valueCodec.decode(objectEntry.getValue()))); } @@ -132,23 +83,10 @@ public Set> entrySet() { } @Override + @SuppressWarnings("unchecked") public V putIfAbsent(final K key, final V value) { Object response = getResponse(messageFactory.putIfAbsentMessage(keyCodec.encode(key), valueCodec.encode(value))); return valueCodec.decode(response); } - @Override - public boolean remove(final Object key, final Object value) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public boolean replace(final K key, final V oldValue, final V newValue) { - throw new UnsupportedOperationException("TODO"); - } 
- - @Override - public V replace(final K key, final V value) { - throw new UnsupportedOperationException("TODO"); - } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java index c80d91d240..d4205d028e 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java @@ -18,11 +18,10 @@ import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.spi.persistence.StateHolder; import org.ehcache.spi.persistence.StateRepository; import java.io.Serializable; -import java.util.concurrent.ConcurrentMap; /** * ClusteredStateRepository @@ -40,7 +39,7 @@ class ClusteredStateRepository implements StateRepository { } @Override - public ConcurrentMap getPersistentConcurrentMap(String name, Class keyClass, Class valueClass) { - return new ConcurrentClusteredMap(clusterCacheIdentifier.getId(), composedId + "-" + name, clientEntity, keyClass, valueClass); + public StateHolder getPersistentStateHolder(String name, Class keyClass, Class valueClass) { + return new ClusteredStateHolder(clusterCacheIdentifier.getId(), composedId + "-" + name, clientEntity, keyClass, valueClass); } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java index fb12d4c83c..d835ae6407 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java +++ 
b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java @@ -27,6 +27,7 @@ * * @author Clifford W. Johnson */ +@ServiceFactory.RequiresConfiguration public class ClusteringServiceFactory implements ServiceFactory { @Override diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 77f46ff091..8a9fc2eabb 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -26,7 +26,6 @@ import org.ehcache.clustered.client.internal.EhcacheEntityNotFoundException; import org.ehcache.clustered.client.internal.EhcacheEntityValidationException; import org.ehcache.clustered.client.internal.config.ExperimentalClusteringServiceConfiguration; -import org.ehcache.clustered.client.internal.store.ClusteredStore; import org.ehcache.clustered.client.internal.store.EventualServerStoreProxy; import org.ehcache.clustered.client.internal.store.ServerStoreProxy; import org.ehcache.clustered.client.internal.store.StrongServerStoreProxy; @@ -45,7 +44,6 @@ import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -68,7 +66,6 @@ /** * Provides support for accessing server-based cluster services. 
*/ -@ServiceDependencies(ClusteredStore.Provider.class) class DefaultClusteringService implements ClusteringService, EntityService { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultClusteringService.class); @@ -81,9 +78,9 @@ class DefaultClusteringService implements ClusteringService, EntityService { private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap(); private final EhcacheClientEntity.Timeouts operationTimeouts; - private Connection clusterConnection; + private volatile Connection clusterConnection; private EhcacheClientEntityFactory entityFactory; - private EhcacheClientEntity entity; + EhcacheClientEntity entity; private volatile boolean inMaintenance = false; @@ -125,7 +122,7 @@ public ClientEntityFactory newClientEntityFactory(St return new AbstractClientEntityFactory(entityIdentifier, entityType, entityVersion, configuration) { @Override protected Connection getConnection() { - if (clusterConnection == null) { + if (!isConnected()) { throw new IllegalStateException(getClass().getSimpleName() + " not started."); } return clusterConnection; @@ -133,6 +130,11 @@ protected Connection getConnection() { }; } + @Override + public boolean isConnected() { + return clusterConnection != null; + } + @Override public void start(final ServiceProvider serviceProvider) { initClusterConnection(); @@ -153,16 +155,25 @@ public void start(final ServiceProvider serviceProvider) { } } catch (RuntimeException e) { entityFactory = null; - try { - clusterConnection.close(); - clusterConnection = null; - } catch (IOException ex) { - LOGGER.warn("Error closing cluster connection: " + ex); - } + closeConnection(); throw e; } } + @Override + public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { + initClusterConnection(); + createEntityFactory(); + if(maintenanceScope == MaintenanceScope.CACHE_MANAGER) { + if (!entityFactory.acquireLeadership(entityIdentifier)) { + entityFactory = null; + 
closeConnection(); + throw new IllegalStateException("Couldn't acquire cluster-wide maintenance lease"); + } + } + inMaintenance = true; + } + private void createEntityFactory() { entityFactory = new EhcacheClientEntityFactory(clusterConnection, operationTimeouts); } @@ -204,24 +215,6 @@ private EhcacheClientEntity autoCreateEntity() throws EhcacheEntityValidationExc } } - @Override - public void startForMaintenance(ServiceProvider serviceProvider) { - initClusterConnection(); - createEntityFactory(); - - if (!entityFactory.acquireLeadership(entityIdentifier)) { - entityFactory = null; - try { - clusterConnection.close(); - clusterConnection = null; - } catch (IOException e) { - LOGGER.warn("Error closing cluster connection: " + e); - } - throw new IllegalStateException("Couldn't acquire cluster-wide maintenance lease"); - } - inMaintenance = true; - } - @Override public void stop() { LOGGER.info("stop called for clustered tiers on {}", this.clusterUri); @@ -238,14 +231,7 @@ public void stop() { entity = null; - try { - if (clusterConnection != null) { - clusterConnection.close(); - clusterConnection = null; - } - } catch (IOException ex) { - throw new RuntimeException(ex); - } + closeConnection(); } @Override @@ -315,38 +301,42 @@ public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier id } } + private void checkStarted() { + if(!isStarted()) { + throw new IllegalStateException(getClass().getName() + " should be started to call destroy"); + } + } + @Override public void destroy(String name) throws CachePersistenceException { - boolean wasStarted = isStarted(); - // If the cluster isn't started, start it first to be able to destroy the cache - if(!wasStarted) { - initClusterConnection(); - createEntityFactory(); + checkStarted(); + + // will happen when in maintenance mode + if(entity == null) { try { entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); } catch (EntityNotFoundException e) { // No entity on 
the server, so no need to destroy anything } catch (TimeoutException e) { throw new CachePersistenceException("Could not connect to the clustered tier manager '" + entityIdentifier - + "'; retrieve operation timed out", e); + + "'; retrieve operation timed out", e); } } + try { - entity.destroyCache(name); + if (entity != null) { + entity.destroyCache(name); + } } catch (ClusteredTierDestructionException e) { throw new CachePersistenceException(e.getMessage() + " (on " + clusterUri + ")", e); } catch (TimeoutException e) { throw new CachePersistenceException("Could not destroy clustered tier '" + name + "' on " + clusterUri + "; destroy operation timed out" + clusterUri, e); - } finally { - if (!wasStarted) { - stop(); - } } } protected boolean isStarted() { - return entity != null; + return entityFactory != null; } @Override @@ -376,6 +366,19 @@ public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifie throw new IllegalStateException("A clustered resource is required for a clustered cache"); } + ServerStoreProxy serverStoreProxy; + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory(cacheId, entity.getClientId()); + switch (configuredConsistency) { + case STRONG: + serverStoreProxy = new StrongServerStoreProxy(messageFactory, entity); + break; + case EVENTUAL: + serverStoreProxy = new EventualServerStoreProxy(messageFactory, entity); + break; + default: + throw new AssertionError("Unknown consistency : " + configuredConsistency); + } + final ServerStoreConfiguration clientStoreConfiguration = new ServerStoreConfiguration( clusteredResourcePool.getPoolAllocation(), storeConfig.getKeyType().getName(), @@ -390,40 +393,28 @@ public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifie try { if (configuration.isAutoCreate()) { try { - this.entity.validateCache(cacheId, clientStoreConfiguration); - } catch (ClusteredTierValidationException ex) { - if (ex.getCause() instanceof InvalidStoreException) { - try { - 
this.entity.createCache(cacheId, clientStoreConfiguration); - } catch (TimeoutException e) { - throw new CachePersistenceException("Unable to create clustered tier proxy '" - + cacheIdentifier.getId() + "' for entity '" + entityIdentifier - + "'; create operation timed out", e); - } - } else { - throw ex; + entity.createCache(cacheId, clientStoreConfiguration); + } catch (ClusteredTierCreationException e) { + // An InvalidStoreException means the cache already exists. That's fine, the validateCache will then work + if (!(e.getCause() instanceof InvalidStoreException)) { + throw e; } + entity.validateCache(cacheId, clientStoreConfiguration); } } else { - this.entity.validateCache(cacheId, clientStoreConfiguration); + entity.validateCache(cacheId, clientStoreConfiguration); } } catch (ClusteredTierException e) { + serverStoreProxy.close(); throw new CachePersistenceException("Unable to create clustered tier proxy '" + cacheIdentifier.getId() + "' for entity '" + entityIdentifier + "'", e); } catch (TimeoutException e) { + serverStoreProxy.close(); throw new CachePersistenceException("Unable to create clustered tier proxy '" + cacheIdentifier.getId() + "' for entity '" + entityIdentifier + "'; validate operation timed out", e); } - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory(cacheId); - switch (configuredConsistency) { - case STRONG: - return new StrongServerStoreProxy(messageFactory, entity); - case EVENTUAL: - return new EventualServerStoreProxy(messageFactory, entity); - default: - throw new AssertionError("Unknown consistency : " + configuredConsistency); - } + return serverStoreProxy; } @Override @@ -444,6 +435,18 @@ public void releaseServerStoreProxy(ServerStoreProxy storeProxy) { } } + private void closeConnection() { + Connection conn = clusterConnection; + clusterConnection = null; + if(conn != null) { + try { + conn.close(); + } catch (IOException e) { + LOGGER.warn("Error closing cluster connection: " + e); + } + } + } + /** * 
Supplies the identifier to use for identifying a client-side cache to its server counterparts. */ diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index 17a8b29ddf..311f6c6607 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -46,21 +46,29 @@ import org.ehcache.core.spi.store.events.StoreEventSource; import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; +import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.core.statistics.TierOperationOutcomes; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; import org.ehcache.impl.internal.events.NullStoreEventDispatcher; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; +import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.terracotta.context.annotations.ContextAttribute; +import org.terracotta.statistics.MappedOperationStatistic; import org.terracotta.statistics.StatisticsManager; import org.terracotta.statistics.observer.OperationObserver; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -68,9 
+76,9 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.Callable; import java.util.concurrent.TimeoutException; -import static java.util.Collections.singleton; import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; import static org.terracotta.statistics.StatisticBuilder.operation; @@ -80,7 +88,8 @@ */ public class ClusteredStore implements AuthoritativeTier { - private static final String STATISTICS_TAG = "clustered-store"; + private static final String STATISTICS_TAG = "Clustered"; + private static final int TIER_HEIGHT = ClusteredResourceType.Types.UNKNOWN.getTierHeight(); //TierHeight is the same for all ClusteredResourceType.Types private final OperationsCodec codec; private final ChainResolver resolver; @@ -99,8 +108,8 @@ public class ClusteredStore implements AuthoritativeTier { private final OperationObserver conditionalReplaceObserver; // Needed for JSR-107 compatibility even if unused private final OperationObserver evictionObserver; + private final OperationObserver getAndFaultObserver; - private final ClusteredStoreStatsSettings clusteredStoreStatsSettings; private ClusteredStore(final OperationsCodec codec, final ChainResolver resolver, TimeSource timeSource) { this.codec = codec; @@ -115,8 +124,36 @@ private ClusteredStore(final OperationsCodec codec, final ChainResolver tags = new HashSet(Arrays.asList(STATISTICS_TAG, "tier")); + Map properties = new HashMap(); + properties.put("discriminator", STATISTICS_TAG); + StatisticsManager.createPassThroughStatistic(this, "mappings", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + return -1L; + } + }); + StatisticsManager.createPassThroughStatistic(this, "maxMappings", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + return -1L; + } + }); + 
StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + return -1L; + } + }); + StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + return -1L; + } + }); - this.clusteredStoreStatsSettings = new ClusteredStoreStatsSettings(this); } /** @@ -406,7 +443,7 @@ public Map> bulkCompute(final Set keys, final Fun throws StoreAccessException { Map> valueHolderMap = new HashMap>(); if(remappingFunction instanceof Ehcache.PutAllFunction) { - Ehcache.PutAllFunction putAllFunction = (Ehcache.PutAllFunction)remappingFunction; + Ehcache.PutAllFunction putAllFunction = (Ehcache.PutAllFunction)remappingFunction; Map entriesToRemap = putAllFunction.getEntriesToRemap(); for(Map.Entry entry: entriesToRemap.entrySet()) { PutStatus putStatus = silentPut(entry.getKey(), entry.getValue()); @@ -416,7 +453,7 @@ public Map> bulkCompute(final Set keys, final Fun } } } else if(remappingFunction instanceof Ehcache.RemoveAllFunction) { - Ehcache.RemoveAllFunction removeAllFunction = (Ehcache.RemoveAllFunction)remappingFunction; + Ehcache.RemoveAllFunction removeAllFunction = (Ehcache.RemoveAllFunction)remappingFunction; for (K key : keys) { boolean removed = silentRemove(key); if(removed) { @@ -472,7 +509,21 @@ public List getConfigurationChangeListeners() @Override public ValueHolder getAndFault(K key) throws StoreAccessException { - return get(key); + getAndFaultObserver.begin(); + V value; + try { + value = getInternal(key); + } catch (TimeoutException e) { + getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT); + return null; + } + if(value == null) { + getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); + return null; + } else { + getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT); + 
return new ClusteredValueHolder(value); + } } @Override @@ -495,6 +546,7 @@ public void setInvalidationValve(InvalidationValve valve) { /** * Provider of {@link ClusteredStore} instances. */ + @ServiceDependencies({TimeSourceService.class, ClusteringService.class}) public static class Provider implements Store.Provider, AuthoritativeTier.Provider { private static final Logger LOGGER = LoggerFactory.getLogger(Provider.class); @@ -510,16 +562,36 @@ public static class Provider implements Store.Provider, AuthoritativeTier.Provid private volatile ClusteringService clusteringService; private final Map, StoreConfig> createdStores = new ConcurrentWeakIdentityHashMap, StoreConfig>(); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); @Override public ClusteredStore createStore(final Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { + ClusteredStore store = createStoreInternal(storeConfig, serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + MappedOperationStatistic get = + new MappedOperationStatistic( + store, TierOperationOutcomes.GET_TRANSLATION, "get", TIER_HEIGHT, "get", STATISTICS_TAG); + StatisticsManager.associate(get).withParent(store); + tieredOps.add(get); + + MappedOperationStatistic evict = + new MappedOperationStatistic( + store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); + StatisticsManager.associate(evict).withParent(store); + tieredOps.add(evict); + + tierOperationStatistics.put(store, tieredOps); + return store; + } - DefaultCacheLoaderWriterConfiguration loaderWriterConfiguration = findSingletonAmongst(DefaultCacheLoaderWriterConfiguration.class, (Object[])serviceConfigs); + private ClusteredStore createStoreInternal(Configuration storeConfig, Object[] serviceConfigs) { + DefaultCacheLoaderWriterConfiguration loaderWriterConfiguration = findSingletonAmongst(DefaultCacheLoaderWriterConfiguration.class, 
serviceConfigs); if (loaderWriterConfiguration != null) { throw new IllegalStateException("CacheLoaderWriter is not supported with clustered tiers"); } - CacheEventListenerConfiguration eventListenerConfiguration = findSingletonAmongst(CacheEventListenerConfiguration.class, (Object[])serviceConfigs); + CacheEventListenerConfiguration eventListenerConfiguration = findSingletonAmongst(CacheEventListenerConfiguration.class, serviceConfigs); if (eventListenerConfiguration != null) { throw new IllegalStateException("CacheEventListener is not supported with clustered tiers"); } @@ -539,11 +611,11 @@ public ClusteredStore createStore(final Configuration storeCo throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore can not create clustered tier with multiple clustered resources"); } - ClusteredStoreConfiguration clusteredStoreConfiguration = findSingletonAmongst(ClusteredStoreConfiguration.class, (Object[])serviceConfigs); + ClusteredStoreConfiguration clusteredStoreConfiguration = findSingletonAmongst(ClusteredStoreConfiguration.class, serviceConfigs); if (clusteredStoreConfiguration == null) { clusteredStoreConfiguration = new ClusteredStoreConfiguration(); } - ClusteredCacheIdentifier cacheId = findSingletonAmongst(ClusteredCacheIdentifier.class, (Object[]) serviceConfigs); + ClusteredCacheIdentifier cacheId = findSingletonAmongst(ClusteredCacheIdentifier.class, serviceConfigs); TimeSource timeSource = serviceProvider.getService(TimeSourceService.class).getTimeSource(); @@ -552,7 +624,7 @@ public ClusteredStore createStore(final Configuration storeCo ClusteredStore store = new ClusteredStore(codec, resolver, timeSource); - StatisticsManager.associate(store.clusteredStoreStatsSettings).withParent(store); + createdStores.put(store, new StoreConfig(cacheId, storeConfig, clusteredStoreConfiguration.getConsistency())); return store; } @@ -564,7 +636,8 @@ public void releaseStore(final Store resource) { } ClusteredStore clusteredStore = 
(ClusteredStore)resource; this.clusteringService.releaseServerStoreProxy(clusteredStore.storeProxy); - StatisticsManager.dissociate(clusteredStore.clusteredStoreStatsSettings).fromParent(clusteredStore); + StatisticsManager.nodeFor(clusteredStore).clean(); + tierOperationStatistics.remove(clusteredStore); } @Override @@ -573,15 +646,40 @@ public void initStore(final Store resource) { if (storeConfig == null) { throw new IllegalArgumentException("Given clustered tier is not managed by this provider : " + resource); } - final ClusteredStore clusteredStore = (ClusteredStore) resource; + final ClusteredStore clusteredStore = (ClusteredStore) resource; + ClusteredCacheIdentifier cacheIdentifier = storeConfig.getCacheIdentifier(); try { - clusteredStore.storeProxy = clusteringService.getServerStoreProxy(storeConfig.getCacheIdentifier(), storeConfig.getStoreConfig(), storeConfig.getConsistency()); + clusteredStore.storeProxy = clusteringService.getServerStoreProxy(cacheIdentifier, storeConfig.getStoreConfig(), storeConfig.getConsistency()); } catch (CachePersistenceException e) { - throw new RuntimeException("Unable to create clustered tier proxy - " + storeConfig.getCacheIdentifier(), e); + throw new RuntimeException("Unable to create clustered tier proxy - " + cacheIdentifier, e); + } + + Serializer keySerializer = clusteredStore.codec.getKeySerializer(); + if (keySerializer instanceof StatefulSerializer) { + StateRepository stateRepository = null; + try { + stateRepository = clusteringService.getStateRepositoryWithin(cacheIdentifier, cacheIdentifier.getId() + "-Key"); + } catch (CachePersistenceException e) { + throw new RuntimeException(e); + } + ((StatefulSerializer)keySerializer).init(stateRepository); } + Serializer valueSerializer = clusteredStore.codec.getValueSerializer(); + if (valueSerializer instanceof StatefulSerializer) { + StateRepository stateRepository = null; + try { + stateRepository = clusteringService.getStateRepositoryWithin(cacheIdentifier, 
cacheIdentifier.getId() + "-Value"); + } catch (CachePersistenceException e) { + throw new RuntimeException(e); + } + ((StatefulSerializer)valueSerializer).init(stateRepository); + } + clusteredStore.storeProxy.addInvalidationListener(new ServerStoreProxy.InvalidationListener() { @Override public void onInvalidateHash(long hash) { + StoreOperationOutcomes.EvictionOutcome result = StoreOperationOutcomes.EvictionOutcome.SUCCESS; + clusteredStore.evictionObserver.begin(); if (clusteredStore.invalidationValve != null) { try { LOGGER.debug("CLIENT: calling invalidation valve for hash {}", hash); @@ -589,8 +687,10 @@ public void onInvalidateHash(long hash) { } catch (StoreAccessException sae) { //TODO: what should be done here? delegate to resilience strategy? LOGGER.error("Error invalidating hash {}", hash, sae); + result = StoreOperationOutcomes.EvictionOutcome.FAILURE; } } + clusteredStore.evictionObserver.end(result); } @Override @@ -640,7 +740,23 @@ public void stop() { @Override public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStore(storeConfig, serviceConfigs); + ClusteredStore authoritativeTier = createStoreInternal(storeConfig, serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + MappedOperationStatistic get = + new MappedOperationStatistic( + authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", TIER_HEIGHT, "getAndFault", STATISTICS_TAG); + StatisticsManager.associate(get).withParent(authoritativeTier); + tieredOps.add(get); + + MappedOperationStatistic evict = + new MappedOperationStatistic( + authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); + StatisticsManager.associate(evict).withParent(authoritativeTier); + tieredOps.add(evict); + + tierOperationStatistics.put(authoritativeTier, tieredOps); + return authoritativeTier; } @Override @@ -657,16 +773,16 @@ public void initAuthoritativeTier(AuthoritativeTier resource) { private static class StoreConfig { private final ClusteredCacheIdentifier cacheIdentifier; - private final Store.Configuration storeConfig; + private final Store.Configuration storeConfig; private final Consistency consistency; - StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration storeConfig, Consistency consistency) { + StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration storeConfig, Consistency consistency) { this.cacheIdentifier = cacheIdentifier; this.storeConfig = storeConfig; this.consistency = consistency; } - public Configuration getStoreConfig() { + public Configuration getStoreConfig() { return this.storeConfig; } @@ -678,14 +794,4 @@ public Consistency getConsistency() { return consistency; } } - - private static final class ClusteredStoreStatsSettings { - @ContextAttribute("tags") private final Set tags = singleton("store"); - @ContextAttribute("authoritativeTier") private final ClusteredStore authoritativeTier; - - ClusteredStoreStatsSettings(ClusteredStore store) { - this.authoritativeTier = 
store; - } - } - } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java new file mode 100644 index 0000000000..8bc103a176 --- /dev/null +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java @@ -0,0 +1,222 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.client.internal.EhcacheClientEntity; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; +import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; +import org.ehcache.clustered.common.internal.store.Chain; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.TimeoutException; + +/** + * Provides client-side access to the services of a {@code ServerStore}. 
+ */ +class CommonServerStoreProxy implements ServerStoreProxy { + + private static final Logger LOGGER = LoggerFactory.getLogger(CommonServerStoreProxy.class); + + private final ServerStoreMessageFactory messageFactory; + private final EhcacheClientEntity entity; + + private final List invalidationListeners = new CopyOnWriteArrayList(); + private final Map, EhcacheClientEntity.ResponseListener> responseListeners + = new ConcurrentHashMap, EhcacheClientEntity.ResponseListener>(); + + CommonServerStoreProxy(final ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { + this.messageFactory = messageFactory; + this.entity = entity; + this.responseListeners.put(EhcacheEntityResponse.ServerInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { + @Override + public void onResponse(EhcacheEntityResponse.ServerInvalidateHash response) { + if (response.getCacheId().equals(messageFactory.getCacheId())) { + long key = response.getKey(); + LOGGER.debug("CLIENT: on cache {}, server requesting hash {} to be invalidated", messageFactory.getCacheId(), key); + for (InvalidationListener listener : invalidationListeners) { + listener.onInvalidateHash(key); + } + } else { + LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); + } + } + }); + this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { + @Override + public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { + final String cacheId = response.getCacheId(); + final long key = response.getKey(); + final int invalidationId = response.getInvalidationId(); + + if (cacheId.equals(messageFactory.getCacheId())) { + LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId); + for (InvalidationListener listener : invalidationListeners) { + listener.onInvalidateHash(key); + } + + try { + 
LOGGER.debug("CLIENT: ack'ing invalidation of hash {} from cache {} (ID {})", key, cacheId, invalidationId); + entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), false); + } catch (Exception e) { + //TODO: what should be done here? + LOGGER.error("error acking client invalidation of hash {} on cache {}", key, cacheId, e); + } + } else { + LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); + } + } + }); + this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateAll.class, new EhcacheClientEntity.ResponseListener() { + @Override + public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { + final String cacheId = response.getCacheId(); + final int invalidationId = response.getInvalidationId(); + + if (cacheId.equals(messageFactory.getCacheId())) { + LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId); + for (InvalidationListener listener : invalidationListeners) { + listener.onInvalidateAll(); + } + + try { + LOGGER.debug("CLIENT: ack'ing invalidation of all from cache {} (ID {})", cacheId, invalidationId); + entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), false); + } catch (Exception e) { + //TODO: what should be done here? 
+ LOGGER.error("error acking client invalidation of all on cache {}", cacheId, e); + } + } else { + LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); + } + } + }); + + addResponseListenersToEntity(); + } + + @SuppressWarnings("unchecked") + private void addResponseListenersToEntity() { + for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : + this.responseListeners.entrySet()) { + this.entity.addResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener)classResponseListenerEntry.getValue()); + } + } + + @Override + public String getCacheId() { + return messageFactory.getCacheId(); + } + + @Override + public void addInvalidationListener(InvalidationListener listener) { + invalidationListeners.add(listener); + } + + @Override + public boolean removeInvalidationListener(InvalidationListener listener) { + return invalidationListeners.remove(listener); + } + + void addResponseListeners(Class listenerClass, EhcacheClientEntity.ResponseListener listener) { + this.responseListeners.put(listenerClass, listener); + this.entity.addResponseListener(listenerClass, listener); + } + + @SuppressWarnings("unchecked") + @Override + public void close() { + for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : + this.responseListeners.entrySet()) { + this.entity.removeResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener) classResponseListenerEntry.getValue()); + } + } + + @Override + public Chain get(long key) throws TimeoutException { + EhcacheEntityResponse response; + try { + response = entity.invoke(messageFactory.getOperation(key), false); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + if (response != null && response.getResponseType() == EhcacheResponseType.GET_RESPONSE) { + return 
((EhcacheEntityResponse.GetResponse)response).getChain(); + } else { + throw new ServerStoreProxyException("Response for get operation was invalid : " + + (response != null ? response.getResponseType() : "null message")); + } + } + + @Override + public void append(long key, ByteBuffer payLoad) throws TimeoutException { + try { + entity.invoke(messageFactory.appendOperation(key, payLoad), true); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } + + @Override + public Chain getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException { + EhcacheEntityResponse response; + try { + response = entity.invoke(messageFactory.getAndAppendOperation(key, payLoad), true); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + if (response != null && response.getResponseType() == EhcacheResponseType.GET_RESPONSE) { + return ((EhcacheEntityResponse.GetResponse)response).getChain(); + } else { + throw new ServerStoreProxyException("Response for getAndAppend operation was invalid : " + + (response != null ? 
response.getResponseType() : "null message")); + } + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + // TODO: Optimize this method to just send sequences for expect Chain + try { + entity.invokeAsync(messageFactory.replaceAtHeadOperation(key, expect, update), true); + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } + + @Override + public void clear() throws TimeoutException { + try { + entity.invoke(messageFactory.clearOperation(), true); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java index 7858717bc8..6920e447f6 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java @@ -16,76 +16,18 @@ package org.ehcache.clustered.client.internal.store; import org.ehcache.clustered.client.internal.EhcacheClientEntity; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.common.internal.store.Chain; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeoutException; -/** - * @author Ludovic Orban - */ public class EventualServerStoreProxy implements ServerStoreProxy { - private static final Logger LOGGER = LoggerFactory.getLogger(EventualServerStoreProxy.class); - private final ServerStoreProxy delegate; - private final List invalidationListeners = new CopyOnWriteArrayList(); public 
EventualServerStoreProxy(final ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { - this.delegate = new NoInvalidationServerStoreProxy(messageFactory, entity); - entity.addResponseListener(EhcacheEntityResponse.ServerInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ServerInvalidateHash response) { - if (response.getCacheId().equals(messageFactory.getCacheId())) { - long key = response.getKey(); - LOGGER.debug("CLIENT: on cache {}, server requesting hash {} to be invalidated", messageFactory.getCacheId(), key); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateHash(key); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - entity.addResponseListener(EhcacheEntityResponse.ClientInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { - final String cacheId = response.getCacheId(); - final long key = response.getKey(); - final int invalidationId = response.getInvalidationId(); - - if (cacheId.equals(messageFactory.getCacheId())) { - LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateHash(key); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - entity.addResponseListener(EhcacheEntityResponse.ClientInvalidateAll.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { - final String cacheId = response.getCacheId(); - final int invalidationId = response.getInvalidationId(); 
- - if (cacheId.equals(messageFactory.getCacheId())) { - LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateAll(); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); + this.delegate = new CommonServerStoreProxy(messageFactory, entity); } @Override @@ -95,12 +37,17 @@ public String getCacheId() { @Override public void addInvalidationListener(InvalidationListener listener) { - invalidationListeners.add(listener); + delegate.addInvalidationListener(listener); } @Override public boolean removeInvalidationListener(InvalidationListener listener) { - return invalidationListeners.remove(listener); + return delegate.removeInvalidationListener(listener); + } + + @Override + public void close() { + delegate.close(); } @Override diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java deleted file mode 100644 index b2a6c4c1c0..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.client.internal.EhcacheClientEntity; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; -import org.ehcache.clustered.common.internal.store.Chain; - -import java.nio.ByteBuffer; -import java.util.concurrent.TimeoutException; - -/** - * Provides client-side access to the services of a {@code ServerStore}. - */ -class NoInvalidationServerStoreProxy implements ServerStoreProxy { - - private final ServerStoreMessageFactory messageFactory; - private final EhcacheClientEntity entity; - - NoInvalidationServerStoreProxy(ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { - this.messageFactory = messageFactory; - this.entity = entity; - } - - @Override - public String getCacheId() { - return messageFactory.getCacheId(); - } - - @Override - public void addInvalidationListener(InvalidationListener listener) { - throw new UnsupportedOperationException(); - } - - @Override - public boolean removeInvalidationListener(InvalidationListener listener) { - throw new UnsupportedOperationException(); - } - - @Override - public Chain get(long key) throws TimeoutException { - EhcacheEntityResponse response; - try { - response = entity.invoke(messageFactory.getOperation(key), false); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - if (response != null && response.getType() == EhcacheEntityResponse.Type.GET_RESPONSE) { - return ((EhcacheEntityResponse.GetResponse)response).getChain(); - } else { - throw new ServerStoreProxyException("Response for get operation was invalid : " + - (response != null ? 
response.getType().toString() : "null message")); - } - } - - @Override - public void append(long key, ByteBuffer payLoad) throws TimeoutException { - try { - entity.invoke(messageFactory.appendOperation(key, payLoad), true); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - } - - @Override - public Chain getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException { - EhcacheEntityResponse response; - try { - response = entity.invoke(messageFactory.getAndAppendOperation(key, payLoad), true); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - if (response != null && response.getType() == EhcacheEntityResponse.Type.GET_RESPONSE) { - return ((EhcacheEntityResponse.GetResponse)response).getChain(); - } else { - throw new ServerStoreProxyException("Response for getAndAppend operation was invalid : " + - (response != null ? response.getType().toString() : "null message")); - } - } - - @Override - public void replaceAtHead(long key, Chain expect, Chain update) { - // TODO: Optimize this method to just send sequences for expect Chain - try { - entity.invokeAsync(messageFactory.replaceAtHeadOperation(key, expect, update), true); - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - } - - @Override - public void clear() throws TimeoutException { - try { - entity.invoke(messageFactory.clearOperation(), true); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - } -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java index dadb81a480..c8b93f178e 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java +++ 
b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java @@ -60,4 +60,9 @@ interface InvalidationListener { */ boolean removeInvalidationListener(InvalidationListener listener); + /** + * Closes this proxy. + */ + void close(); + } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java index 2f9f34182a..f9bd1ff898 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java @@ -17,41 +17,51 @@ import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.ReconnectMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.common.internal.store.Chain; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; -import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -/** - * @author Ludovic Orban - */ public class StrongServerStoreProxy implements ServerStoreProxy { private static final Logger LOGGER = LoggerFactory.getLogger(StrongServerStoreProxy.class); - private final ServerStoreProxy delegate; + private final CommonServerStoreProxy delegate; private final ConcurrentMap hashInvalidationsInProgress = new 
ConcurrentHashMap(); private final Lock invalidateAllLock = new ReentrantLock(); - private CountDownLatch invalidateAllLatch; - private final List invalidationListeners = new CopyOnWriteArrayList(); + private volatile CountDownLatch invalidateAllLatch; private final EhcacheClientEntity entity; + private final EhcacheClientEntity.ReconnectListener reconnectListener; + private final EhcacheClientEntity.DisconnectionListener disconnectionListener; public StrongServerStoreProxy(final ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { - this.delegate = new NoInvalidationServerStoreProxy(messageFactory, entity); + this.delegate = new CommonServerStoreProxy(messageFactory, entity); this.entity = entity; - entity.addResponseListener(EhcacheEntityResponse.HashInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { + this.reconnectListener = new EhcacheClientEntity.ReconnectListener() { + @Override + public void onHandleReconnect(ReconnectMessage reconnectMessage) { + Set inflightInvalidations = hashInvalidationsInProgress.keySet(); + reconnectMessage.addInvalidationsInProgress(delegate.getCacheId(), inflightInvalidations); + if (invalidateAllLatch != null) { + reconnectMessage.addClearInProgress(delegate.getCacheId()); + } + } + }; + entity.addReconnectListener(reconnectListener); + + delegate.addResponseListeners(EhcacheEntityResponse.HashInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { @Override public void onResponse(EhcacheEntityResponse.HashInvalidationDone response) { if (response.getCacheId().equals(messageFactory.getCacheId())) { @@ -66,7 +76,7 @@ public void onResponse(EhcacheEntityResponse.HashInvalidationDone response) { } } }); - entity.addResponseListener(EhcacheEntityResponse.AllInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { + delegate.addResponseListeners(EhcacheEntityResponse.AllInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { @Override public void 
onResponse(EhcacheEntityResponse.AllInvalidationDone response) { if (response.getCacheId().equals(messageFactory.getCacheId())) { @@ -90,70 +100,8 @@ public void onResponse(EhcacheEntityResponse.AllInvalidationDone response) { } } }); - entity.addResponseListener(EhcacheEntityResponse.ServerInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ServerInvalidateHash response) { - if (response.getCacheId().equals(messageFactory.getCacheId())) { - long key = response.getKey(); - LOGGER.debug("CLIENT: on cache {}, server requesting hash {} to be invalidated", messageFactory.getCacheId(), key); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateHash(key); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - entity.addResponseListener(EhcacheEntityResponse.ClientInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { - final String cacheId = response.getCacheId(); - final long key = response.getKey(); - final int invalidationId = response.getInvalidationId(); - if (cacheId.equals(messageFactory.getCacheId())) { - LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateHash(key); - } - - try { - LOGGER.debug("CLIENT: ack'ing invalidation of hash {} from cache {} (ID {})", key, cacheId, invalidationId); - entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), true); - } catch (Exception e) { - //TODO: what should be done here? 
- LOGGER.error("error acking client invalidation of hash {} on cache {}", key, cacheId, e); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - entity.addResponseListener(EhcacheEntityResponse.ClientInvalidateAll.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { - final String cacheId = response.getCacheId(); - final int invalidationId = response.getInvalidationId(); - - if (cacheId.equals(messageFactory.getCacheId())) { - LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateAll(); - } - - try { - LOGGER.debug("CLIENT: ack'ing invalidation of all from cache {} (ID {})", cacheId, invalidationId); - entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), true); - } catch (Exception e) { - //TODO: what should be done here? 
- LOGGER.error("error acking client invalidation of all on cache {}", cacheId, e); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - entity.addDisconnectionListener(new EhcacheClientEntity.DisconnectionListener() { + this.disconnectionListener = new EhcacheClientEntity.DisconnectionListener() { @Override public void onDisconnection() { for (Map.Entry entry : hashInvalidationsInProgress.entrySet()) { @@ -170,7 +118,8 @@ public void onDisconnection() { invalidateAllLock.unlock(); } } - }); + }; + entity.addDisconnectionListener(disconnectionListener); } private T performWaitingForHashInvalidation(long key, NullaryFunction c) throws InterruptedException, TimeoutException { @@ -188,6 +137,7 @@ private T performWaitingForHashInvalidation(long key, NullaryFunction c) try { T result = c.apply(); + LOGGER.debug("CLIENT: Waiting for invalidations on key {}", key); awaitOnLatch(latch); LOGGER.debug("CLIENT: key {} invalidated on all clients, unblocking call", key); return result; @@ -266,12 +216,19 @@ public String getCacheId() { @Override public void addInvalidationListener(InvalidationListener listener) { - invalidationListeners.add(listener); + delegate.addInvalidationListener(listener); } @Override public boolean removeInvalidationListener(InvalidationListener listener) { - return invalidationListeners.remove(listener); + return delegate.removeInvalidationListener(listener); + } + + @Override + public void close() { + this.entity.removeDisconnectionListener(this.disconnectionListener); + this.entity.removeReconnectListener(this.reconnectListener); + delegate.close(); } @Override diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java index 3059e109af..b2ecae09eb 100644 --- 
a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java @@ -46,10 +46,11 @@ abstract class BaseKeyValueOperation implements Operation { } this.timeStamp = buffer.getLong(); int keySize = buffer.getInt(); + int maxLimit = buffer.limit(); buffer.limit(buffer.position() + keySize); ByteBuffer keyBlob = buffer.slice(); buffer.position(buffer.limit()); - buffer.limit(buffer.capacity()); + buffer.limit(maxLimit); try { this.key = keySerializer.read(keyBlob); } catch (ClassNotFoundException e) { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java index c705f9b4ff..66a8966609 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java @@ -122,7 +122,7 @@ public V value() { } } } else { - payload.flip(); + payload.rewind(); chainBuilder = chainBuilder.add(payload); } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java index afa68fb3e7..70e1f0532f 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java @@ -53,16 +53,17 @@ public ConditionalReplaceOperation(final K key, final V oldValue, final V newVal } this.timeStamp = buffer.getLong(); int keySize = buffer.getInt(); 
+ int maxLimit = buffer.limit(); buffer.limit(buffer.position() + keySize); ByteBuffer keyBlob = buffer.slice(); buffer.position(buffer.limit()); - buffer.limit(buffer.capacity()); + buffer.limit(maxLimit); int oldValueSize = buffer.getInt(); buffer.limit(buffer.position() + oldValueSize); ByteBuffer oldValueBlob = buffer.slice(); buffer.position(buffer.limit()); - buffer.limit(buffer.capacity()); + buffer.limit(maxLimit); ByteBuffer valueBlob = buffer.slice(); @@ -146,7 +147,8 @@ public boolean equals(final Object obj) { return false; } - ConditionalReplaceOperation other = (ConditionalReplaceOperation)obj; + @SuppressWarnings("unchecked") + ConditionalReplaceOperation other = (ConditionalReplaceOperation) obj; if(this.getOpCode() != other.getOpCode()) { return false; } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java index 5e0de52354..63385b7829 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java @@ -96,7 +96,8 @@ public boolean equals(final Object obj) { return false; } - RemoveOperation other = (RemoveOperation)obj; + @SuppressWarnings("unchecked") + RemoveOperation other = (RemoveOperation) obj; if(this.getOpCode() != other.getOpCode()) { return false; } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java index c6868312b6..16bbf347f9 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java +++ 
b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java @@ -42,4 +42,12 @@ public Operation decode(ByteBuffer buffer) { buffer.rewind(); return opCode.decode(buffer, keySerializer, valueSerializer); } + + public Serializer getKeySerializer() { + return keySerializer; + } + + public Serializer getValueSerializer() { + return valueSerializer; + } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java index bb89c24456..b0cc5ed2f6 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java @@ -31,6 +31,11 @@ public interface ClusteringService extends PersistableResourceService { ClusteringServiceConfiguration getConfiguration(); + /** + * @return true if a connection to a cluster exists + */ + boolean isConnected(); + /** * Gets a {@link ServerStoreProxy} though which a server-resident {@code ServerStore} is accessed. * diff --git a/clustered/client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java new file mode 100644 index 0000000000..6a80761325 --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java @@ -0,0 +1,102 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.net.URI; + +public class ClusteredResourcePoolUpdationTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); + + private static PersistentCacheManager cacheManager; + private static Cache dedicatedCache; + private static Cache sharedCache; + + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + @BeforeClass + public static void setUp() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 8, MemoryUnit.MB) + .resource("secondary-server-resource", 8, MemoryUnit.MB) + .build()); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + 
.with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER_URI).autoCreate() + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 2, MemoryUnit.MB, "secondary-server-resource") + .resourcePool("resource-pool-b", 4, MemoryUnit.MB)) + .withCache("dedicated-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB)))) + .withCache("shared-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")))) + .build(); + cacheManager.init(); + + dedicatedCache = cacheManager.getCache("dedicated-cache", Long.class, String.class); + sharedCache = cacheManager.getCache("shared-cache", Long.class, String.class); + } + + @AfterClass + public static void tearDown() throws Exception { + cacheManager.close(); + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + public void testClusteredDedicatedResourcePoolUpdation() throws Exception { + expectedException.expect(UnsupportedOperationException.class); + expectedException.expectMessage("Updating CLUSTERED resource is not supported"); + dedicatedCache.getRuntimeConfiguration().updateResourcePools( + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB)) + .build() + ); + } + + @Test + public void testClusteredSharedResourcePoolUpdation() throws Exception { + expectedException.expect(UnsupportedOperationException.class); + expectedException.expectMessage("Updating CLUSTERED resource is not supported"); + sharedCache.getRuntimeConfiguration().updateResourcePools( + ResourcePoolsBuilder.newResourcePoolsBuilder() + 
.with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")) + .build() + ); + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java index 8e6b78909f..c191427c81 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java @@ -46,7 +46,6 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @@ -186,7 +185,17 @@ public void testDestroyCacheWithCacheManagerStopped() throws CachePersistenceExc } @Test - public void testDestroyCacheWithCacheManagerStopped_whenUsedExclusively() throws CachePersistenceException { + public void testDestroyCacheWithTwoCacheManagerOnSameCache_forbiddenWhenInUse() throws CachePersistenceException { + PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); + PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); + + expectedException.expect(CachePersistenceException.class); + expectedException.expectMessage("Cannot destroy clustered tier 'clustered-cache': in use by 1 other client(s) (on terracotta://example.com:9540)"); + persistentCacheManager1.destroyCache(CLUSTERED_CACHE); + } + + @Test + public void testDestroyCacheWithTwoCacheManagerOnSameCache_firstRemovesSecondDestroy() throws CachePersistenceException { PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); @@ -196,12 +205,12 @@ public void 
testDestroyCacheWithCacheManagerStopped_whenUsedExclusively() throws } @Test - public void testDestroyCacheWithCacheManagerStopped_forbiddenWhenInUse() throws CachePersistenceException { - PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); + public void testDestroyCacheWithTwoCacheManagerOnSameCache_secondDoesntHaveTheCacheButPreventExclusiveAccessToCluster() throws CachePersistenceException { + PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(false); PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); - expectedException.expect(CachePersistenceException.class); - expectedException.expectMessage("Cannot destroy clustered tier 'clustered-cache': in use by 1 other client(s) (on terracotta://example.com:9540)"); + persistentCacheManager2.removeCache(CLUSTERED_CACHE); + persistentCacheManager1.destroyCache(CLUSTERED_CACHE); } } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java new file mode 100644 index 0000000000..f9625c9c9f --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java @@ -0,0 +1,122 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.client; + +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.net.URI; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +/** + * This test makes sure a clustered cache can be opened from many client instances. As usual with concurrency tests, a + * success doesn't mean it will work forever and a failure might not occur reliably. 
However, it puts together all + * conditions to make it fail in case of a race condition + * + * @author Henri Tremblay + */ +public class ClusteredConcurrencyTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); + private static final String CACHE_NAME = "clustered-cache"; + + private AtomicReference exception = new AtomicReference(); + + @Before + public void definePassthroughServer() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 64, MemoryUnit.MB) + .resource("secondary-server-resource", 64, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + public void test() throws Throwable { + final int THREAD_NUM = 50; + + final CountDownLatch latch = new CountDownLatch(THREAD_NUM + 1); + + List threads = new ArrayList(THREAD_NUM); + for (int i = 0; i < THREAD_NUM; i++) { + Thread t1 = new Thread(content(latch)); + t1.start(); + threads.add(t1); + } + + latch.countDown(); + latch.await(); + + for(Thread t : threads) { + t.join(); + } + + Throwable throwable = exception.get(); + if(throwable != null) { + throw throwable; + } + } + + private Runnable content(final CountDownLatch latch) { + return new Runnable() { + @Override + public void run() { + try { + CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER_URI).autoCreate() + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 32, MemoryUnit.MB) + .resourcePool("resource-pool-b", 32, MemoryUnit.MB, "secondary-server-resource")) + .withCache(CACHE_NAME, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + 
.with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB))) + .add(new ClusteredStoreConfiguration(Consistency.STRONG))); + + latch.countDown(); + try { + latch.await(); + } catch (InterruptedException e) { + // continue + } + + clusteredCacheManagerBuilder.build(true); + } catch (Throwable t) { + exception.compareAndSet(null, t); // only keep the first exception + } + } + }; + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java index 2af0633f05..4a0f36fcfe 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java @@ -184,7 +184,7 @@ public void deleteAll(Iterable keys) throws BulkCacheWritingExce private static class TestEventListener implements CacheEventListener { @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { } } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java index 858e85f2ae..d6cfee67a2 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java @@ -56,7 +56,7 @@ public void testGetReadOperationTimeout() throws Exception { @Test public void testDefaultReadOperationTimeout() throws Exception { - assertThat(new ClusteringServiceConfiguration(DEFAULT_URI).getReadOperationTimeout(), is(TimeoutDuration.of(5, TimeUnit.SECONDS))); + assertThat(new 
ClusteringServiceConfiguration(DEFAULT_URI).getReadOperationTimeout(), is(TimeoutDuration.of(20, TimeUnit.SECONDS))); } @Test(expected = NullPointerException.class) diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/config/TimeoutDurationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/config/TimeoutDurationTest.java index 042478aa02..dfd7b37bb0 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/config/TimeoutDurationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/config/TimeoutDurationTest.java @@ -41,6 +41,7 @@ public class TimeoutDurationTest { @Test public void testEquals() throws Exception { + @SuppressWarnings("unchecked") List> equalPairs = Arrays.asList( Pair.of(TimeoutDuration.of(1, NANOSECONDS), TimeoutDuration.of(1, NANOSECONDS)), Pair.of(TimeoutDuration.of(1, MICROSECONDS), TimeoutDuration.of(1000, NANOSECONDS)), @@ -70,7 +71,7 @@ public void testEquals() throws Exception { Pair.of(TimeoutDuration.of(1, DAYS), TimeoutDuration.of(24L, HOURS)), - Pair.of(TimeoutDuration.of(7, NANOSECONDS), TimeoutDuration.of(1 * 7, NANOSECONDS)), + Pair.of(TimeoutDuration.of(7, NANOSECONDS), TimeoutDuration.of(7, NANOSECONDS)), Pair.of(TimeoutDuration.of(7, MICROSECONDS), TimeoutDuration.of(1000 * 7, NANOSECONDS)), Pair.of(TimeoutDuration.of(7, MILLISECONDS), TimeoutDuration.of(1000000 * 7, NANOSECONDS)), Pair.of(TimeoutDuration.of(7, SECONDS), TimeoutDuration.of(1000000000L * 7, NANOSECONDS)), @@ -128,6 +129,7 @@ public void testEquals() throws Exception { assertThat(pair.getFirst().hashCode(), is(equalTo(pair.getSecond().hashCode()))); } + @SuppressWarnings("unchecked") List> unEqualPairs = Arrays.asList( Pair.of(TimeoutDuration.of(Long.MAX_VALUE, DAYS), TimeoutDuration.of(Long.MAX_VALUE, HOURS)), Pair.of(TimeoutDuration.of(Long.MAX_VALUE, DAYS), TimeoutDuration.of(Long.MAX_VALUE, MINUTES)), @@ -238,4 +240,4 @@ public T getSecond() { return this.second; } } -} \ No newline at 
end of file +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java index 765816809d..fa853b800a 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java @@ -22,7 +22,10 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockClient; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.terracotta.connection.Connection; import static org.hamcrest.core.Is.is; @@ -44,12 +47,21 @@ public class EhcacheClientEntityFactoryTest { + @Mock + private EntityRef entityRef; + @Mock + private EhcacheClientEntity entity; + @Mock + private Connection connection; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + @Test public void testCreate() throws Exception { - EhcacheClientEntity entity = mock(EhcacheClientEntity.class); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(entity); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -63,11 +75,8 @@ public void testCreate() throws Exception { @Test public void testCreateBadConfig() throws Exception { - EhcacheClientEntity entity = mock(EhcacheClientEntity.class); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(entity); 
doThrow(ClusteredTierManagerConfigurationException.class).when(entity).configure(any(ServerSideConfiguration.class)); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -87,9 +96,7 @@ public void testCreateBadConfig() throws Exception { @Test public void testCreateWhenExisting() throws Exception { - EntityRef entityRef = mock(EntityRef.class); doThrow(EntityAlreadyExistsException.class).when(entityRef).create(any()); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -105,10 +112,7 @@ public void testCreateWhenExisting() throws Exception { @Test public void testRetrieve() throws Exception { - EhcacheClientEntity entity = mock(EhcacheClientEntity.class); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(entity); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -121,11 +125,8 @@ public void testRetrieve() throws Exception { @Test public void testRetrieveFailedValidate() throws Exception { - EhcacheClientEntity entity = mock(EhcacheClientEntity.class); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(entity); doThrow(IllegalArgumentException.class).when(entity).validate(any(ServerSideConfiguration.class)); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); 
addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -142,11 +143,10 @@ public void testRetrieveFailedValidate() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testRetrieveWhenNotExisting() throws Exception { - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenThrow(EntityNotFoundException.class); doThrow(EntityAlreadyExistsException.class).when(entityRef).create(any()); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -162,9 +162,7 @@ public void testRetrieveWhenNotExisting() throws Exception { @Test public void testDestroy() throws Exception { - EntityRef entityRef = mock(EntityRef.class); doReturn(Boolean.TRUE).when(entityRef).destroy(); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -176,9 +174,7 @@ public void testDestroy() throws Exception { @Test public void testDestroyWhenNotExisting() throws Exception { - EntityRef entityRef = mock(EntityRef.class); doThrow(EntityNotFoundException.class).when(entityRef).destroy(); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -199,6 +195,7 @@ private static void addMockUnlockedLock(Connection connection, String lockname) private static void addMockLock(Connection connection, String lockname, boolean result, Boolean ... 
results) throws Exception { VoltronReadWriteLockClient lock = mock(VoltronReadWriteLockClient.class); when(lock.tryLock(any(HoldType.class))).thenReturn(result, results); + @SuppressWarnings("unchecked") EntityRef interlockRef = mock(EntityRef.class); when(connection.getEntityRef(eq(VoltronReadWriteLockClient.class), anyInt(), eq(lockname))).thenReturn(interlockRef); when(interlockRef.fetchEntity()).thenReturn(lock); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java index 23cfd4b976..6c47a30e6c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java @@ -32,6 +32,7 @@ import java.util.IdentityHashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Properties; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; @@ -44,20 +45,28 @@ import org.terracotta.connection.ConnectionException; import org.terracotta.connection.ConnectionPropertyNames; import org.terracotta.connection.ConnectionService; +import org.terracotta.connection.entity.Entity; +import org.terracotta.connection.entity.EntityRef; import org.terracotta.entity.EntityClientService; import org.terracotta.entity.EntityMessage; import org.terracotta.entity.EntityResponse; import org.terracotta.entity.EntityServerService; import org.terracotta.entity.ServiceProvider; import org.terracotta.entity.ServiceProviderConfiguration; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; +import org.terracotta.exception.EntityNotFoundException; +import org.terracotta.exception.EntityNotProvidedException; +import org.terracotta.exception.PermanentEntityException; import 
org.terracotta.offheapresource.OffHeapResourcesProvider; import org.terracotta.offheapresource.config.MemoryUnit; import org.terracotta.offheapresource.config.OffheapResourcesType; import org.terracotta.offheapresource.config.ResourceType; +import org.terracotta.passthrough.IAsynchronousServerCrasher; +import org.terracotta.passthrough.PassthroughConnection; import org.terracotta.passthrough.PassthroughServer; import org.terracotta.passthrough.PassthroughServerRegistry; +import static org.mockito.Mockito.mock; + /** * A {@link ConnectionService} implementation used to simulate Voltron server connections for unit testing purposes. @@ -137,6 +146,8 @@ public static void add(URI uri, PassthroughServer server) { } SERVERS.put(keyURI, new ServerDescriptor(server)); + // TODO rework that better + server.registerAsynchronousServerCrasher(mock(IAsynchronousServerCrasher.class)); server.start(true, false); LOGGER.info("Started PassthroughServer at {}", keyURI); } @@ -207,9 +218,6 @@ public static PassthroughServer remove(URI uri) { URI keyURI = createKey(uri); ServerDescriptor serverDescriptor = SERVERS.remove(keyURI); if (serverDescriptor != null) { - serverDescriptor.server.stop(); - LOGGER.info("Stopped PassthroughServer at {}", keyURI); - for (Connection connection : serverDescriptor.getConnections().keySet()) { try { LOGGER.warn("Force close {}", formatConnectionId(connection)); @@ -220,6 +228,31 @@ public static PassthroughServer remove(URI uri) { // Ignored } } + + //open destroy connection. You need to make sure connection doesn't have any entities associated with it. 
+ PassthroughConnection connection = serverDescriptor.server.connectNewClient("destroy-connection"); + + for(Entry entry : serverDescriptor.knownEntities.entrySet()) { + @SuppressWarnings("unchecked") + Class type = (Class) entry.getKey(); + List args = (List)entry.getValue(); + Long version = (Long)args.get(0); + String stringArg = (String)args.get(1); + + try { + EntityRef entityRef = connection.getEntityRef(type, version, stringArg); + entityRef.destroy(); + } catch (EntityNotProvidedException ex) { + LOGGER.error("Entity destroy failed: ", ex); + } catch (EntityNotFoundException ex) { + LOGGER.error("Entity destroy failed: ", ex); + } catch (PermanentEntityException ex) { + LOGGER.error("Entity destroy failed (permanent???): ", ex); + } + } + + serverDescriptor.server.stop(); + LOGGER.info("Stopped PassthroughServer at {}", keyURI); return serverDescriptor.server; } else { return null; @@ -332,7 +365,7 @@ public PassthroughServer build() { } if (!this.resources.getResource().isEmpty()) { - newServer.registerServiceProvider(new OffHeapResourcesProvider(), new OffHeapResourcesConfiguration(this.resources)); + newServer.registerExtendedConfiguration(new OffHeapResourcesProvider(this.resources)); } for (Map.Entry entry : serviceProviders.entrySet()) { @@ -465,6 +498,7 @@ synchronized void removeConnections() { private static final class ServerDescriptor { private final PassthroughServer server; private final Map connections = new IdentityHashMap(); + private final Map, List> knownEntities = new HashMap, List>(); ServerDescriptor(PassthroughServer server) { this.server = server; @@ -481,6 +515,13 @@ synchronized void add(Connection connection, Properties properties) { synchronized void remove(Connection connection) { this.connections.remove(connection); } + + public void addKnownEntity(Class arg, Object arg1, Object arg2) { + List set = new ArrayList(); + set.add(arg1); + set.add(arg2); + knownEntities.put(arg, set); + } } /** @@ -498,11 +539,16 @@ private static 
final class ConnectionInvocationHandler implements InvocationHand } @Override + @SuppressWarnings("unchecked") public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if (method.getName().equals("close")) { serverDescriptor.remove(connection); LOGGER.info("Client closed {}", formatConnectionId(connection)); } + + if (method.getName().equals("getEntityRef")) { + serverDescriptor.addKnownEntity((Class) args[0], args[1] ,args[2]); + } try { return method.invoke(connection, args); } catch (InvocationTargetException e) { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java new file mode 100644 index 0000000000..12fd179431 --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.client.internal.config; + +import org.ehcache.config.ResourcePool; +import org.junit.Test; + +import static org.mockito.Mockito.mock; + +public class ClusteredResourcePoolImplTest { + + @Test(expected = UnsupportedOperationException.class) + public void validateUpdate() throws Exception { + ClusteredResourcePoolImpl resourcePool = new ClusteredResourcePoolImpl(); + resourcePool.validateUpdate(mock(ResourcePool.class)); + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java new file mode 100644 index 0000000000..99daba303a --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java @@ -0,0 +1,33 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.client.internal.config; + +import org.ehcache.config.ResourcePool; +import org.ehcache.config.units.MemoryUnit; +import org.junit.Test; + +import static org.mockito.Mockito.mock; + +public class DedicatedClusteredResourcePoolImplTest { + + @Test(expected = UnsupportedOperationException.class) + public void validateUpdate() throws Exception { + DedicatedClusteredResourcePoolImpl resourcePool = new DedicatedClusteredResourcePoolImpl("foo", 3, MemoryUnit.MB); + resourcePool.validateUpdate(mock(ResourcePool.class)); + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java new file mode 100644 index 0000000000..e4f0dea3d6 --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.client.internal.config; + +import org.ehcache.config.ResourcePool; +import org.junit.Test; + +import static org.mockito.Mockito.mock; + +public class SharedClusteredResourcePoolImplTest { + + @Test(expected = UnsupportedOperationException.class) + public void validateUpdate() throws Exception { + SharedClusteredResourcePoolImpl resourcePool = new SharedClusteredResourcePoolImpl("foo"); + resourcePool.validateUpdate(mock(ResourcePool.class)); + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java index 000df77bce..8b72a046ca 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java @@ -170,7 +170,7 @@ public void testGetTimeoutNone() throws Exception { ServiceLocator.findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); assertThat(clusteringServiceConfiguration, is(notNullValue())); - assertThat(clusteringServiceConfiguration.getReadOperationTimeout(), is(TimeoutDuration.of(5, TimeUnit.SECONDS))); + assertThat(clusteringServiceConfiguration.getReadOperationTimeout(), is(TimeoutDuration.of(20, TimeUnit.SECONDS))); } @Test diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java index 72c174e981..762a5fc320 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java +++ 
b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java @@ -16,7 +16,11 @@ package org.ehcache.clustered.client.internal.lock; +import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.terracotta.connection.Connection; import org.terracotta.connection.entity.EntityRef; @@ -27,21 +31,30 @@ import static org.junit.Assert.assertThat; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import org.terracotta.exception.EntityAlreadyExistsException; public class VoltronReadWriteLockTest { + @Mock + private VoltronReadWriteLockClient client; + + @Mock + private EntityRef entityRef; + + @Mock + private Connection connection; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + @Test public void testCreateLockEntityWhenNotExisting() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -52,13 +65,9 @@ public void testCreateLockEntityWhenNotExisting() throws Exception { @Test public void testFetchExistingLockEntityWhenExists() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); doThrow(EntityAlreadyExistsException.class).when(entityRef).create(any(Void.class)); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); 
when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -67,12 +76,8 @@ public void testFetchExistingLockEntityWhenExists() throws Exception { @Test public void testWriteLockLocksWrite() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -83,12 +88,8 @@ public void testWriteLockLocksWrite() throws Exception { @Test public void testReadLockLocksRead() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -99,12 +100,8 @@ public void testReadLockLocksRead() throws Exception { @Test public void testWriteUnlockUnlocksWrite() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -115,12 +112,8 @@ public void testWriteUnlockUnlocksWrite() throws Exception { 
@Test public void testReadUnlockUnlocksRead() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -131,12 +124,8 @@ public void testReadUnlockUnlocksRead() throws Exception { @Test public void testWriteUnlockClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -147,12 +136,8 @@ public void testWriteUnlockClosesEntity() throws Exception { @Test public void testReadUnlockClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -163,12 +148,8 @@ public void testReadUnlockClosesEntity() throws Exception { @Test public void testWriteUnlockDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = 
mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -178,13 +159,10 @@ public void testWriteUnlockDestroysEntity() throws Exception { } @Test + @Ignore("Enable once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed and TODO removed") public void testReadUnlockDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -195,13 +173,10 @@ public void testReadUnlockDestroysEntity() throws Exception { @Test public void testTryWriteLockTryLocksWrite() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -211,13 +186,10 @@ public void testTryWriteLockTryLocksWrite() throws Exception { @Test public void testTryReadLockTryLocksRead() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); 
when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -227,13 +199,10 @@ public void testTryReadLockTryLocksRead() throws Exception { @Test public void testTryWriteUnlockUnlocksWrite() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -244,13 +213,10 @@ public void testTryWriteUnlockUnlocksWrite() throws Exception { @Test public void testTryReadUnlockUnlocksRead() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -261,13 +227,10 @@ public void testTryReadUnlockUnlocksRead() throws Exception { @Test public void testTryWriteUnlockClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); 
VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -278,13 +241,10 @@ public void testTryWriteUnlockClosesEntity() throws Exception { @Test public void testTryReadUnlockClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -295,13 +255,10 @@ public void testTryReadUnlockClosesEntity() throws Exception { @Test public void testTryWriteUnlockDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -311,14 +268,12 @@ public void testTryWriteUnlockDestroysEntity() throws Exception { } @Test + @Ignore("Enable once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed and TODO removed") public void testTryReadUnlockDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); 
VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -329,13 +284,10 @@ public void testTryReadUnlockDestroysEntity() throws Exception { @Test public void testTryWriteLockFailingClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(false); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -345,13 +297,10 @@ public void testTryWriteLockFailingClosesEntity() throws Exception { @Test public void testTryReadLockFailingClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(false); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -360,14 +309,12 @@ public void testTryReadLockFailingClosesEntity() throws Exception { } @Test + @Ignore("Enable once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed and TODO removed") public void testTryWriteLockFailingDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(false); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, 
"VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -376,14 +323,12 @@ public void testTryWriteLockFailingDestroysEntity() throws Exception { } @Test + @Ignore("Enable once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed and TODO removed") public void testTryReadLockFailingDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(false); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java index a19f504036..c44abf73a1 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java @@ -25,20 +25,18 @@ import org.ehcache.clustered.client.service.ClusteringService; import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; import org.ehcache.clustered.server.EhcacheServerEntityService; +import org.ehcache.spi.persistence.StateHolder; import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; import org.terracotta.offheapresource.OffHeapResourcesProvider; import org.terracotta.offheapresource.config.MemoryUnit; import 
org.terracotta.passthrough.PassthroughClusterControl; -import org.terracotta.passthrough.PassthroughServer; import org.terracotta.passthrough.PassthroughTestHelpers; import java.io.Serializable; import java.lang.reflect.Field; import java.net.URI; -import java.util.concurrent.ConcurrentMap; import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; import static org.hamcrest.Matchers.hasSize; @@ -54,19 +52,15 @@ public class ClusteredStateRepositoryReplicationTest { @Before public void setUp() throws Exception { this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, - new PassthroughTestHelpers.ServerInitializer() { - @Override - public void registerServicesForServer(PassthroughServer server) { - server.registerServerEntityService(new EhcacheServerEntityService()); - server.registerClientEntityService(new EhcacheClientEntityService()); - server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); - server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - server.registerServiceProvider(new OffHeapResourcesProvider(), - new OffHeapResourcesConfiguration(getOffheapResourcesType("test", 32, MemoryUnit.MB))); - - UnitTestConnectionService.addServerToStripe(STRIPENAME, server); - } - } + server -> { + server.registerServerEntityService(new EhcacheServerEntityService()); + server.registerClientEntityService(new EhcacheClientEntityService()); + server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); + server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); + server.registerExtendedConfiguration(new OffHeapResourcesProvider(getOffheapResourcesType("test", 32, MemoryUnit.MB))); + + UnitTestConnectionService.addServerToStripe(STRIPENAME, server); + } ); clusterControl.waitForActive(); @@ -104,15 +98,15 @@ public Class getServiceType() { } }, "test", clientEntity); - ConcurrentMap testMap = 
stateRepository.getPersistentConcurrentMap("testMap", String.class, String.class); - testMap.putIfAbsent("One", "One"); - testMap.putIfAbsent("Two", "Two"); + StateHolder testHolder = stateRepository.getPersistentStateHolder("testHolder", String.class, String.class); + testHolder.putIfAbsent("One", "One"); + testHolder.putIfAbsent("Two", "Two"); clusterControl.terminateActive(); clusterControl.waitForActive(); - assertThat(testMap.get("One"), is("One")); - assertThat(testMap.get("Two"), is("Two")); + assertThat(testHolder.get("One"), is("One")); + assertThat(testHolder.get("Two"), is("Two")); service.stop(); } @@ -142,7 +136,7 @@ public Class getServiceType() { } }, "test", clientEntity); - ConcurrentMap testMap = stateRepository.getPersistentConcurrentMap("testMap", TestVal.class, TestVal.class); + StateHolder testMap = stateRepository.getPersistentStateHolder("testMap", TestVal.class, TestVal.class); testMap.putIfAbsent(new TestVal("One"), new TestVal("One")); testMap.putIfAbsent(new TestVal("Two"), new TestVal("Two")); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java index f33cf67af7..122eef6c6e 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java @@ -48,6 +48,7 @@ import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.service.MaintainableService; import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; @@ -78,6 +79,7 @@ import static org.ehcache.config.ResourceType.Core.OFFHEAP; import static 
org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; @@ -216,7 +218,9 @@ public void testStartStopAutoCreate() throws Exception { .autoCreate() .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); + assertThat(service.isConnected(), is(false)); service.start(null); + assertThat(service.isConnected(), is(true)); assertThat(UnitTestConnectionService.getConnectionProperties(clusterUri).size(), is(1)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -326,7 +330,9 @@ public void testStartForMaintenanceAutoStart() throws Exception { .autoCreate() .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); - service.startForMaintenance(null); + assertThat(service.isConnected(), is(false)); + service.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); + assertThat(service.isConnected(), is(true)); assertThat(UnitTestConnectionService.getConnectionProperties(clusterUri).size(), is(1)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -350,7 +356,7 @@ public void testStartForMaintenanceOtherAutoCreate() throws Exception { DefaultClusteringService maintenanceService = new DefaultClusteringService(configuration); try { - maintenanceService.startForMaintenance(null); + maintenanceService.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); fail("Expecting IllegalStateException"); } catch (IllegalStateException e) { // Expected @@ -388,7 +394,7 @@ public void testStartForMaintenanceOtherCreated() throws Exception { assertThat(activeEntity.getConnectedClients().size(), is(0)); DefaultClusteringService maintenanceService = new DefaultClusteringService(configuration); - 
maintenanceService.startForMaintenance(null); + maintenanceService.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); assertThat(activeEntities.size(), is(1)); @@ -441,14 +447,14 @@ public void testStartForMaintenanceInterlock() throws Exception { .autoCreate() .build(); DefaultClusteringService maintenanceService1 = new DefaultClusteringService(configuration); - maintenanceService1.startForMaintenance(null); + maintenanceService1.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); assertThat(activeEntities.size(), is(0)); DefaultClusteringService maintenanceService2 = new DefaultClusteringService(configuration); try { - maintenanceService2.startForMaintenance(null); + maintenanceService2.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); fail("Expecting IllegalStateException"); } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString(" acquire cluster-wide ")); @@ -464,14 +470,14 @@ public void testStartForMaintenanceSequence() throws Exception { .autoCreate() .build(); DefaultClusteringService maintenanceService1 = new DefaultClusteringService(configuration); - maintenanceService1.startForMaintenance(null); + maintenanceService1.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); maintenanceService1.stop(); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); assertThat(activeEntities.size(), is(0)); DefaultClusteringService maintenanceService2 = new DefaultClusteringService(configuration); - maintenanceService2.startForMaintenance(null); + maintenanceService2.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); maintenanceService2.stop(); activeEntities = 
observableEhcacheServerEntityService.getServedActiveEntities(); @@ -543,7 +549,7 @@ public void testBasicDestroyAll() throws Exception { assertThat(e.getMessage(), containsString("Maintenance mode required")); } - createService.startForMaintenance(null); + createService.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); createService.destroyAll(); @@ -1318,7 +1324,7 @@ public void testGetServerStoreProxyDedicatedDestroy() throws Exception { } @Test - public void testDestroyWhenStoppedWorks() throws Exception { + public void testDestroyCantBeCalledIfStopped() throws Exception { String cacheAlias = "cacheAlias"; String targetResource = "serverResource2"; ClusteringServiceConfiguration configuration = @@ -1327,28 +1333,11 @@ public void testDestroyWhenStoppedWorks() throws Exception { .defaultServerResource("defaultResource") .build(); DefaultClusteringService creationService = new DefaultClusteringService(configuration); - creationService.start(null); - DefaultSerializationProvider serializationProvider = new DefaultSerializationProvider(null); - serializationProvider.start(providerContaining()); - Store.Configuration storeConfiguration = - getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); - - ServerStoreProxy serverStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); - assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); + expectedException.expect(IllegalStateException.class); + expectedException.expectMessage(endsWith(" should be started to call destroy")); - creationService.stop(); creationService.destroy(cacheAlias); - - List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); - ObservableEhcacheActiveEntity activeEntity = activeEntities.get(0); - - assertThat(activeEntity.getDedicatedResourcePoolIds(), is(Matchers.empty())); - assertThat(activeEntity.getStores(), 
is(Matchers.empty())); - assertThat(activeEntity.getInUseStores().keySet(), is(Matchers.empty())); - - assertThat("Service must be stopped after destroying the cache", creationService.isStarted(), is(false)); } @Test @@ -1399,7 +1388,7 @@ public void testFullDestroyAll() throws Exception { assertThat(e.getMessage(), containsString("Maintenance mode required")); } - createService.startForMaintenance(null); + createService.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); createService.destroyAll(); @@ -1770,7 +1759,7 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredDedicatedValidateS DefaultClusteringService accessService = new DefaultClusteringService(accessConfig); accessService.start(null); - Store.Configuration accessStoreConfig = + Store.Configuration accessStoreConfig = getSharedStoreConfig("serverResource1", serializationProvider, Long.class, String.class); try { @@ -1921,7 +1910,8 @@ public void testGetServerStoreProxyReturnsEventualStore() throws Exception { ClusteringService.ClusteredCacheIdentifier cacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-cache", null); ResourcePools resourcePools = mock(ResourcePools.class); - Store.Configuration storeConfig = mock(Store.Configuration.class); + @SuppressWarnings("unchecked") + Store.Configuration storeConfig = mock(Store.Configuration.class); when(storeConfig.getResourcePools()).thenReturn(resourcePools); when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 1L, MemoryUnit.MB)); when(storeConfig.getKeyType()).thenReturn(String.class); @@ -1932,7 +1922,7 @@ public void testGetServerStoreProxyReturnsEventualStore() throws Exception { } @Test - public void testGetServerStoreProxyReturnsEventualStoreByDefault() throws Exception { + public void testGetServerStoreProxyReturnsStrongStore() throws Exception { String entityIdentifier = "my-application"; 
ClusteringServiceConfiguration configuration = new ClusteringServiceConfiguration( @@ -1944,37 +1934,92 @@ public void testGetServerStoreProxyReturnsEventualStoreByDefault() throws Except ClusteringService.ClusteredCacheIdentifier cacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-cache", null); ResourcePools resourcePools = mock(ResourcePools.class); - Store.Configuration storeConfig = mock(Store.Configuration.class); + @SuppressWarnings("unchecked") + Store.Configuration storeConfig = mock(Store.Configuration.class); when(storeConfig.getResourcePools()).thenReturn(resourcePools); when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 1L, MemoryUnit.MB)); when(storeConfig.getKeyType()).thenReturn(String.class); when(storeConfig.getValueType()).thenReturn(Object.class); - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.EVENTUAL); - assertThat(serverStoreProxy, instanceOf(EventualServerStoreProxy.class)); + ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); + assertThat(serverStoreProxy, instanceOf(StrongServerStoreProxy.class)); } @Test - public void testGetServerStoreProxyReturnsStrongStore() throws Exception { + public void testGetServerStoreProxyFailureClearsEntityListeners() throws Exception { + // Initial setup begin String entityIdentifier = "my-application"; ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration( - URI.create(CLUSTER_URI_BASE + entityIdentifier), - true, new ServerSideConfiguration(Collections.emptyMap())); + new ClusteringServiceConfiguration( + URI.create(CLUSTER_URI_BASE + entityIdentifier), + true, new ServerSideConfiguration(Collections.emptyMap())); DefaultClusteringService service = new DefaultClusteringService(configuration); service.start(null); 
ClusteringService.ClusteredCacheIdentifier cacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-cache", null); ResourcePools resourcePools = mock(ResourcePools.class); - Store.Configuration storeConfig = mock(Store.Configuration.class); + @SuppressWarnings("unchecked") + Store.Configuration storeConfig = mock(Store.Configuration.class); when(storeConfig.getResourcePools()).thenReturn(resourcePools); when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 1L, MemoryUnit.MB)); when(storeConfig.getKeyType()).thenReturn(String.class); when(storeConfig.getValueType()).thenReturn(Object.class); - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); - assertThat(serverStoreProxy, instanceOf(StrongServerStoreProxy.class)); + service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); // Creates the store + service.stop(); + // Initial setup end + + service.start(null); + when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 2L, MemoryUnit.MB)); + try { + service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); + fail("Server store proxy creation should have failed"); + } catch (CachePersistenceException cpe) { + assertThat(service.entity.getDisconnectionListeners().isEmpty(), is(true)); + assertThat(service.entity.getReconnectListeners().isEmpty(), is(true)); + } + } + + @Test + public void testGetServerStoreProxyFailureDoesNotClearOtherStoreEntityListeners() throws Exception { + // Initial setup begin + String entityIdentifier = "my-application"; + ClusteringServiceConfiguration configuration = + new ClusteringServiceConfiguration( + URI.create(CLUSTER_URI_BASE + entityIdentifier), + true, new ServerSideConfiguration(Collections.emptyMap())); + DefaultClusteringService service = new 
DefaultClusteringService(configuration); + service.start(null); + + ClusteringService.ClusteredCacheIdentifier cacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-cache", null); + + ResourcePools resourcePools = mock(ResourcePools.class); + @SuppressWarnings("unchecked") + Store.Configuration storeConfig = mock(Store.Configuration.class); + when(storeConfig.getResourcePools()).thenReturn(resourcePools); + when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 1L, MemoryUnit.MB)); + when(storeConfig.getKeyType()).thenReturn(String.class); + when(storeConfig.getValueType()).thenReturn(Object.class); + + service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); // Creates the store + service.stop(); + // Initial setup end + + service.start(null); + ClusteringService.ClusteredCacheIdentifier otherCacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-other-cache", null); + service.getServerStoreProxy(otherCacheIdentifier, storeConfig, Consistency.STRONG); // Creates one more store + int disconnectionListenersSize = service.entity.getDisconnectionListeners().size(); + int reconnectionListenersSize = service.entity.getReconnectListeners().size(); + + when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 2L, MemoryUnit.MB)); + try { + service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); + fail("Server store proxy creation should have failed"); + } catch (CachePersistenceException cpe) { + assertThat(service.entity.getDisconnectionListeners().size(), is(disconnectionListenersSize)); + assertThat(service.entity.getReconnectListeners().size(), is(reconnectionListenersSize)); + } } @Test diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java 
b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java index c52a1b7067..8dcc68ea32 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java @@ -25,14 +25,13 @@ import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; -import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.config.ResourcePoolsImpl; import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; import org.ehcache.expiry.Expirations; import org.ehcache.expiry.Expiry; -import org.ehcache.impl.internal.DefaultTimeSourceService; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.OffHeapStore; @@ -41,32 +40,19 @@ import org.ehcache.impl.serialization.StringSerializer; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.ServiceConfiguration; -import org.junit.Before; import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.context.query.Matcher; -import org.terracotta.context.query.Matchers; -import org.terracotta.context.query.Query; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; -import static com.sun.corba.se.impl.util.RepositoryId.cache; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.Matchers.is; import static 
org.hamcrest.Matchers.startsWith; import static org.junit.Assert.*; import static org.mockito.Mockito.mock; -import static org.terracotta.context.query.Matchers.attributes; -import static org.terracotta.context.query.Matchers.context; -import static org.terracotta.context.query.Matchers.hasAttribute; -import static org.terracotta.context.query.QueryBuilder.queryBuilder; /** * Provides basic tests for {@link org.ehcache.clustered.client.internal.store.ClusteredStore.Provider ClusteredStore.Provider}. @@ -76,12 +62,13 @@ public class ClusteredStoreProviderTest { @Test public void testRank() throws Exception { ClusteredStore.Provider provider = new ClusteredStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator( - new TieredStore.Provider(), - new OnHeapStore.Provider(), - new OffHeapStore.Provider(), - new OffHeapDiskStore.Provider(), - mock(ClusteringService.class)); + ServiceLocator serviceLocator = dependencySet() + .with(new TieredStore.Provider()) + .with(new OnHeapStore.Provider()) + .with(new OffHeapStore.Provider()) + .with(mock(DiskResourceService.class)) + .with(new OffHeapDiskStore.Provider()) + .with(mock(ClusteringService.class)).build(); provider.start(serviceLocator); assertRank(provider, 1, ClusteredResourceType.Types.DEDICATED); @@ -94,13 +81,14 @@ public void testRank() throws Exception { @Test public void testRankTiered() throws Exception { TieredStore.Provider provider = new TieredStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator( - provider, - new ClusteredStore.Provider(), - new OnHeapStore.Provider(), - new OffHeapStore.Provider(), - new OffHeapDiskStore.Provider(), - mock(ClusteringService.class)); + ServiceLocator serviceLocator = dependencySet() + .with(provider) + .with(new ClusteredStore.Provider()) + .with(new OnHeapStore.Provider()) + .with(new OffHeapStore.Provider()) + .with(new OffHeapDiskStore.Provider()) + .with(mock(DiskResourceService.class)) + .with(mock(ClusteringService.class)).build(); 
serviceLocator.startAllServices(); assertRank(provider, 0, ClusteredResourceType.Types.DEDICATED, ResourceType.Core.DISK); @@ -132,48 +120,12 @@ public void testRankTiered() throws Exception { @Test public void testAuthoritativeRank() throws Exception { ClusteredStore.Provider provider = new ClusteredStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(mock(ClusteringService.class)); + ServiceLocator serviceLocator = dependencySet().with(mock(ClusteringService.class)).build(); provider.start(serviceLocator); - assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, Collections.EMPTY_LIST), is(1)); - assertThat(provider.rankAuthority(ClusteredResourceType.Types.SHARED, Collections.EMPTY_LIST), is(1)); - assertThat(provider.rankAuthority(new UnmatchedResourceType(), Collections.EMPTY_LIST), is(0)); - } - - @Test - public void testStatisticsAssociations() throws Exception { - ClusteredStore.Provider provider = new ClusteredStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator( - new TieredStore.Provider(), - new OnHeapStore.Provider(), - new OffHeapStore.Provider(), - new OffHeapDiskStore.Provider(), - new DefaultTimeSourceService(null), - mock(ClusteringService.class)); - provider.start(serviceLocator); - - ClusteredStore store = provider.createStore(getStoreConfig()); - - Query storeQuery = queryBuilder() - .children() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.containsAll(Collections.singleton("store")); - } - }))))) - .build(); - - Set nodes = Collections.singleton(ContextManager.nodeFor(store)); - - Set storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(false)); - - provider.releaseStore(store); - - storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(true)); + assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, 
Collections.>emptyList()), is(1)); + assertThat(provider.rankAuthority(ClusteredResourceType.Types.SHARED, Collections.>emptyList()), is(1)); + assertThat(provider.rankAuthority(new UnmatchedResourceType(), Collections.>emptyList()), is(0)); } private void assertRank(final Store.Provider provider, final int expectedRank, final ResourceType... resources) { @@ -221,6 +173,7 @@ public ClassLoader getClassLoader() { } @Override + @SuppressWarnings("unchecked") public ResourcePools getResourcePools() { Map, DedicatedClusteredResourcePoolImpl> poolMap = Collections .singletonMap(ClusteredResourceType.Types.DEDICATED, new DedicatedClusteredResourcePoolImpl("test", 10, MemoryUnit.MB)); @@ -265,4 +218,4 @@ public int getTierHeight() { return 10; } } -} \ No newline at end of file +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java index 824ac1e872..5ec50e4569 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java @@ -34,7 +34,6 @@ import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.store.StoreAccessTimeoutException; -import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.expiry.Expirations; @@ -58,7 +57,6 @@ import static org.ehcache.clustered.util.StatisticsTestUtils.validateStat; import static org.ehcache.clustered.util.StatisticsTestUtils.validateStats; -import static org.ehcache.expiry.Expirations.noExpiration; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.junit.Assert.*; @@ -76,7 +74,7 @@ public 
class ClusteredStoreTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); - ClusteredStore store; + private ClusteredStore store; @Before public void setup() throws Exception { @@ -102,8 +100,8 @@ public void setup() throws Exception { null ); clientEntity.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); - ServerStoreMessageFactory factory = new ServerStoreMessageFactory(CACHE_IDENTIFIER); - ServerStoreProxy serverStoreProxy = new NoInvalidationServerStoreProxy(factory, clientEntity); + ServerStoreMessageFactory factory = new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity.getClientId()); + ServerStoreProxy serverStoreProxy = new CommonServerStoreProxy(factory, clientEntity); TestTimeSource testTimeSource = new TestTimeSource(); @@ -128,9 +126,10 @@ public void testPut() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testPutTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -148,8 +147,10 @@ public void testGet() throws Exception { @Test(expected = StoreAccessException.class) public void testGetThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = 
mock(TestTimeSource.class); @@ -158,6 +159,7 @@ public void testGetThrowsOnlySAE() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testGetTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); when(proxy.get(1L)).thenThrow(TimeoutException.class); @@ -171,8 +173,11 @@ public void testGetThatCompactsInvokesReplace() throws Exception { TestTimeSource timeSource = new TestTimeSource(); timeSource.advanceTime(134556L); long now = timeSource.getTimeMillis(); + @SuppressWarnings("unchecked") OperationsCodec operationsCodec = new OperationsCodec(new LongSerializer(), new StringSerializer()); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") ResolvedChain resolvedChain = mock(ResolvedChain.class); when(resolvedChain.isCompacted()).thenReturn(true); when(chainResolver.resolve(any(Chain.class), eq(42L), eq(now))).thenReturn(resolvedChain); @@ -193,7 +198,9 @@ public void testGetThatDoesNotCompactsInvokesReplace() throws Exception { timeSource.advanceTime(134556L); long now = timeSource.getTimeMillis(); OperationsCodec operationsCodec = new OperationsCodec(new LongSerializer(), new StringSerializer()); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") ResolvedChain resolvedChain = mock(ResolvedChain.class); when(resolvedChain.isCompacted()).thenReturn(false); when(chainResolver.resolve(any(Chain.class), eq(42L), eq(now))).thenReturn(resolvedChain); @@ -219,8 +226,10 @@ public void testContainsKey() throws Exception { @Test(expected = StoreAccessException.class) public void testContainsKeyThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = 
mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -240,8 +249,10 @@ public void testRemove() throws Exception { @Test(expected = StoreAccessException.class) public void testRemoveThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -250,9 +261,10 @@ public void testRemoveThrowsOnlySAE() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testRemoveTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -279,8 +291,10 @@ public void testClear() throws Exception { @Test(expected = StoreAccessException.class) public void testClearThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); doThrow(new 
RuntimeException()).when(serverStoreProxy).clear(); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -291,7 +305,8 @@ public void testClearThrowsOnlySAE() throws Exception { @Test(expected = StoreAccessTimeoutException.class) public void testClearTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); doThrow(TimeoutException.class).when(proxy).clear(); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -308,8 +323,10 @@ public void testPutIfAbsent() throws Exception { @Test(expected = StoreAccessException.class) public void testPutIfAbsentThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -318,9 +335,10 @@ public void testPutIfAbsentThrowsOnlySAE() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testPutIfAbsentTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -341,8 +359,10 @@ public void testConditionalRemove() throws Exception { @Test(expected = 
StoreAccessException.class) public void testConditionalRemoveThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -351,9 +371,10 @@ public void testConditionalRemoveThrowsOnlySAE() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testConditionalRemoveTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -371,8 +392,10 @@ public void testReplace() throws Exception { @Test(expected = StoreAccessException.class) public void testReplaceThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -381,9 +404,10 @@ public void testReplaceThrowsOnlySAE() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testReplaceTimeout() throws Exception { ServerStoreProxy proxy = 
mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -405,8 +429,10 @@ public void testConditionalReplace() throws Exception { @Test(expected = StoreAccessException.class) public void testConditionalReplaceThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -415,9 +441,10 @@ public void testConditionalReplaceThrowsOnlySAE() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testConditionalReplaceTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -460,6 +487,7 @@ public void testBulkComputeRemoveAll() throws Exception { @Test(expected = UnsupportedOperationException.class) public void testBulkComputeThrowsForGenericFunction() throws Exception { + @SuppressWarnings("unchecked") Function>, Iterable>> remappingFunction = mock(Function.class); store.bulkCompute(new HashSet(Arrays.asList(1L, 2L)), remappingFunction); @@ -480,9 +508,10 
@@ public void testBulkComputeIfAbsentGetAll() throws Exception { @Test(expected = UnsupportedOperationException.class) public void testBulkComputeIfAbsentThrowsForGenericFunction() throws Exception { + @SuppressWarnings("unchecked") Function, Iterable>> mappingFunction = mock(Function.class); store.bulkComputeIfAbsent(new HashSet(Arrays.asList(1L, 2L)), mappingFunction); } -} \ No newline at end of file +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java similarity index 96% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java rename to clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java index 1f9b9fe01c..d32c635208 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java @@ -44,13 +44,13 @@ import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; -public class NoInvalidationServerStoreProxyTest { +public class CommonServerStoreProxyTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); private static EhcacheClientEntity clientEntity; - private static NoInvalidationServerStoreProxy serverStoreProxy; + private static CommonServerStoreProxy serverStoreProxy; @BeforeClass public static void setUp() throws Exception { @@ -72,7 +72,7 @@ public static void setUp() throws Exception { clientEntity.createCache(CACHE_IDENTIFIER, new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), 
Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class .getName(), null)); - serverStoreProxy = new NoInvalidationServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity); + serverStoreProxy = new CommonServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity.getClientId()), clientEntity); } @AfterClass diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java index 347621cf9f..f37fcaf29f 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java @@ -19,13 +19,17 @@ import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.client.internal.EhcacheClientEntityFactory; +import org.ehcache.clustered.client.internal.EhcacheClientEntityService; import org.ehcache.clustered.client.internal.UnitTestConnectionService; import org.ehcache.clustered.client.internal.UnitTestConnectionService.PassthroughServerBuilder; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; +import org.ehcache.clustered.server.ObservableEhcacheServerEntityService; import org.ehcache.config.units.MemoryUnit; import 
org.ehcache.impl.serialization.LongSerializer; import org.junit.AfterClass; @@ -37,6 +41,7 @@ import java.util.Collections; import java.util.List; import java.util.Properties; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -55,41 +60,50 @@ public class EventualServerStoreProxyTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); + private static EhcacheClientEntity clientEntity1; private static EhcacheClientEntity clientEntity2; private static EventualServerStoreProxy serverStoreProxy1; private static EventualServerStoreProxy serverStoreProxy2; + private static ObservableEhcacheServerEntityService observableEhcacheServerEntityService = new ObservableEhcacheServerEntityService(); @BeforeClass public static void setUp() throws Exception { UnitTestConnectionService.add(CLUSTER_URI, new PassthroughServerBuilder() + .serverEntityService(observableEhcacheServerEntityService) + .clientEntityService(new EhcacheClientEntityService()) + .serverEntityService(new VoltronReadWriteLockServerEntityService()) + .clientEntityService(new VoltronReadWriteLockEntityClientService()) .resource("defaultResource", 128, MemoryUnit.MB) .build()); - Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); + UnitTestConnectionService unitTestConnectionService = new UnitTestConnectionService(); + Connection connection1 = unitTestConnectionService.connect(CLUSTER_URI, new Properties()); + Connection connection2 = unitTestConnectionService.connect(CLUSTER_URI, new Properties()); - EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory entityFactory1 = new EhcacheClientEntityFactory(connection1); + EhcacheClientEntityFactory entityFactory2 = new 
EhcacheClientEntityFactory(connection2); - entityFactory.create("TestCacheManager", + entityFactory1.create("TestCacheManager", new ServerSideConfiguration("defaultResource", Collections.emptyMap())); - clientEntity1 = entityFactory.retrieve("TestCacheManager", + clientEntity1 = entityFactory1.retrieve("TestCacheManager", new ServerSideConfiguration("defaultResource", Collections.emptyMap())); - clientEntity2 = entityFactory.retrieve("TestCacheManager", + clientEntity2 = entityFactory2.retrieve("TestCacheManager", new ServerSideConfiguration("defaultResource", Collections.emptyMap())); ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(16L, MemoryUnit.MB); ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), Consistency.STRONG); + .getName(), Consistency.EVENTUAL); clientEntity1.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); // required to attach the store to the client clientEntity1.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); clientEntity2.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); - serverStoreProxy1 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity1); - serverStoreProxy2 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity2); + serverStoreProxy1 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity1.getClientId()), clientEntity1); + serverStoreProxy2 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity2.getClientId()), clientEntity2); } @AfterClass @@ -164,6 +178,8 @@ public void onInvalidateAll() { // test that each time the server evicted, the other client got notified on top of normal invalidations 
assertThat(store2InvalidatedHashes.size(), is(ITERATIONS + evictionCount)); + assertThatClientsWaitingForInvalidationIsEmpty(); + serverStoreProxy1.removeInvalidationListener(listener1); serverStoreProxy2.removeInvalidationListener(listener2); } @@ -191,6 +207,7 @@ public void onInvalidateAll() { latch.await(5, TimeUnit.SECONDS); assertThat(invalidatedHash.get(), is(1L)); + assertThatClientsWaitingForInvalidationIsEmpty(); serverStoreProxy1.removeInvalidationListener(listener); } @@ -217,6 +234,7 @@ public void onInvalidateAll() { latch.await(5, TimeUnit.SECONDS); assertThat(invalidatedHash.get(), is(1L)); + assertThatClientsWaitingForInvalidationIsEmpty(); serverStoreProxy1.removeInvalidationListener(listener); } @@ -243,7 +261,23 @@ public void onInvalidateAll() { latch.await(5, TimeUnit.SECONDS); assertThat(invalidatedAll.get(), is(true)); + assertThatClientsWaitingForInvalidationIsEmpty(); serverStoreProxy1.removeInvalidationListener(listener); } + private static void assertThatClientsWaitingForInvalidationIsEmpty() throws Exception { + ObservableEhcacheServerEntityService.ObservableEhcacheActiveEntity activeEntity = observableEhcacheServerEntityService.getServedActiveEntities().get(0); + CompletableFuture future = CompletableFuture.supplyAsync(() -> { + while (true) { + try { + if (activeEntity.getClientsWaitingForInvalidation().size() == 0) { + return true; + } + } catch (Exception e) { + } + } + }); + assertThat(future.get(5, TimeUnit.SECONDS), is(true)); + } + } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java index cc0be0eec9..1ae2894efc 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java @@ -58,7 
+58,7 @@ public class StrongServerStoreProxyTest { - private static final ExecutorService executorService = Executors.newCachedThreadPool(); + private static final ExecutorService EXECUTOR_SERVICE = Executors.newCachedThreadPool(); private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); @@ -74,15 +74,18 @@ public static void setUp() throws Exception { new PassthroughServerBuilder() .resource("defaultResource", 128, MemoryUnit.MB) .build()); - Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); + UnitTestConnectionService unitTestConnectionService = new UnitTestConnectionService(); + Connection connection1 = unitTestConnectionService.connect(CLUSTER_URI, new Properties()); + Connection connection2 = unitTestConnectionService.connect(CLUSTER_URI, new Properties()); - EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory entityFactory1 = new EhcacheClientEntityFactory(connection1); + EhcacheClientEntityFactory entityFactory2 = new EhcacheClientEntityFactory(connection2); - entityFactory.create("TestCacheManager", + entityFactory1.create("TestCacheManager", new ServerSideConfiguration("defaultResource", Collections.emptyMap())); - clientEntity1 = entityFactory.retrieve("TestCacheManager", + clientEntity1 = entityFactory1.retrieve("TestCacheManager", new ServerSideConfiguration("defaultResource", Collections.emptyMap())); - clientEntity2 = entityFactory.retrieve("TestCacheManager", + clientEntity2 = entityFactory2.retrieve("TestCacheManager", new ServerSideConfiguration("defaultResource", Collections.emptyMap())); ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4L, MemoryUnit.MB); @@ -96,8 +99,8 @@ public static void setUp() throws Exception { clientEntity1.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); 
clientEntity2.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); - serverStoreProxy1 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity1); - serverStoreProxy2 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity2); + serverStoreProxy1 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity1.getClientId()), clientEntity1); + serverStoreProxy2 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity2.getClientId()), clientEntity2); } @AfterClass @@ -115,7 +118,7 @@ public static void tearDown() throws Exception { } UnitTestConnectionService.remove(CLUSTER_URI); - executorService.shutdown(); + EXECUTOR_SERVICE.shutdown(); } @Test @@ -227,14 +230,14 @@ public void onInvalidateAll() { }; serverStoreProxy2.addInvalidationListener(listener); - executorService.submit(new Callable() { + EXECUTOR_SERVICE.submit(new Callable() { @Override public Object call() throws Exception { serverStoreProxy1.append(1L, createPayload(1L)); return null; } }); - executorService.submit(new Callable() { + EXECUTOR_SERVICE.submit(new Callable() { @Override public Object call() throws Exception { serverStoreProxy1.append(1L, createPayload(1L)); @@ -320,14 +323,14 @@ public void onInvalidateAll() { }; serverStoreProxy2.addInvalidationListener(listener); - executorService.submit(new Callable() { + EXECUTOR_SERVICE.submit(new Callable() { @Override public Future call() throws Exception { serverStoreProxy1.clear(); return null; } }); - executorService.submit(new Callable() { + EXECUTOR_SERVICE.submit(new Callable() { @Override public Future call() throws Exception { serverStoreProxy1.clear(); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ChainResolverExpiryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ChainResolverExpiryTest.java index 
83357f23fd..155a439bd6 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ChainResolverExpiryTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ChainResolverExpiryTest.java @@ -51,7 +51,7 @@ public class ChainResolverExpiryTest { - private static final OperationsCodec codec = new OperationsCodec(new LongSerializer(), new StringSerializer()); + private static final OperationsCodec codec = new OperationsCodec(new LongSerializer(), new StringSerializer()); private static TestTimeSource timeSource = null; @@ -61,9 +61,10 @@ public void initialSetup() { } @Test + @SuppressWarnings("unchecked") public void testGetExpiryForAccessIsIgnored() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); @@ -83,9 +84,10 @@ public void testGetExpiryForAccessIsIgnored() { } @Test + @SuppressWarnings("unchecked") public void testGetExpiryForCreationIsInvokedOnlyOnce() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); @@ -108,9 +110,10 @@ public void testGetExpiryForCreationIsInvokedOnlyOnce() { } @Test + @SuppressWarnings("unchecked") public void testGetExpiryForCreationIsNotInvokedForReplacedChains() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); @@ -130,10 +133,11 @@ public void testGetExpiryForCreationIsNotInvokedForReplacedChains() { } @Test + 
@SuppressWarnings("unchecked") public void testGetExpiryForCreationIsInvokedAfterRemoveOperations() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); @@ -180,9 +184,10 @@ public void testGetExpiryForCreationIsInvokedAfterRemoveOperations() { } @Test + @SuppressWarnings("unchecked") public void testNullGetExpiryForCreation() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(null); @@ -198,9 +203,10 @@ public void testNullGetExpiryForCreation() { } @Test + @SuppressWarnings("unchecked") public void testNullGetExpiryForUpdate() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString())).thenReturn(null); @@ -209,18 +215,19 @@ public void testNullGetExpiryForUpdate() { list.add(new PutOperation(1L, "New", timeSource.getTimeMillis())); Chain chain = getChainFromOperations(list); - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - assertThat(resolvedChain.getResolvedResult(1L).getValue().toString(), is("New")); + assertThat(resolvedChain.getResolvedResult(1L).getValue(), is("New")); assertTrue(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).isExpiryAvailable()); assertThat(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).expirationTime(), is(10L)); assertThat(resolvedChain.isCompacted(), is(true)); } @Test + 
@SuppressWarnings("unchecked") public void testGetExpiryForUpdateUpdatesExpirationTimeStamp() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString())).thenReturn(new Duration(2L, TimeUnit.MILLISECONDS)); @@ -229,18 +236,19 @@ public void testGetExpiryForUpdateUpdatesExpirationTimeStamp() { list.add(new PutOperation(1L, "New", timeSource.getTimeMillis())); Chain chain = getChainFromOperations(list); - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - assertThat(resolvedChain.getResolvedResult(1L).getValue().toString(), is("New")); + assertThat(resolvedChain.getResolvedResult(1L).getValue(), is("New")); assertTrue(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).isExpiryAvailable()); assertThat(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).expirationTime(), is(2L)); assertThat(resolvedChain.isCompacted(), is(true)); } @Test + @SuppressWarnings("unchecked") public void testExpiryThrowsException() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString())).thenThrow(new RuntimeException("Test Update Expiry")); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenThrow(new RuntimeException("Test Create Expiry")); @@ -250,7 +258,7 @@ public void testExpiryThrowsException() { list.add(new PutOperation(1L, "Two", timeSource.getTimeMillis())); Chain chain = getChainFromOperations(list); - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + ResolvedChain resolvedChain 
= chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); assertThat(resolvedChain.getResolvedResult(1L), nullValue()); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java index dbc66039a5..8466768361 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java @@ -50,8 +50,8 @@ public void testApply() throws Exception { result = operation.apply(anotherOperation); assertNull(result); - PutIfAbsentOperation yetAnotherOperation = new PutIfAbsentOperation(1L, "two", System.currentTimeMillis()); + PutIfAbsentOperation yetAnotherOperation = new PutIfAbsentOperation(1L, "two", System.currentTimeMillis()); result = operation.apply(yetAnotherOperation); assertSame(yetAnotherOperation, result); } -} \ No newline at end of file +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java index 2c2b04bb0c..e1d71e740c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java @@ -17,7 +17,10 @@ package org.ehcache.clustered.client.internal.store.operations; import org.ehcache.spi.serialization.Serializer; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import java.nio.ByteBuffer; import java.util.Date; @@ -32,11 +35,18 @@ public class 
LazyValueHolderTest { + @Mock + private Serializer serializer; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + @Test public void testGetValueDecodeOnlyOnce() throws Exception { Date date = mock(Date.class); ByteBuffer buffer = mock(ByteBuffer.class); - Serializer serializer = mock(Serializer.class); doReturn(date).when(serializer).read(buffer); LazyValueHolder valueHolder = new LazyValueHolder(buffer, serializer); @@ -51,7 +61,6 @@ public void testGetValueDecodeOnlyOnce() throws Exception { public void testEncodeEncodesOnlyOnce() throws Exception { Date date = mock(Date.class); ByteBuffer buffer = mock(ByteBuffer.class); - Serializer serializer = mock(Serializer.class); doReturn(buffer).when(serializer).serialize(date); LazyValueHolder valueHolder = new LazyValueHolder(date); @@ -65,11 +74,10 @@ public void testEncodeEncodesOnlyOnce() throws Exception { @Test public void testEncodeDoesNotEncodeAlreadyEncodedValue() throws Exception { ByteBuffer buffer = mock(ByteBuffer.class); - Serializer serializer = mock(Serializer.class); LazyValueHolder valueHolder = new LazyValueHolder(buffer, serializer); ByteBuffer encoded = valueHolder.encode(serializer); assertThat(encoded, sameInstance(buffer)); verify(serializer, never()).serialize(any(Date.class)); //Value not serialized as the serialized form was available on creation itself } -} \ No newline at end of file +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java new file mode 100644 index 0000000000..cd48e0a5c4 --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java @@ -0,0 +1,118 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.replication; + +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.EhcacheClientEntity; +import org.ehcache.clustered.client.internal.EhcacheClientEntityService; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; +import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; +import org.ehcache.clustered.server.ObservableEhcacheServerEntityService; +import org.ehcache.clustered.server.ObservableEhcacheServerEntityService.ObservableEhcachePassiveEntity; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.terracotta.offheapresource.OffHeapResourcesProvider; +import org.terracotta.offheapresource.config.MemoryUnit; +import org.terracotta.passthrough.PassthroughClusterControl; +import org.terracotta.passthrough.PassthroughTestHelpers; + +import java.net.URI; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; + +import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; +import static org.ehcache.clustered.client.replication.ReplicationUtil.getEntity; +import 
static org.ehcache.clustered.client.replication.ReplicationUtil.getServerStoreConfiguration; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +public class ActivePassiveClientIdTest { + + private PassthroughClusterControl clusterControl; + private static String STRIPENAME = "stripe"; + private static String STRIPE_URI = "passthrough://" + STRIPENAME; + private ObservableEhcacheServerEntityService observableEhcacheServerEntityService; + + @Before + public void setUp() throws Exception { + this.observableEhcacheServerEntityService = new ObservableEhcacheServerEntityService(); + this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, + server -> { + server.registerServerEntityService(observableEhcacheServerEntityService); + server.registerClientEntityService(new EhcacheClientEntityService()); + server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); + server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); + server.registerExtendedConfiguration(new OffHeapResourcesProvider(getOffheapResourcesType("test", 32, MemoryUnit.MB))); + + UnitTestConnectionService.addServerToStripe(STRIPENAME, server); + } + ); + + clusterControl.waitForActive(); + clusterControl.waitForRunningPassivesInStandby(); + } + + @After + public void tearDown() throws Exception { + UnitTestConnectionService.removeStripe(STRIPENAME); + clusterControl.tearDown(); + } + + @Test + public void testClientIdGetsTrackedAtPassive() throws Exception { + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) + .autoCreate() + .build(); + + ClusteringService service = new ClusteringServiceFactory().create(configuration); + + service.start(null); + + ObservableEhcachePassiveEntity ehcachePassiveEntity = observableEhcacheServerEntityService.getServedPassiveEntities().get(0); + + assertThat(ehcachePassiveEntity.getMessageTrackerMap().size(), 
is(0)); + + EhcacheClientEntity clientEntity = getEntity(service); + + clientEntity.createCache("testCache", getServerStoreConfiguration("test")); + + assertThat(ehcachePassiveEntity.getMessageTrackerMap().size(), is(1)); + + service.stop(); + + CompletableFuture completableFuture = CompletableFuture.supplyAsync(() -> { + while (true) { + try { + if (ehcachePassiveEntity.getMessageTrackerMap().size() == 0) { + return true; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + }); + assertThat(completableFuture.get(2, TimeUnit.SECONDS), is(true)); + + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/LifeCycleMessageActivePassvieReplicationTest.java similarity index 78% rename from clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java rename to clustered/client/src/test/java/org/ehcache/clustered/client/replication/LifeCycleMessageActivePassvieReplicationTest.java index 859c197fe6..bdd0c270d3 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/LifeCycleMessageActivePassvieReplicationTest.java @@ -14,11 +14,9 @@ * limitations under the License. 
*/ -package org.ehcache.clustered.client; +package org.ehcache.clustered.client.replication; -import org.ehcache.clustered.client.config.ClusteredResourcePool; import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.client.internal.EhcacheClientEntityService; @@ -29,28 +27,23 @@ import org.ehcache.clustered.client.internal.service.ClusteredTierManagerConfigurationException; import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreManagerException; import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; import org.ehcache.clustered.server.EhcacheServerEntityService; -import org.ehcache.impl.serialization.CompactJavaSerializer; import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; import org.terracotta.offheapresource.OffHeapResourcesProvider; import org.terracotta.offheapresource.config.MemoryUnit; import org.terracotta.passthrough.PassthroughClusterControl; -import org.terracotta.passthrough.PassthroughServer; import org.terracotta.passthrough.PassthroughTestHelpers; -import java.lang.reflect.Field; import java.net.URI; import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; -import static org.ehcache.config.units.MemoryUnit.MB; +import static 
org.ehcache.clustered.client.replication.ReplicationUtil.getEntity; +import static org.ehcache.clustered.client.replication.ReplicationUtil.getServerStoreConfiguration; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; @@ -65,18 +58,14 @@ public class LifeCycleMessageActivePassvieReplicationTest { @Before public void setUp() throws Exception { this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, - new PassthroughTestHelpers.ServerInitializer() { - @Override - public void registerServicesForServer(PassthroughServer server) { + server -> { server.registerServerEntityService(new EhcacheServerEntityService()); server.registerClientEntityService(new EhcacheClientEntityService()); server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - server.registerServiceProvider(new OffHeapResourcesProvider(), - new OffHeapResourcesConfiguration(getOffheapResourcesType("test", 32, MemoryUnit.MB))); + server.registerExtendedConfiguration(new OffHeapResourcesProvider(getOffheapResourcesType("test", 32, MemoryUnit.MB))); - UnitTestConnectionService.addServerToStripe(STRIPENAME, server); - } + UnitTestConnectionService.addServerToStripe(STRIPENAME, server); } ); @@ -180,17 +169,42 @@ public void testDestroyServerStoreReplication() throws Exception { } - private static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { - Field entity = clusteringService.getClass().getDeclaredField("entity"); - entity.setAccessible(true); - return (EhcacheClientEntity)entity.get(clusteringService); - } + @Test + public void testDestroyServerStoreIsNotReplicatedIfFailsOnActive() throws Exception { + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) + 
.autoCreate() + .build(); + + ClusteringService service1 = new ClusteringServiceFactory().create(configuration); + + ClusteringService service2 = new ClusteringServiceFactory().create(configuration); + + service1.start(null); + service2.start(null); + + EhcacheClientEntity clientEntity1 = getEntity(service1); + EhcacheClientEntity clientEntity2 = getEntity(service2); + + clientEntity1.createCache("testCache", getServerStoreConfiguration("test")); + clientEntity2.validateCache("testCache", getServerStoreConfiguration("test")); + + clientEntity1.releaseCache("testCache"); + try { + clientEntity1.destroyCache("testCache"); + fail("ClusteredTierDestructionException Expected"); + } catch (ClusteredTierDestructionException e) { + //nothing to do + } + + clusterControl.terminateActive(); + + clientEntity2.releaseCache("testCache"); + clientEntity2.destroyCache("testCache"); + + service1.stop(); + service2.stop(); - private static ServerStoreConfiguration getServerStoreConfiguration(String resourceName) { - ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(resourceName, 8, MB); - return new ServerStoreConfiguration(resourcePool.getPoolAllocation(), - String.class.getName(), String.class.getName(), null, null, CompactJavaSerializer.class.getName(), CompactJavaSerializer.class - .getName(), Consistency.STRONG); } } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java new file mode 100644 index 0000000000..2ce2199136 --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java @@ -0,0 +1,49 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.replication; + +import org.ehcache.clustered.client.config.ClusteredResourcePool; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.internal.EhcacheClientEntity; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.impl.serialization.CompactJavaSerializer; + +import java.lang.reflect.Field; + +import static org.ehcache.config.units.MemoryUnit.MB; + +public class ReplicationUtil { + + private ReplicationUtil() { + + } + + public static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { + Field entity = clusteringService.getClass().getDeclaredField("entity"); + entity.setAccessible(true); + return (EhcacheClientEntity)entity.get(clusteringService); + } + + public static ServerStoreConfiguration getServerStoreConfiguration(String resourceName) { + ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(resourceName, 8, MB); + return new ServerStoreConfiguration(resourcePool.getPoolAllocation(), + String.class.getName(), String.class.getName(), null, null, CompactJavaSerializer.class.getName(), CompactJavaSerializer.class + .getName(), Consistency.STRONG); + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java 
b/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java index e61b3895df..60aa1eb619 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java @@ -43,6 +43,7 @@ public class ObservableEhcacheServerEntityService private final EhcacheServerEntityService delegate = new EhcacheServerEntityService(); private final List servedActiveEntities = new ArrayList(); + private final List servedPassiveEntities = new ArrayList<>(); /** * Gets a list of {@link ObservableEhcacheActiveEntity} instances wrapping the @@ -58,6 +59,14 @@ public List getServedActiveEntities() throws NoSu return Collections.unmodifiableList(observables); } + public List getServedPassiveEntities() throws Exception { + List observables = new ArrayList<>(servedPassiveEntities.size()); + for (EhcachePassiveEntity servedPassiveEntity : servedPassiveEntities) { + observables.add(new ObservableEhcachePassiveEntity(servedPassiveEntity)); + } + return Collections.unmodifiableList(observables); + } + @Override public long getVersion() { return delegate.getVersion(); @@ -76,8 +85,10 @@ public EhcacheActiveEntity createActiveEntity(ServiceRegistry registry, byte[] c } @Override - public PassiveServerEntity createPassiveEntity(ServiceRegistry registry, byte[] configuration) { - return delegate.createPassiveEntity(registry, configuration); + public EhcachePassiveEntity createPassiveEntity(ServiceRegistry registry, byte[] configuration) { + EhcachePassiveEntity passiveEntity = delegate.createPassiveEntity(registry, configuration); + servedPassiveEntities.add(passiveEntity); + return passiveEntity; } @Override @@ -136,5 +147,30 @@ public Set getSharedResourcePoolIds() { public Set getDedicatedResourcePoolIds() { return ehcacheStateService.getDedicatedResourcePoolIds(); } + + public Map 
getClientsWaitingForInvalidation() throws Exception { + Field field = activeEntity.getClass().getDeclaredField("clientsWaitingForInvalidation"); + field.setAccessible(true); + return (Map)field.get(activeEntity); + } + } + + public static final class ObservableEhcachePassiveEntity { + private final EhcachePassiveEntity passiveEntity; + private final EhcacheStateServiceImpl ehcacheStateService; + + private ObservableEhcachePassiveEntity(EhcachePassiveEntity passiveEntity) throws Exception { + this.passiveEntity = passiveEntity; + Field field = passiveEntity.getClass().getDeclaredField("ehcacheStateService"); + field.setAccessible(true); + this.ehcacheStateService = (EhcacheStateServiceImpl)field.get(passiveEntity); + } + + public Map getMessageTrackerMap() throws Exception { + Field field = this.ehcacheStateService.getClientMessageTracker().getClass().getDeclaredField("messageTrackers"); + field.setAccessible(true); + return (Map)field.get(this.ehcacheStateService.getClientMessageTracker()); + } + } } diff --git a/clustered/client/src/test/resources/configs/docs/tc-config.xml b/clustered/client/src/test/resources/configs/docs/tc-config.xml index 8c46e1c8dc..57d1553f1c 100644 --- a/clustered/client/src/test/resources/configs/docs/tc-config.xml +++ b/clustered/client/src/test/resources/configs/docs/tc-config.xml @@ -1,13 +1,12 @@ - - - + + 128 96 - - + + diff --git a/clustered/clustered-dist/build.gradle b/clustered/clustered-dist/build.gradle index 623b66b7a1..3b5f3ebb99 100644 --- a/clustered/clustered-dist/build.gradle +++ b/clustered/clustered-dist/build.gradle @@ -31,6 +31,8 @@ ext { dependencies { compile project(':clustered:client') compile project(':clustered:common') + // Needed because declared as provided in the different projects + compile "org.terracotta:runnel:$parent.terracottaPlatformVersion" } apply plugin: 'distribution' @@ -42,8 +44,8 @@ configurations { } dependencies { - compile "org.terracotta.internal:client-runtime:$terracottaCoreVersion" 
changing true - compile "org.terracotta.internal:client-logging:$terracottaCoreVersion" changing true + compile "org.terracotta.internal:client-runtime:$terracottaCoreVersion" + compile "org.terracotta.internal:client-logging:$terracottaCoreVersion" serverLibs(project(':clustered:server')) { exclude group: 'org.terracotta', module: 'entity-server-api' @@ -52,9 +54,12 @@ dependencies { exclude group: 'org.terracotta.internal', module: 'tc-config-parser' } - kit "org.terracotta.internal:terracotta-kit:$terracottaCoreVersion@zip" changing true + // Needed because declared as provided in the different projects + serverLibs "org.terracotta:runnel:$parent.terracottaPlatformVersion" - shadow "org.slf4j:slf4j-api:$parent.slf4jVersion" + kit "org.terracotta.internal:terracotta-kit:$terracottaCoreVersion@zip" + + shadowCompile "org.slf4j:slf4j-api:$parent.slf4jVersion" pomOnlyCompile "org.ehcache:ehcache:$parent.baseVersion" } @@ -99,7 +104,7 @@ distributions { from project(':dist').javadocJar.archivePath.getPath() } into ('client/lib') { - from configurations.shadow + from configurations.shadowCompile } into ('') { from 'src/assemble' diff --git a/clustered/clustered-dist/gradle.properties b/clustered/clustered-dist/gradle.properties index b21565b96b..4d52981583 100644 --- a/clustered/clustered-dist/gradle.properties +++ b/clustered/clustered-dist/gradle.properties @@ -14,8 +14,8 @@ # limitations under the License. 
# -subPomName = Ehcache 3 Clustered Kit -subPomDesc = Ehcache 3 Clustered Kit +subPomName = Ehcache 3 Clustered Module +subPomDesc = Ehcache 3 Clustered: Defines the client jar and the kit containing the Terracotta server javadocExclude = **/core/**, **/impl/**, **/xml/**, **/jsr107/**, **/transactions/**, **/management/**, **/tck/** # Set to anything to disable SPI doc and jar generation diff --git a/clustered/clustered-dist/src/assemble/README.txt b/clustered/clustered-dist/src/assemble/README.txt index 4ab05faa32..9be3256669 100644 --- a/clustered/clustered-dist/src/assemble/README.txt +++ b/clustered/clustered-dist/src/assemble/README.txt @@ -1,11 +1,11 @@ -Ehcache 3.1 +Ehcache 3.2 ========================= -Welcome to version 3.1 of Ehcache with Terracotta-based distributed caching. +Welcome to version 3.2 of Ehcache with Terracotta-based distributed caching. Contents --------------- - Included in this kit are the following: + Included in this kit are the following: README.txt -- This file server -- Directory containing libraries, executables, and other supporting files for the Terracotta Server └─plugins -- Directory containing libraries for the applications installed in the server diff --git a/clustered/common/build.gradle b/clustered/common/build.gradle index 2bd3530ae3..21038bdcdf 100644 --- a/clustered/common/build.gradle +++ b/clustered/common/build.gradle @@ -17,5 +17,11 @@ apply plugin: EhDeploy dependencies { - compile "org.terracotta:entity-server-api:$parent.entityApiVersion" + compile "org.slf4j:slf4j-api:$parent.slf4jVersion" + provided "org.terracotta:entity-common-api:$parent.entityApiVersion" + provided "org.terracotta:runnel:$parent.terracottaPlatformVersion" +} + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidClientIdException.java 
b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidClientIdException.java new file mode 100644 index 0000000000..585a2297b8 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidClientIdException.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.exceptions; + +/** + * Thrown when Active Entity fails to validate the unique client Id sent by client + */ +public class InvalidClientIdException extends ClusterException { + + public InvalidClientIdException(String message) { + super(message); + } + + @Override + public InvalidClientIdException withClientStackTrace() { + return new InvalidClientIdException(this.getMessage()); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java index caa513268c..31d4fe05cd 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java @@ -26,8 +26,8 @@ public InvalidServerStoreConfigurationException(String 
message) { super(message); } - public InvalidServerStoreConfigurationException(Throwable cause) { - super(cause); + public InvalidServerStoreConfigurationException(String message, Throwable cause) { + super(message, cause); } private InvalidServerStoreConfigurationException(InvalidServerStoreConfigurationException cause) { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java new file mode 100644 index 0000000000..da21efeddb --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java @@ -0,0 +1,33 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.exceptions; + +public class UnknownClusterException extends ClusterException{ + + public UnknownClusterException(String message) { + super(message); + } + + private UnknownClusterException(Throwable throwable) { + super(throwable); + } + + @Override + public ClusterException withClientStackTrace() { + return new UnknownClusterException(this); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java index 7e9414569b..544edee519 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java @@ -20,7 +20,6 @@ import org.terracotta.entity.EntityResponse; import org.terracotta.entity.MessageCodec; import org.terracotta.entity.MessageCodecException; -import org.terracotta.entity.SyncMessageCodec; public class LockMessaging { @@ -60,26 +59,10 @@ public LockTransition decodeResponse(byte[] bytes) throws MessageCodecException } }; - private static final SyncMessageCodec SYNC_CODEC = new SyncMessageCodec() { - @Override - public byte[] encode(int i, LockOperation message) throws MessageCodecException { - throw new AssertionError(); - } - - @Override - public LockOperation decode(int i, byte[] bytes) throws MessageCodecException { - throw new AssertionError(); - } - }; - public static MessageCodec codec() { return CODEC; } - public static SyncMessageCodec syncCodec() { - return SYNC_CODEC; - } - public static LockOperation tryLock(HoldType type) { return new LockOperation(Operation.TRY_ACQUIRE, type); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java index b21d9a8a5a..e5b907272a 
100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java @@ -19,93 +19,73 @@ import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.Element; import org.ehcache.clustered.common.internal.store.SequencedElement; +import org.ehcache.clustered.common.internal.store.Util; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructArrayEncoder; +import org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; -import static org.ehcache.clustered.common.internal.store.Util.getElement; -import static org.ehcache.clustered.common.internal.store.Util.getChain; +public class ChainCodec { -class ChainCodec { + private static final Struct ELEMENT_STRUCT = StructBuilder.newStructBuilder() + .int64("sequence", 10) + .byteBuffer("payload", 20) + .build(); - private static final byte NON_SEQUENCED_CHAIN = 0; - private static final byte SEQUENCED_CHAIN = 1; - private static final byte SEQ_NUM_OFFSET = 8; - private static final byte ELEMENT_PAYLOAD_OFFSET = 4; + public static final Struct CHAIN_STRUCT = StructBuilder.newStructBuilder() + .structs("elements", 10, ELEMENT_STRUCT) + .build(); - //TODO: optimize too many bytebuffer allocation public byte[] encode(Chain chain) { - ByteBuffer msg = null; - boolean firstIteration = true ; + StructEncoder encoder = CHAIN_STRUCT.encoder(); + + encode(encoder, chain); + + ByteBuffer byteBuffer = encoder.encode(); + return byteBuffer.array(); + } + + public void encode(StructEncoder encoder, Chain chain) { + StructArrayEncoder> elementsEncoder = encoder.structs("elements"); for (Element element : 
chain) { - if (firstIteration) { - firstIteration = false; - ByteBuffer buffer = ByteBuffer.allocate(1); - if (element instanceof SequencedElement) { - buffer.put(SEQUENCED_CHAIN); - } else { - buffer.put(NON_SEQUENCED_CHAIN); - } - buffer.flip(); - msg = combine(buffer, encodeElement(element)); - continue; + if (element instanceof SequencedElement) { + elementsEncoder.int64("sequence", ((SequencedElement) element).getSequenceNumber()); } - if (msg == null) { - throw new IllegalArgumentException("Message cannot be null"); - } - msg = combine(msg, encodeElement(element)); + elementsEncoder.byteBuffer("payload", element.getPayload()); + elementsEncoder.next(); } - return msg != null ? msg.array() : new byte[0]; } public Chain decode(byte[] payload) { + StructDecoder decoder = CHAIN_STRUCT.decoder(ByteBuffer.wrap(payload)); + return decode(decoder); + } + + public Chain decode(StructDecoder decoder) { + StructArrayDecoder> elementsDecoder = decoder.structs("elements"); + final List elements = new ArrayList(); - if (payload.length != 0) { - ByteBuffer buffer = ByteBuffer.wrap(payload); - boolean isSequenced = buffer.get() == 1; - if (isSequenced) { - while (buffer.hasRemaining()) { - long sequence = buffer.getLong(); - elements.add(getElement(sequence, getElementPayLoad(buffer))); - } + for (int i = 0; i < elementsDecoder.length(); i++) { + Long sequence = elementsDecoder.int64("sequence"); + ByteBuffer byteBuffer = elementsDecoder.byteBuffer("payload"); + Element element; + if (sequence != null) { + element = Util.getElement(sequence, byteBuffer); } else { - while (buffer.hasRemaining()) { - elements.add(getElement(getElementPayLoad(buffer))); - } + element = Util.getElement(byteBuffer); } + elements.add(element); + elementsDecoder.next(); } - return getChain(elements); - } - - private static ByteBuffer combine(ByteBuffer buffer1, ByteBuffer buffer2) { - ByteBuffer byteBuffer = ByteBuffer.allocate(buffer1.remaining() + buffer2.remaining()); - byteBuffer.put(buffer1); 
- byteBuffer.put(buffer2); - byteBuffer.flip(); - return byteBuffer; - } - private static ByteBuffer encodeElement(Element element) { - ByteBuffer buffer = null; - if (element instanceof SequencedElement) { - buffer = ByteBuffer.allocate(SEQ_NUM_OFFSET + ELEMENT_PAYLOAD_OFFSET + element.getPayload().remaining()); - buffer.putLong(((SequencedElement)element).getSequenceNumber()); - } else { - buffer = ByteBuffer.allocate(ELEMENT_PAYLOAD_OFFSET + element.getPayload().remaining()); - } - buffer.putInt(element.getPayload().remaining()); - buffer.put(element.getPayload()); - buffer.flip(); - return buffer; - } + elementsDecoder.end(); - private static ByteBuffer getElementPayLoad(ByteBuffer buffer) { - int payloadSize = buffer.getInt(); - buffer.limit(buffer.position() + payloadSize); - ByteBuffer elementPayload = buffer.slice(); - buffer.position(buffer.limit()); - buffer.limit(buffer.capacity()); - return elementPayload; + return Util.getChain(elements); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java index 9e80bdd9b8..f066309c18 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java @@ -18,7 +18,7 @@ import java.nio.ByteBuffer; -final class CodecUtil { +public final class CodecUtil { private CodecUtil() { } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java new file mode 100644 index 0000000000..941211362e --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java @@ -0,0 +1,201 @@ +/* + * Copyright Terracotta, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.terracotta.runnel.EnumMapping; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.Enm; +import org.terracotta.runnel.decoding.PrimitiveDecodingSupport; +import org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.PrimitiveEncodingSupport; +import org.terracotta.runnel.encoding.StructArrayEncoder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.util.HashMap; +import java.util.Map; + +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +/** + * Encodes and decodes configuration objects such as {@link ServerSideConfiguration} and {@link ServerStoreConfiguration}. + *

+ * This class is made extensible and hence must remain public. + */ +@SuppressWarnings("WeakerAccess") +public class CommonConfigCodec implements ConfigCodec { + + private static final String STORE_CONFIG_KEY_TYPE_FIELD = "keyType"; + private static final String STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD = "keySerializerType"; + private static final String STORE_CONFIG_VALUE_TYPE_FIELD = "valueType"; + private static final String STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD = "valueSerializerType"; + private static final String STORE_CONFIG_CONSISTENCY_FIELD = "consistency"; + private static final String POOL_SIZE_FIELD = "poolSize"; + private static final String POOL_RESOURCE_NAME_FIELD = "resourceName"; + private static final String DEFAULT_RESOURCE_FIELD = "defaultResource"; + private static final String POOLS_SUB_STRUCT = "pools"; + private static final String POOL_NAME_FIELD = "poolName"; + + private static final EnumMapping CONSISTENCY_ENUM_MAPPING = newEnumMappingBuilder(Consistency.class) + .mapping(Consistency.EVENTUAL, 1) + .mapping(Consistency.STRONG, 2) + .build(); + + private static final Struct POOLS_STRUCT = newStructBuilder() + .string(POOL_NAME_FIELD, 10) + .int64(POOL_SIZE_FIELD, 20) + .string(POOL_RESOURCE_NAME_FIELD, 30).build(); + + @Override + public InjectTuple injectServerStoreConfiguration(StructBuilder baseBuilder, final int index) { + final StructBuilder structBuilder = baseBuilder.string(STORE_CONFIG_KEY_TYPE_FIELD, index) + .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, index + 10) + .string(STORE_CONFIG_VALUE_TYPE_FIELD, index + 11) + .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, index + 15) + .enm(STORE_CONFIG_CONSISTENCY_FIELD, index + 16, CONSISTENCY_ENUM_MAPPING) + .int64(POOL_SIZE_FIELD, index + 20) + .string(POOL_RESOURCE_NAME_FIELD, index + 30); + + return new InjectTuple() { + @Override + public int getLastIndex() { + return index + 30; + } + + @Override + public StructBuilder getUpdatedBuilder() { + return structBuilder; + } + 
}; + } + + @Override + public InjectTuple injectServerSideConfiguration(StructBuilder baseBuilder, final int index) { + final StructBuilder structBuilder = baseBuilder.string(DEFAULT_RESOURCE_FIELD, index + 10) + .structs(POOLS_SUB_STRUCT, index + 20, POOLS_STRUCT); + + return new InjectTuple() { + @Override + public int getLastIndex() { + return index + 20; + } + + @Override + public StructBuilder getUpdatedBuilder() { + return structBuilder; + } + }; + } + + @Override + public void encodeServerStoreConfiguration(PrimitiveEncodingSupport encoder, ServerStoreConfiguration configuration) { + encoder.string(STORE_CONFIG_KEY_TYPE_FIELD, configuration.getStoredKeyType()) + .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, configuration.getKeySerializerType()) + .string(STORE_CONFIG_VALUE_TYPE_FIELD, configuration.getStoredValueType()) + .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, configuration.getValueSerializerType()); + if (configuration.getConsistency() != null) { + encoder.enm(STORE_CONFIG_CONSISTENCY_FIELD, configuration.getConsistency()); + } + + PoolAllocation poolAllocation = configuration.getPoolAllocation(); + if (poolAllocation instanceof PoolAllocation.Dedicated) { + PoolAllocation.Dedicated dedicatedPool = (PoolAllocation.Dedicated) poolAllocation; + encoder.int64(POOL_SIZE_FIELD, dedicatedPool.getSize()); + if (dedicatedPool.getResourceName() != null) { + encoder.string(POOL_RESOURCE_NAME_FIELD, dedicatedPool.getResourceName()); + } + } else if (poolAllocation instanceof PoolAllocation.Shared) { + encoder.string(POOL_RESOURCE_NAME_FIELD, ((PoolAllocation.Shared) poolAllocation).getResourcePoolName()); + } + } + + @Override + public ServerStoreConfiguration decodeServerStoreConfiguration(PrimitiveDecodingSupport decoder) { + String keyType = decoder.string(STORE_CONFIG_KEY_TYPE_FIELD); + String keySerializer = decoder.string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD); + String valueType = decoder.string(STORE_CONFIG_VALUE_TYPE_FIELD); + String 
valueSerializer = decoder.string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD); + Enm consistencyEnm = decoder.enm(STORE_CONFIG_CONSISTENCY_FIELD); + Consistency consistency = Consistency.EVENTUAL; + if (consistencyEnm.isValid()) { + consistency = consistencyEnm.get(); + } + Long poolSize = decoder.int64(POOL_SIZE_FIELD); + String poolResource = decoder.string(POOL_RESOURCE_NAME_FIELD); + PoolAllocation poolAllocation = new PoolAllocation.Unknown(); + if (poolSize != null) { + poolAllocation = new PoolAllocation.Dedicated(poolResource, poolSize); + } else if (poolResource != null) { + poolAllocation = new PoolAllocation.Shared(poolResource); + } + return new ServerStoreConfiguration(poolAllocation, keyType, valueType, null, null, keySerializer, valueSerializer, consistency); + } + + @Override + public void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration) { + if (configuration.getDefaultServerResource() != null) { + encoder.string(DEFAULT_RESOURCE_FIELD, configuration.getDefaultServerResource()); + } + + if (!configuration.getResourcePools().isEmpty()) { + StructArrayEncoder> poolsEncoder = encoder.structs(POOLS_SUB_STRUCT); + for (Map.Entry poolEntry : configuration.getResourcePools().entrySet()) { + poolsEncoder.string(POOL_NAME_FIELD, poolEntry.getKey()) + .int64(POOL_SIZE_FIELD, poolEntry.getValue().getSize()); + if (poolEntry.getValue().getServerResource() != null) { + poolsEncoder.string(POOL_RESOURCE_NAME_FIELD, poolEntry.getValue().getServerResource()); + } + poolsEncoder.next(); + } + poolsEncoder.end(); + } + } + + @Override + public ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder) { + String defaultResource = decoder.string(DEFAULT_RESOURCE_FIELD); + + HashMap resourcePools = new HashMap(); + StructArrayDecoder> poolsDecoder = decoder.structs(POOLS_SUB_STRUCT); + if (poolsDecoder != null) { + for (int i = 0; i < poolsDecoder.length(); i++) { + String poolName = 
poolsDecoder.string(POOL_NAME_FIELD); + Long poolSize = poolsDecoder.int64(POOL_SIZE_FIELD); + String poolResourceName = poolsDecoder.string(POOL_RESOURCE_NAME_FIELD); + if (poolResourceName == null) { + resourcePools.put(poolName, new ServerSideConfiguration.Pool(poolSize)); + } else { + resourcePools.put(poolName, new ServerSideConfiguration.Pool(poolSize, poolResourceName)); + } + poolsDecoder.next(); + } + } + + ServerSideConfiguration serverSideConfiguration; + if (defaultResource == null) { + serverSideConfiguration = new ServerSideConfiguration(resourcePools); + } else { + serverSideConfiguration = new ServerSideConfiguration(defaultResource, resourcePools); + } + return serverSideConfiguration; + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java index add54e0ecc..8f339a59c8 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java @@ -30,6 +30,6 @@ public interface ConcurrentEntityMessage extends EntityMessage { * @see org.terracotta.entity.ConcurrencyStrategy#concurrencyKey(EntityMessage) * @return the concurrency key */ - int concurrencyKey(); + long concurrencyKey(); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java new file mode 100644 index 0000000000..677f3393b3 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java @@ -0,0 +1,43 @@ +/* + * Copyright Terracotta, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.PrimitiveDecodingSupport; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.PrimitiveEncodingSupport; +import org.terracotta.runnel.encoding.StructEncoder; + +/** + * Interface that allows extensions to codec. 
+ */ +public interface ConfigCodec { + InjectTuple injectServerSideConfiguration(StructBuilder baseBuilder, int index); + void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration); + ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder); + + InjectTuple injectServerStoreConfiguration(StructBuilder baseBuilder, int index); + void encodeServerStoreConfiguration(PrimitiveEncodingSupport encoder, ServerStoreConfiguration configuration); + ServerStoreConfiguration decodeServerStoreConfiguration(PrimitiveDecodingSupport decoder); + + interface InjectTuple { + int getLastIndex(); + StructBuilder getUpdatedBuilder(); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java index 9010232e48..0bd585dcdc 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java @@ -16,28 +16,36 @@ package org.ehcache.clustered.common.internal.messages; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.terracotta.entity.MessageCodec; import org.terracotta.entity.MessageCodecException; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.decoding.Enm; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.LIFECYCLE_OP; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.SERVER_STORE_OP; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.STATE_REPO_OP; +import java.nio.ByteBuffer; + +import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static 
org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; public class EhcacheCodec implements MessageCodec { - private static final MessageCodec SERVER_INSTANCE = - new EhcacheCodec(new ServerStoreOpCodec(), new LifeCycleMessageCodec(), new StateRepositoryOpCodec(), new ResponseCodec()); + private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheCodec.class); + + public static final Struct OP_CODE_DECODER = newStructBuilder().enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING).build(); private final ServerStoreOpCodec serverStoreOpCodec; private final LifeCycleMessageCodec lifeCycleMessageCodec; private final StateRepositoryOpCodec stateRepositoryOpCodec; private final ResponseCodec responseCodec; - public static MessageCodec messageCodec() { - return SERVER_INSTANCE; - } - - EhcacheCodec(ServerStoreOpCodec serverStoreOpCodec, LifeCycleMessageCodec lifeCycleMessageCodec, + public EhcacheCodec(ServerStoreOpCodec serverStoreOpCodec, LifeCycleMessageCodec lifeCycleMessageCodec, StateRepositoryOpCodec stateRepositoryOpCodec, ResponseCodec responseCodec) { this.serverStoreOpCodec = serverStoreOpCodec; this.lifeCycleMessageCodec = lifeCycleMessageCodec; @@ -47,29 +55,48 @@ public static MessageCodec messageC @Override public byte[] encodeMessage(EhcacheEntityMessage message) { - switch (message.getType()) { - case LIFECYCLE_OP: - return lifeCycleMessageCodec.encode((LifecycleMessage)message); - case 
SERVER_STORE_OP: - return serverStoreOpCodec.encode((ServerStoreOpMessage) message); - case STATE_REPO_OP: - return stateRepositoryOpCodec.encode((StateRepositoryOpMessage) message); - default: - throw new IllegalArgumentException("Undefined message type: " + message.getType()); + if (!(message instanceof EhcacheOperationMessage)) { + throw new AssertionError("Unexpected message type " + message.getClass()); + } + EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; + if (isLifecycleMessage(operationMessage.getMessageType())) { + return lifeCycleMessageCodec.encode((LifecycleMessage) operationMessage); + } else if (isStoreOperationMessage(operationMessage.getMessageType())) { + return serverStoreOpCodec.encode((ServerStoreOpMessage) operationMessage); + } else if (isStateRepoOperationMessage(operationMessage.getMessageType())) { + return stateRepositoryOpCodec.encode((StateRepositoryOpMessage) operationMessage); } + throw new AssertionError("Unknown message type: " + operationMessage.getMessageType()); } @Override public EhcacheEntityMessage decodeMessage(byte[] payload) throws MessageCodecException { - byte opCode = payload[0]; - if (opCode <= LIFECYCLE_OP.getCode()) { - return lifeCycleMessageCodec.decode(payload); - } else if (opCode <= SERVER_STORE_OP.getCode()) { - return serverStoreOpCodec.decode(payload); - } else if (opCode <= STATE_REPO_OP.getCode()) { - return stateRepositoryOpCodec.decode(payload); + ByteBuffer byteBuffer = wrap(payload); + Enm opCodeEnm = OP_CODE_DECODER.decoder(byteBuffer).enm("opCode"); + + if (!opCodeEnm.isFound()) { + throw new AssertionError("Got a message without an opCode"); + } + if (!opCodeEnm.isValid()) { + LOGGER.warn("Received message with unknown operation code - more recent version at the other end?"); + return null; + } + + byteBuffer.rewind(); + + EhcacheMessageType opCode = opCodeEnm.get(); + return decodeMessage(byteBuffer, opCode); + } + + public EhcacheEntityMessage decodeMessage(ByteBuffer 
byteBuffer, EhcacheMessageType opCode) { + if (isLifecycleMessage(opCode)) { + return lifeCycleMessageCodec.decode(opCode, byteBuffer); + } else if (isStoreOperationMessage(opCode)) { + return serverStoreOpCodec.decode(opCode, byteBuffer); + } else if (isStateRepoOperationMessage(opCode)) { + return stateRepositoryOpCodec.decode(opCode, byteBuffer); } else { - throw new UnsupportedOperationException("Undefined message code: " + opCode); + throw new UnsupportedOperationException("Unsupported message code: " + opCode); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java index 88daee109e..0863aaa51b 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java @@ -18,48 +18,19 @@ import org.terracotta.entity.EntityMessage; +import java.util.UUID; + /** * Defines messages for interactions with an {@code EhcacheActiveEntity}. */ public abstract class EhcacheEntityMessage implements EntityMessage { - /** - * These types represent the top level Ehcache entity message types. - * Each of these top level types can have subtypes of messages. - * The byte code values represents the upper bound of the subtypes messages' byte values if there are any. 
- */ - public enum Type { - LIFECYCLE_OP((byte) 10), - SERVER_STORE_OP((byte) 20), - STATE_REPO_OP((byte) 30), - ; - - private final byte code; - - Type(byte code) { - this.code = code; - } - - public byte getCode() { - return this.code; - } + static final long NOT_REPLICATED = -1; - public static Type toType(byte code) { - for (Type type: Type.values()) { - if(type.getCode() == code) { - return type; - } - } - throw new IllegalArgumentException("Invalid message type code: " + code); - } - } + public abstract void setId(long id); - public abstract Type getType(); + public abstract long getId(); - public abstract byte getOpCode(); + public abstract UUID getClientId(); - @Override - public String toString() { - return getType().toString(); - } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java index 572679bd50..f01605b88e 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java @@ -23,55 +23,7 @@ public abstract class EhcacheEntityResponse implements EntityResponse { - public enum Type { - SUCCESS((byte) 0), - FAILURE((byte) 1), - GET_RESPONSE((byte) 2), - HASH_INVALIDATION_DONE((byte) 3), - ALL_INVALIDATION_DONE((byte) 4), - CLIENT_INVALIDATE_HASH((byte) 5), - CLIENT_INVALIDATE_ALL((byte) 6), - SERVER_INVALIDATE_HASH((byte) 7), - MAP_VALUE((byte) 8), - ; - - private final byte opCode; - - Type(byte opCode) { - this.opCode = opCode; - } - - public byte getOpCode() { - return this.opCode; - } - - public static Type responseType(byte opCode) { - switch (opCode) { - case 0: - return SUCCESS; - case 1: - return FAILURE; - case 2: - return GET_RESPONSE; - case 3: - return HASH_INVALIDATION_DONE; - case 4: - return 
ALL_INVALIDATION_DONE; - case 5: - return CLIENT_INVALIDATE_HASH; - case 6: - return CLIENT_INVALIDATE_ALL; - case 7: - return SERVER_INVALIDATE_HASH; - case 8: - return MAP_VALUE; - default: - throw new IllegalArgumentException("Store operation not defined for : " + opCode); - } - } - } - - public abstract Type getType(); + public abstract EhcacheResponseType getResponseType(); public static class Success extends EhcacheEntityResponse { @@ -82,10 +34,9 @@ private Success() { } @Override - public Type getType() { - return Type.SUCCESS; + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.SUCCESS; } - } public static class Failure extends EhcacheEntityResponse { @@ -96,15 +47,14 @@ public static class Failure extends EhcacheEntityResponse { this.cause = cause; } - @Override - public Type getType() { - return Type.FAILURE; - } - public ClusterException getCause() { return cause; } + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.FAILURE; + } } public static class GetResponse extends EhcacheEntityResponse { @@ -115,15 +65,14 @@ public static class GetResponse extends EhcacheEntityResponse { this.chain = chain; } - @Override - public Type getType() { - return Type.GET_RESPONSE; - } - public Chain getChain() { return chain; } + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.GET_RESPONSE; + } } public static HashInvalidationDone hashInvalidationDone(String cacheId, long key) { @@ -148,10 +97,9 @@ public long getKey() { } @Override - public Type getType() { - return Type.HASH_INVALIDATION_DONE; + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.HASH_INVALIDATION_DONE; } - } public static AllInvalidationDone allInvalidationDone(String cacheId) { @@ -170,10 +118,9 @@ public String getCacheId() { } @Override - public Type getType() { - return Type.ALL_INVALIDATION_DONE; + public final EhcacheResponseType 
getResponseType() { + return EhcacheResponseType.ALL_INVALIDATION_DONE; } - } public static ServerInvalidateHash serverInvalidateHash(String cacheId, long key) { @@ -198,8 +145,8 @@ public long getKey() { } @Override - public Type getType() { - return Type.SERVER_INVALIDATE_HASH; + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.SERVER_INVALIDATE_HASH; } } @@ -231,8 +178,8 @@ public int getInvalidationId() { } @Override - public Type getType() { - return Type.CLIENT_INVALIDATE_HASH; + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.CLIENT_INVALIDATE_HASH; } } @@ -258,8 +205,8 @@ public int getInvalidationId() { } @Override - public Type getType() { - return Type.CLIENT_INVALIDATE_ALL; + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.CLIENT_INVALIDATE_ALL; } } @@ -275,14 +222,14 @@ public MapValue(Object value) { this.value = value; } - @Override - public Type getType() { - return Type.MAP_VALUE; - } - public Object getValue() { return this.value; } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.MAP_VALUE; + } } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java new file mode 100644 index 0000000000..47e85aa7dd --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java @@ -0,0 +1,109 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.terracotta.runnel.EnumMapping; + +import java.util.EnumSet; + +import static java.util.EnumSet.of; +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; + +/** + * EhcacheMessageType + * + * Whenever you edit this, you must think about enum mapping and helper methods + */ +public enum EhcacheMessageType { + // Lifecycle messages + CONFIGURE, + VALIDATE, + CREATE_SERVER_STORE, + VALIDATE_SERVER_STORE, + RELEASE_SERVER_STORE, + DESTROY_SERVER_STORE, + + // ServerStore operation messages + GET_AND_APPEND, + APPEND, + REPLACE, + CLIENT_INVALIDATION_ACK, + CLEAR, + GET_STORE, + + // StateRepository operation messages + GET_STATE_REPO, + PUT_IF_ABSENT, + ENTRY_SET, + + // Passive replication messages + CHAIN_REPLICATION_OP, + CLIENT_ID_TRACK_OP, + CLEAR_INVALIDATION_COMPLETE, + INVALIDATION_COMPLETE, + CREATE_SERVER_STORE_REPLICATION, + DESTROY_SERVER_STORE_REPLICATION; + + public static final String MESSAGE_TYPE_FIELD_NAME = "opCode"; + public static final int MESSAGE_TYPE_FIELD_INDEX = 10; + public static final EnumMapping EHCACHE_MESSAGE_TYPES_ENUM_MAPPING = newEnumMappingBuilder(EhcacheMessageType.class) + .mapping(CONFIGURE, 1) + .mapping(VALIDATE, 2) + .mapping(CREATE_SERVER_STORE, 3) + .mapping(VALIDATE_SERVER_STORE, 4) + .mapping(RELEASE_SERVER_STORE, 5) + .mapping(DESTROY_SERVER_STORE, 6) + + .mapping(GET_AND_APPEND, 21) + .mapping(APPEND, 22) + .mapping(REPLACE, 23) + .mapping(CLIENT_INVALIDATION_ACK, 24) + .mapping(CLEAR, 25) + 
.mapping(GET_STORE, 26) + + .mapping(GET_STATE_REPO, 41) + .mapping(PUT_IF_ABSENT, 42) + .mapping(ENTRY_SET, 43) + + .mapping(CHAIN_REPLICATION_OP, 61) + .mapping(CLIENT_ID_TRACK_OP, 62) + .mapping(CLEAR_INVALIDATION_COMPLETE, 63) + .mapping(INVALIDATION_COMPLETE, 64) + .mapping(CREATE_SERVER_STORE_REPLICATION, 65) + .mapping(DESTROY_SERVER_STORE_REPLICATION, 66) + .build(); + + public static final EnumSet LIFECYCLE_MESSAGES = of(CONFIGURE, VALIDATE, CREATE_SERVER_STORE, VALIDATE_SERVER_STORE, RELEASE_SERVER_STORE, DESTROY_SERVER_STORE); + public static boolean isLifecycleMessage(EhcacheMessageType value) { + return LIFECYCLE_MESSAGES.contains(value); + } + + public static final EnumSet STORE_OPERATION_MESSAGES = of(GET_AND_APPEND, APPEND, REPLACE, CLIENT_INVALIDATION_ACK, CLEAR, GET_STORE); + public static boolean isStoreOperationMessage(EhcacheMessageType value) { + return STORE_OPERATION_MESSAGES.contains(value); + } + + public static final EnumSet STATE_REPO_OPERATION_MESSAGES = of(GET_STATE_REPO, PUT_IF_ABSENT, ENTRY_SET); + public static boolean isStateRepoOperationMessage(EhcacheMessageType value) { + return STATE_REPO_OPERATION_MESSAGES.contains(value); + } + + public static final EnumSet PASSIVE_REPLICATION_MESSAGES = of(CHAIN_REPLICATION_OP, CLIENT_ID_TRACK_OP, CLEAR_INVALIDATION_COMPLETE, INVALIDATION_COMPLETE, CREATE_SERVER_STORE_REPLICATION, DESTROY_SERVER_STORE_REPLICATION); + public static boolean isPassiveReplicationMessage(EhcacheMessageType value) { + return PASSIVE_REPLICATION_MESSAGES.contains(value); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java new file mode 100644 index 0000000000..c2e2893912 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java @@ -0,0 +1,30 @@ +/* + * Copyright 
Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +/** + * EhcacheOperationMessage + */ +public abstract class EhcacheOperationMessage extends EhcacheEntityMessage { + + public abstract EhcacheMessageType getMessageType(); + + @Override + public String toString() { + return getMessageType().toString(); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java new file mode 100644 index 0000000000..f33298f731 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java @@ -0,0 +1,51 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import org.terracotta.runnel.EnumMapping; + +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; + +/** + * EhcacheResponseType + */ +public enum EhcacheResponseType { + SUCCESS, + FAILURE, + GET_RESPONSE, + HASH_INVALIDATION_DONE, + CLIENT_INVALIDATE_HASH, + CLIENT_INVALIDATE_ALL, + SERVER_INVALIDATE_HASH, + MAP_VALUE, + ALL_INVALIDATION_DONE; + + + public static final String RESPONSE_TYPE_FIELD_NAME = "opCode"; + public static final int RESPONSE_TYPE_FIELD_INDEX = 10; + public static final EnumMapping EHCACHE_RESPONSE_TYPES_ENUM_MAPPING = newEnumMappingBuilder(EhcacheResponseType.class) + .mapping(EhcacheResponseType.SUCCESS, 80) + .mapping(EhcacheResponseType.FAILURE, 81) + .mapping(EhcacheResponseType.GET_RESPONSE, 82) + .mapping(EhcacheResponseType.HASH_INVALIDATION_DONE, 83) + .mapping(EhcacheResponseType.ALL_INVALIDATION_DONE, 84) + .mapping(EhcacheResponseType.CLIENT_INVALIDATE_HASH, 85) + .mapping(EhcacheResponseType.CLIENT_INVALIDATE_ALL, 86) + .mapping(EhcacheResponseType.SERVER_INVALIDATE_HASH, 87) + .mapping(EhcacheResponseType.MAP_VALUE, 88) + .build(); +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java new file mode 100644 index 0000000000..8618f6ef34 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java @@ -0,0 +1,120 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.exceptions.ClusterException; +import org.ehcache.clustered.common.internal.exceptions.UnknownClusterException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructArrayEncoder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; + +class ExceptionCodec { + + private static final Logger LOGGER = LoggerFactory.getLogger(ExceptionCodec.class); + + private static final String DECLARING_CLASS_FIELD = "declaringClass"; + private static final String METHOD_NAME_FIELD = "methodName"; + private static final String FILE_NAME_FIELD = "fileName"; + private static final String LINE_NUM_FIELD = "lineNumber"; + private static final String FQCN_FIELD = "fqcn"; + private static final String MESSAGE_FIELD = "message"; + private static final String STACKTRACE_ELEMENTS_FIELD = "stacktraceElements"; + + private static final Struct STE_STRUCT = StructBuilder.newStructBuilder() + .string(DECLARING_CLASS_FIELD, 10) + .string(METHOD_NAME_FIELD, 20) + .string(FILE_NAME_FIELD, 30) + .int32(LINE_NUM_FIELD, 40) + .build(); + + static final Struct EXCEPTION_STRUCT = StructBuilder.newStructBuilder() + .string(FQCN_FIELD, 10) + 
.string(MESSAGE_FIELD, 20) + .structs(STACKTRACE_ELEMENTS_FIELD, 30, STE_STRUCT) + .build(); + + public void encode(StructEncoder> encoder, ClusterException exception) { + encoder.string(FQCN_FIELD, exception.getClass().getCanonicalName()); + encoder.string(MESSAGE_FIELD, exception.getMessage()); + StructArrayEncoder>> arrayEncoder = encoder.structs(STACKTRACE_ELEMENTS_FIELD); + for (StackTraceElement stackTraceElement : exception.getStackTrace()) { + arrayEncoder.string(DECLARING_CLASS_FIELD, stackTraceElement.getClassName()); + arrayEncoder.string(METHOD_NAME_FIELD, stackTraceElement.getMethodName()); + if (stackTraceElement.getFileName() != null) { + arrayEncoder.string(FILE_NAME_FIELD, stackTraceElement.getFileName()); + } + arrayEncoder.int32(LINE_NUM_FIELD, stackTraceElement.getLineNumber()); + arrayEncoder.next(); + } + arrayEncoder.end(); + } + + public ClusterException decode(StructDecoder> decoder) { + String exceptionClassName = decoder.string(FQCN_FIELD); + String message = decoder.string(MESSAGE_FIELD); + StructArrayDecoder>> arrayDecoder = decoder.structs(STACKTRACE_ELEMENTS_FIELD); + StackTraceElement[] stackTraceElements = new StackTraceElement[arrayDecoder.length()]; + for (int i = 0; i < arrayDecoder.length(); i++) { + stackTraceElements[i] = new StackTraceElement(arrayDecoder.string(DECLARING_CLASS_FIELD), arrayDecoder.string(METHOD_NAME_FIELD), arrayDecoder + .string(FILE_NAME_FIELD), arrayDecoder.int32(LINE_NUM_FIELD)); + arrayDecoder.next(); + } + arrayDecoder.end(); + Class clazz = null; + ClusterException exception = null; + try { + clazz = Class.forName(exceptionClassName); + } catch (ClassNotFoundException e) { + LOGGER.error("Exception type not found", e); + } + exception = getClusterException(message, clazz); + if (exception == null) { + exception = new UnknownClusterException(message); + } + exception.setStackTrace(stackTraceElements); + return exception; + } + + @SuppressWarnings("unchecked") + private ClusterException 
getClusterException(String message, Class clazz) { + ClusterException exception = null; + if (clazz != null) { + try { + Constructor declaredConstructor = clazz.getDeclaredConstructor(String.class); + exception = (ClusterException)declaredConstructor.newInstance(message); + } catch (NoSuchMethodException e) { + LOGGER.error("Failed to instantiate exception object.", e); + } catch (IllegalAccessException e) { + LOGGER.error("Failed to instantiate exception object.", e); + } catch (InstantiationException e) { + LOGGER.error("Failed to instantiate exception object.", e); + } catch (InvocationTargetException e) { + LOGGER.error("Failed to instantiate exception object.", e); + } + } + return exception; + } + +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java index 4c98eae64f..ca0637544c 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java @@ -16,33 +16,260 @@ package org.ehcache.clustered.common.internal.messages; -import org.ehcache.clustered.common.internal.store.Util; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; +import java.util.UUID; -class LifeCycleMessageCodec { +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; +import static 
org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; - private static final byte OPCODE_SIZE = 1; +public class LifeCycleMessageCodec { + + private static final String CONFIG_PRESENT_FIELD = "configPresent"; + + private final StructBuilder CONFIGURE_MESSAGE_STRUCT_BUILDER_PREFIX = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .bool(CONFIG_PRESENT_FIELD, 30); + private static final int CONFIGURE_MESSAGE_NEXT_INDEX = 40; + + private final StructBuilder CREATE_STORE_MESSAGE_STRUCT_BUILDER_PREFIX = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30); + private static final int CREATE_STORE_NEXT_INDEX = 40; + + private static final Struct DESTROY_STORE_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .build(); + + private final Struct RELEASE_STORE_MESSAGE_STRUCT = DESTROY_STORE_MESSAGE_STRUCT; + + private final Struct configureMessageStruct; + private final Struct validateMessageStruct; + private final Struct createStoreMessageStruct; + private 
final Struct validateStoreMessageStruct; + + private final MessageCodecUtils messageCodecUtils; + private final ConfigCodec configCodec; + + public LifeCycleMessageCodec(ConfigCodec configCodec) { + this.messageCodecUtils = new MessageCodecUtils(); + this.configCodec = configCodec; + configureMessageStruct = this.configCodec.injectServerSideConfiguration( + CONFIGURE_MESSAGE_STRUCT_BUILDER_PREFIX, CONFIGURE_MESSAGE_NEXT_INDEX).getUpdatedBuilder().build(); + validateMessageStruct = configureMessageStruct; + + createStoreMessageStruct = this.configCodec.injectServerStoreConfiguration( + CREATE_STORE_MESSAGE_STRUCT_BUILDER_PREFIX, CREATE_STORE_NEXT_INDEX).getUpdatedBuilder().build(); + validateStoreMessageStruct = createStoreMessageStruct; + } public byte[] encode(LifecycleMessage message) { - byte[] encodedMsg = Util.marshall(message); - ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + encodedMsg.length); - buffer.put(message.getOpCode()); - buffer.put(encodedMsg); - return buffer.array(); - } - - public EhcacheEntityMessage decode(byte[] payload) { - ByteBuffer message = ByteBuffer.wrap(payload); - byte[] encodedMsg = new byte[message.capacity() - OPCODE_SIZE]; - byte opCode = message.get(); - if (opCode == EhcacheEntityMessage.Type.LIFECYCLE_OP.getCode()) { - message.get(encodedMsg, 0, encodedMsg.length); - EhcacheEntityMessage entityMessage = (EhcacheEntityMessage) Util.unmarshall(encodedMsg); - return entityMessage; + //For configure message id serves as message creation timestamp + if (message instanceof LifecycleMessage.ConfigureStoreManager) { + message.setId(System.nanoTime()); + } + + switch (message.getMessageType()) { + case CONFIGURE: + return encodeTierManagerConfigureMessage((LifecycleMessage.ConfigureStoreManager) message); + case VALIDATE: + return encodeTierManagerValidateMessage((LifecycleMessage.ValidateStoreManager) message); + case CREATE_SERVER_STORE: + return encodeCreateStoreMessage((LifecycleMessage.CreateServerStore) message); + case 
VALIDATE_SERVER_STORE: + return encodeValidateStoreMessage((LifecycleMessage.ValidateServerStore) message); + case DESTROY_SERVER_STORE: + return encodeDestroyStoreMessage((LifecycleMessage.DestroyServerStore) message); + case RELEASE_SERVER_STORE: + return encodeReleaseStoreMessage((LifecycleMessage.ReleaseServerStore) message); + default: + throw new IllegalArgumentException("Unknown lifecycle message: " + message.getClass()); + } + } + + private byte[] encodeReleaseStoreMessage(LifecycleMessage.ReleaseServerStore message) { + StructEncoder encoder = RELEASE_STORE_MESSAGE_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); + return encoder.encode().array(); + } + + private byte[] encodeDestroyStoreMessage(LifecycleMessage.DestroyServerStore message) { + StructEncoder encoder = DESTROY_STORE_MESSAGE_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); + return encoder.encode().array(); + } + + private byte[] encodeCreateStoreMessage(LifecycleMessage.CreateServerStore message) { + StructEncoder encoder = createStoreMessageStruct.encoder(); + return encodeBaseServerStoreMessage(message, encoder); + } + + private byte[] encodeValidateStoreMessage(LifecycleMessage.ValidateServerStore message) { + return encodeBaseServerStoreMessage(message, validateStoreMessageStruct.encoder()); + } + + private byte[] encodeBaseServerStoreMessage(LifecycleMessage.BaseServerStore message, StructEncoder encoder) { + messageCodecUtils.encodeMandatoryFields(encoder, message); + + encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); + configCodec.encodeServerStoreConfiguration(encoder, message.getStoreConfiguration()); + return encoder.encode().array(); + } + + private byte[] encodeTierManagerConfigureMessage(LifecycleMessage.ConfigureStoreManager message) { + return 
encodeTierManagerCreateOrValidate(message, message.getConfiguration(), configureMessageStruct.encoder()); + } + + private byte[] encodeTierManagerValidateMessage(LifecycleMessage.ValidateStoreManager message) { + return encodeTierManagerCreateOrValidate(message, message.getConfiguration(), validateMessageStruct.encoder()); + } + + private byte[] encodeTierManagerCreateOrValidate(LifecycleMessage message, ServerSideConfiguration config, StructEncoder encoder) { + messageCodecUtils.encodeMandatoryFields(encoder, message); + if (config == null) { + encoder.bool(CONFIG_PRESENT_FIELD, false); } else { - throw new IllegalArgumentException("LifeCycleMessage operation not defined for : " + opCode); + encoder.bool(CONFIG_PRESENT_FIELD, true); + configCodec.encodeServerSideConfiguration(encoder, config); + } + return encoder.encode().array(); + } + + public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { + + switch (messageType) { + case CONFIGURE: + return decodeConfigureMessage(messageBuffer); + case VALIDATE: + return decodeValidateMessage(messageBuffer); + case CREATE_SERVER_STORE: + return decodeCreateServerStoreMessage(messageBuffer); + case VALIDATE_SERVER_STORE: + return decodeValidateServerStoreMessage(messageBuffer); + case DESTROY_SERVER_STORE: + return decodeDestroyServerStoreMessage(messageBuffer); + case RELEASE_SERVER_STORE: + return decodeReleaseServerStoreMessage(messageBuffer); } + throw new IllegalArgumentException("LifeCycleMessage operation not defined for : " + messageType); + } + + private LifecycleMessage.ReleaseServerStore decodeReleaseServerStoreMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = RELEASE_STORE_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID cliendId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + + LifecycleMessage.ReleaseServerStore message = new 
LifecycleMessage.ReleaseServerStore(storeName, cliendId); + message.setId(msgId); + return message; + } + + private LifecycleMessage.DestroyServerStore decodeDestroyServerStoreMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = DESTROY_STORE_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID cliendId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + + LifecycleMessage.DestroyServerStore message = new LifecycleMessage.DestroyServerStore(storeName, cliendId); + message.setId(msgId); + return message; } + private LifecycleMessage.ValidateServerStore decodeValidateServerStoreMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = validateStoreMessageStruct.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID cliendId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + ServerStoreConfiguration config = configCodec.decodeServerStoreConfiguration(decoder); + + LifecycleMessage.ValidateServerStore message = new LifecycleMessage.ValidateServerStore(storeName, config, cliendId); + message.setId(msgId); + return message; + } + + private LifecycleMessage.CreateServerStore decodeCreateServerStoreMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = createStoreMessageStruct.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID cliendId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + ServerStoreConfiguration config = configCodec.decodeServerStoreConfiguration(decoder); + + LifecycleMessage.CreateServerStore message = new LifecycleMessage.CreateServerStore(storeName, config, cliendId); + message.setId(msgId); + return message; + } + + private LifecycleMessage.ValidateStoreManager decodeValidateMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = validateMessageStruct.decoder(messageBuffer); + + 
Long msgId = decoder.int64(MSG_ID_FIELD); + UUID cliendId = messageCodecUtils.decodeUUID(decoder); + boolean configPresent = decoder.bool(CONFIG_PRESENT_FIELD); + + ServerSideConfiguration config = null; + if (configPresent) { + config = configCodec.decodeServerSideConfiguration(decoder); + } + + + LifecycleMessage.ValidateStoreManager message = new LifecycleMessage.ValidateStoreManager(config, cliendId); + if (msgId != null) { + message.setId(msgId); + } + return message; + } + + private LifecycleMessage.ConfigureStoreManager decodeConfigureMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = configureMessageStruct.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + boolean configPresent = decoder.bool(CONFIG_PRESENT_FIELD); + + ServerSideConfiguration config = null; + if (configPresent) { + config = configCodec.decodeServerSideConfiguration(decoder); + } + + LifecycleMessage.ConfigureStoreManager message = new LifecycleMessage.ConfigureStoreManager(config, clientId); + if (msgId != null) { + message.setId(msgId); + } + return message; + } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java index 2b34a52289..fe4c79ec61 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java @@ -19,30 +19,37 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import java.util.UUID; + public class LifeCycleMessageFactory { - public EhcacheEntityMessage validateStoreManager(ServerSideConfiguration configuration){ - return new 
LifecycleMessage.ValidateStoreManager(configuration); + private UUID clientId; + + public LifecycleMessage validateStoreManager(ServerSideConfiguration configuration){ + return new LifecycleMessage.ValidateStoreManager(configuration, clientId); } - public EhcacheEntityMessage configureStoreManager(ServerSideConfiguration configuration) { - return new LifecycleMessage.ConfigureStoreManager(configuration); + public LifecycleMessage configureStoreManager(ServerSideConfiguration configuration) { + return new LifecycleMessage.ConfigureStoreManager(configuration, clientId); } - public EhcacheEntityMessage createServerStore(String name, ServerStoreConfiguration serverStoreConfiguration) { - return new LifecycleMessage.CreateServerStore(name, serverStoreConfiguration); + public LifecycleMessage createServerStore(String name, ServerStoreConfiguration serverStoreConfiguration) { + return new LifecycleMessage.CreateServerStore(name, serverStoreConfiguration, clientId); } - public EhcacheEntityMessage validateServerStore(String name, ServerStoreConfiguration serverStoreConfiguration) { - return new LifecycleMessage.ValidateServerStore(name, serverStoreConfiguration); + public LifecycleMessage validateServerStore(String name, ServerStoreConfiguration serverStoreConfiguration) { + return new LifecycleMessage.ValidateServerStore(name, serverStoreConfiguration, clientId); } - public EhcacheEntityMessage releaseServerStore(String name) { - return new LifecycleMessage.ReleaseServerStore(name); + public LifecycleMessage releaseServerStore(String name) { + return new LifecycleMessage.ReleaseServerStore(name, clientId); } - public EhcacheEntityMessage destroyServerStore(String name) { - return new LifecycleMessage.DestroyServerStore(name); + public LifecycleMessage destroyServerStore(String name) { + return new LifecycleMessage.DestroyServerStore(name, clientId); } + public void setClientId(UUID clientId) { + this.clientId = clientId; + } } diff --git 
a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java index 9638ad5782..5549a9e309 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java @@ -20,33 +20,29 @@ import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import java.io.Serializable; +import java.util.UUID; -public abstract class LifecycleMessage extends EhcacheEntityMessage implements Serializable { +public abstract class LifecycleMessage extends EhcacheOperationMessage implements Serializable { - public enum LifeCycleOp { - CONFIGURE, - VALIDATE, - CREATE_SERVER_STORE, - VALIDATE_SERVER_STORE, - RELEASE_SERVER_STORE, - DESTROY_SERVER_STORE, - } + protected UUID clientId; + protected long id = NOT_REPLICATED; @Override - public byte getOpCode() { - return getType().getCode(); + public UUID getClientId() { + if (clientId == null) { + throw new AssertionError("Client Id cannot be null for lifecycle messages"); + } + return this.clientId; } @Override - public Type getType() { - return Type.LIFECYCLE_OP; + public long getId() { + return this.id; } - public abstract LifeCycleOp operation(); - @Override - public String toString() { - return getType() + "#" + operation(); + public void setId(long id) { + this.id = id; } public static class ValidateStoreManager extends LifecycleMessage { @@ -54,13 +50,14 @@ public static class ValidateStoreManager extends LifecycleMessage { private final ServerSideConfiguration configuration; - ValidateStoreManager(ServerSideConfiguration config) { + ValidateStoreManager(ServerSideConfiguration config, UUID clientId) { this.configuration = config; + this.clientId = clientId; } @Override - public LifeCycleOp operation() { - return LifeCycleOp.VALIDATE; + 
public EhcacheMessageType getMessageType() { + return EhcacheMessageType.VALIDATE; } public ServerSideConfiguration getConfiguration() { @@ -73,13 +70,14 @@ public static class ConfigureStoreManager extends LifecycleMessage { private final ServerSideConfiguration configuration; - ConfigureStoreManager(ServerSideConfiguration config) { + ConfigureStoreManager(ServerSideConfiguration config, UUID clientId) { this.configuration = config; + this.clientId = clientId; } @Override - public LifeCycleOp operation() { - return LifeCycleOp.CONFIGURE; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CONFIGURE; } public ServerSideConfiguration getConfiguration() { @@ -93,9 +91,10 @@ public abstract static class BaseServerStore extends LifecycleMessage { private final String name; private final ServerStoreConfiguration storeConfiguration; - protected BaseServerStore(String name, ServerStoreConfiguration storeConfiguration) { + BaseServerStore(String name, ServerStoreConfiguration storeConfiguration, UUID clientId) { this.name = name; this.storeConfiguration = storeConfiguration; + this.clientId = clientId; } public String getName() { @@ -114,13 +113,13 @@ public ServerStoreConfiguration getStoreConfiguration() { public static class CreateServerStore extends BaseServerStore { private static final long serialVersionUID = -5832725455629624613L; - CreateServerStore(String name, ServerStoreConfiguration storeConfiguration) { - super(name, storeConfiguration); + CreateServerStore(String name, ServerStoreConfiguration storeConfiguration, UUID clientId) { + super(name, storeConfiguration, clientId); } @Override - public LifeCycleOp operation() { - return LifeCycleOp.CREATE_SERVER_STORE; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CREATE_SERVER_STORE; } } @@ -130,13 +129,13 @@ public LifeCycleOp operation() { public static class ValidateServerStore extends BaseServerStore { private static final long serialVersionUID = 
8762670006846832185L; - ValidateServerStore(String name, ServerStoreConfiguration storeConfiguration) { - super(name, storeConfiguration); + ValidateServerStore(String name, ServerStoreConfiguration storeConfiguration, UUID clientId) { + super(name, storeConfiguration, clientId); } @Override - public LifeCycleOp operation() { - return LifeCycleOp.VALIDATE_SERVER_STORE; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.VALIDATE_SERVER_STORE; } } @@ -148,13 +147,14 @@ public static class ReleaseServerStore extends LifecycleMessage { private final String name; - ReleaseServerStore(String name) { + ReleaseServerStore(String name, UUID clientId) { this.name = name; + this.clientId = clientId; } @Override - public LifeCycleOp operation() { - return LifeCycleOp.RELEASE_SERVER_STORE; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.RELEASE_SERVER_STORE; } public String getName() { @@ -170,13 +170,14 @@ public static class DestroyServerStore extends LifecycleMessage { private final String name; - DestroyServerStore(String name) { + DestroyServerStore(String name, UUID clientId) { this.name = name; + this.clientId = clientId; } @Override - public LifeCycleOp operation() { - return LifeCycleOp.DESTROY_SERVER_STORE; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.DESTROY_SERVER_STORE; } public String getName() { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java new file mode 100644 index 0000000000..42c1762803 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java @@ -0,0 +1,54 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.terracotta.runnel.EnumMapping; +import org.terracotta.runnel.decoding.Enm; +import org.terracotta.runnel.decoding.PrimitiveDecodingSupport; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.PrimitiveEncodingSupport; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.util.UUID; + +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; + +/** + * MessageCodecUtils + */ +public class MessageCodecUtils { + + public static final String MSG_ID_FIELD = "msgId"; + public static final String LSB_UUID_FIELD = "lsbUUID"; + public static final String MSB_UUID_FIELD = "msbUUID"; + public static final String SERVER_STORE_NAME_FIELD = "serverStoreName"; + public static final String KEY_FIELD = "key"; + + public void encodeMandatoryFields(StructEncoder encoder, EhcacheOperationMessage message) { + encoder.enm(EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .int64(MSG_ID_FIELD, message.getId()) + .int64(MSB_UUID_FIELD, message.getClientId().getMostSignificantBits()) + .int64(LSB_UUID_FIELD, message.getClientId().getLeastSignificantBits()); + } + + public UUID decodeUUID(StructDecoder decoder) { + return new UUID(decoder.int64(MSB_UUID_FIELD), decoder.int64(LSB_UUID_FIELD)); + } +} diff --git 
a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java deleted file mode 100644 index b44bc82820..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.ehcache.clustered.common.internal.messages; - - -import java.nio.ByteBuffer; -import java.util.HashSet; -import java.util.Set; - -public class ReconnectDataCodec { - - private static final byte ENTRY_SIZE = 4; - - public byte[] encode(Set cacheIds, int length) { - ByteBuffer reconnectData = ByteBuffer.allocate(2 * length + cacheIds.size() * ENTRY_SIZE); - for (String cacheId : cacheIds) { - reconnectData.putInt(cacheId.length()); - CodecUtil.putStringAsCharArray(reconnectData, cacheId); - } - - return reconnectData.array(); - } - - public Set decode(byte[] payload) { - Set cacheIds = new HashSet(); - ByteBuffer byteBuffer = ByteBuffer.wrap(payload); - while (byteBuffer.hasRemaining()) { - int cacheIdSize = byteBuffer.getInt(); - cacheIds.add(CodecUtil.getStringFromBuffer(byteBuffer, cacheIdSize)); - } - return cacheIds; - } - -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java new file mode 100644 index 0000000000..8993de4b5e --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java @@ -0,0 +1,66 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +public class ReconnectMessage { + + private final UUID clientId; + private final Set caches; + private final ConcurrentMap> hashInvalidationsInProgressPerCache = new ConcurrentHashMap>(); + private final Set cachesWithClearInProgress = Collections.newSetFromMap(new ConcurrentHashMap()); + + public ReconnectMessage(UUID clientId, Set caches) { + if (clientId == null) { + throw new IllegalStateException("ClientID cannot be null"); + } + this.clientId = clientId; + this.caches = new HashSet(caches); + } + + public UUID getClientId() { + return clientId; + } + + public Set getAllCaches() { + return this.caches; + } + + public void addInvalidationsInProgress(String cacheId, Set hashInvalidationsInProgress) { + hashInvalidationsInProgressPerCache.put(cacheId, hashInvalidationsInProgress); + } + + public Set getInvalidationsInProgress(String cacheId) { + Set hashToInvalidate = hashInvalidationsInProgressPerCache.get(cacheId); + return hashToInvalidate == null ? Collections.emptySet() : hashToInvalidate; + } + + public void addClearInProgress(String cacheId) { + cachesWithClearInProgress.add(cacheId); + } + + public boolean isClearInProgress(String cacheId) { + return cachesWithClearInProgress.contains(cacheId); + } + +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java new file mode 100644 index 0000000000..57dd41bf15 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java @@ -0,0 +1,99 @@ +/* + * Copyright Terracotta, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +public class ReconnectMessageCodec { + + private static final byte CLIENT_ID_SIZE = 16; + private static final byte ENTRY_SIZE = 4; + private static final byte HASH_SIZE = 8; + private static final byte CLEAR_IN_PROGRESS_STATUS_SIZE = 1; + + public byte[] encode(ReconnectMessage reconnectMessage) { + int totalLength = 0; + Set caches = reconnectMessage.getAllCaches(); + List byteBuffers = new ArrayList(); + for (String cache : caches) { + Set hashToInvalidate = reconnectMessage.getInvalidationsInProgress(cache); + int sizeOfBuffer = 2 * cache.length() + CLEAR_IN_PROGRESS_STATUS_SIZE + hashToInvalidate.size() * HASH_SIZE + 2 * ENTRY_SIZE; + ByteBuffer encodedCache = ByteBuffer.allocate(sizeOfBuffer); + encodedCache.putInt(cache.length()); + CodecUtil.putStringAsCharArray(encodedCache, cache); + if (reconnectMessage.isClearInProgress(cache)) { + encodedCache.put((byte)1); + } else { + encodedCache.put((byte)0); + } + encodedCache.putInt(hashToInvalidate.size()); + for (long hash : hashToInvalidate) { + encodedCache.putLong(hash); + } + encodedCache.flip(); + 
byteBuffers.add(encodedCache); + totalLength += sizeOfBuffer; + } + ByteBuffer encodedMsg = ByteBuffer.allocate(totalLength + CLIENT_ID_SIZE); + encodedMsg.put(ClusteredEhcacheIdentity.serialize(reconnectMessage.getClientId())); + for (ByteBuffer byteBuffer : byteBuffers) { + encodedMsg.put(byteBuffer); + } + return encodedMsg.array(); + } + + public ReconnectMessage decode(byte[] payload) { + ByteBuffer byteBuffer = ByteBuffer.wrap(payload); + long msb = byteBuffer.getLong(); + long lsb = byteBuffer.getLong(); + + Map> caches = new HashMap>(); + Set clearInProgressCache = new HashSet(); + + while (byteBuffer.hasRemaining()) { + int cacheIdSize = byteBuffer.getInt(); + String cacheId = CodecUtil.getStringFromBuffer(byteBuffer, cacheIdSize); + byte clearInProgress = byteBuffer.get(); + if (clearInProgress == 1) { + clearInProgressCache.add(cacheId); + } + Set hashToInvalidate = new HashSet(); + int numOfHash = byteBuffer.getInt(); + for (int i = 0; i < numOfHash; i++) { + hashToInvalidate.add(byteBuffer.getLong()); + } + caches.put(cacheId, hashToInvalidate); + } + ReconnectMessage reconnectMessage = new ReconnectMessage(new UUID(msb, lsb), caches.keySet()); + for (Map.Entry> cacheEntry : caches.entrySet()) { + if (clearInProgressCache.contains(cacheEntry.getKey())) { + reconnectMessage.addClearInProgress(cacheEntry.getKey()); + } + reconnectMessage.addInvalidationsInProgress(cacheEntry.getKey(), cacheEntry.getValue()); + } + return reconnectMessage; + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java index 02453540ca..b191936923 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java @@ -18,147 +18,227 @@ import 
org.ehcache.clustered.common.internal.exceptions.ClusterException; import org.ehcache.clustered.common.internal.store.Util; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.Enm; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; +import org.terracotta.runnel.encoding.StructEncoderFunction; import java.nio.ByteBuffer; +import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.AllInvalidationDone; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateAll; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateHash; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.HashInvalidationDone; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ServerInvalidateHash; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.MapValue; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.EHCACHE_RESPONSE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.RESPONSE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.RESPONSE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; -class ResponseCodec { +public class ResponseCodec { - private static final byte OP_CODE_SIZE = 1; + private static final String EXCEPTION_FIELD = "exception"; + private static final String INVALIDATION_ID_FIELD = "invalidationId"; + private static final String CHAIN_FIELD 
= "chain"; + private static final String MAP_VALUE_FIELD = "mapValue"; + + private final ExceptionCodec exceptionCodec = new ExceptionCodec(); + + private static final Struct SUCCESS_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .build(); + private static final Struct FAILURE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .struct(EXCEPTION_FIELD, 20, ExceptionCodec.EXCEPTION_STRUCT) + .build(); + private static final Struct GET_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .struct(CHAIN_FIELD, 20, CHAIN_STRUCT) + .build(); + private static final Struct HASH_INVALIDATION_DONE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int64(KEY_FIELD, 30) + .build(); + private static final Struct ALL_INVALIDATION_DONE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .build(); + private static final Struct CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int64(KEY_FIELD, 30) + .int32(INVALIDATION_ID_FIELD, 40) + .build(); + private static final Struct CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int32(INVALIDATION_ID_FIELD, 30) + .build(); + private static final Struct 
SERVER_INVALIDATE_HASH_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int64(KEY_FIELD, 30) + .build(); + private static final Struct MAP_VALUE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .byteBuffer(MAP_VALUE_FIELD, 20) + .build(); private final ChainCodec chainCodec; - ResponseCodec() { + public ResponseCodec() { this.chainCodec = new ChainCodec(); } public byte[] encode(EhcacheEntityResponse response) { - switch (response.getType()) { + switch (response.getResponseType()) { case FAILURE: - EhcacheEntityResponse.Failure failure = (EhcacheEntityResponse.Failure)response; - byte[] failureMsg = Util.marshall(failure.getCause()); - ByteBuffer buffer = ByteBuffer.allocate(OP_CODE_SIZE + failureMsg.length); - buffer.put(EhcacheEntityResponse.Type.FAILURE.getOpCode()); - buffer.put(failureMsg); - return buffer.array(); + final EhcacheEntityResponse.Failure failure = (EhcacheEntityResponse.Failure)response; + return FAILURE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, failure.getResponseType()) + .struct(EXCEPTION_FIELD, new StructEncoderFunction>>() { + @Override + public void encode(StructEncoder> encoder) { + exceptionCodec.encode(encoder, failure.getCause()); + } + }) + .encode().array(); case SUCCESS: - buffer = ByteBuffer.allocate(OP_CODE_SIZE); - buffer.put(EhcacheEntityResponse.Type.SUCCESS.getOpCode()); - return buffer.array(); + return SUCCESS_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, response.getResponseType()) + .encode().array(); case GET_RESPONSE: - EhcacheEntityResponse.GetResponse getResponse = (EhcacheEntityResponse.GetResponse)response; - byte[] encodedChain = chainCodec.encode(getResponse.getChain()); - int chainLen = encodedChain.length; - buffer = 
ByteBuffer.allocate(OP_CODE_SIZE + chainLen); - buffer.put(EhcacheEntityResponse.Type.GET_RESPONSE.getOpCode()); - buffer.put(encodedChain); - return buffer.array(); + final EhcacheEntityResponse.GetResponse getResponse = (EhcacheEntityResponse.GetResponse)response; + return GET_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, getResponse.getResponseType()) + .struct(CHAIN_FIELD, new StructEncoderFunction>>() { + @Override + public void encode(StructEncoder> encoder) { + chainCodec.encode(encoder, getResponse.getChain()); + } + }) + .encode().array(); case HASH_INVALIDATION_DONE: { HashInvalidationDone hashInvalidationDone = (HashInvalidationDone) response; - buffer = ByteBuffer.allocate(OP_CODE_SIZE + hashInvalidationDone.getCacheId().length() * 2 + 8); - buffer.put(EhcacheEntityResponse.Type.HASH_INVALIDATION_DONE.getOpCode()); - CodecUtil.putStringAsCharArray(buffer, hashInvalidationDone.getCacheId()); - buffer.putLong(hashInvalidationDone.getKey()); - return buffer.array(); + return HASH_INVALIDATION_DONE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, hashInvalidationDone.getResponseType()) + .string(SERVER_STORE_NAME_FIELD, hashInvalidationDone.getCacheId()) + .int64(KEY_FIELD, hashInvalidationDone.getKey()) + .encode().array(); } case ALL_INVALIDATION_DONE: { AllInvalidationDone allInvalidationDone = (AllInvalidationDone) response; - buffer = ByteBuffer.allocate(OP_CODE_SIZE + allInvalidationDone.getCacheId().length() * 2); - buffer.put(EhcacheEntityResponse.Type.ALL_INVALIDATION_DONE.getOpCode()); - CodecUtil.putStringAsCharArray(buffer, allInvalidationDone.getCacheId()); - return buffer.array(); + return ALL_INVALIDATION_DONE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, allInvalidationDone.getResponseType()) + .string(SERVER_STORE_NAME_FIELD, allInvalidationDone.getCacheId()) + .encode().array(); } case CLIENT_INVALIDATE_HASH: { ClientInvalidateHash clientInvalidateHash = (ClientInvalidateHash) response; - buffer = 
ByteBuffer.allocate(OP_CODE_SIZE + clientInvalidateHash.getCacheId().length() * 2 + 12); - buffer.put(EhcacheEntityResponse.Type.CLIENT_INVALIDATE_HASH.getOpCode()); - CodecUtil.putStringAsCharArray(buffer, clientInvalidateHash.getCacheId()); - buffer.putLong(clientInvalidateHash.getKey()); - buffer.putInt(((ClientInvalidateHash) response).getInvalidationId()); - return buffer.array(); + return CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, clientInvalidateHash.getResponseType()) + .string(SERVER_STORE_NAME_FIELD, clientInvalidateHash.getCacheId()) + .int64(KEY_FIELD, clientInvalidateHash.getKey()) + .int32(INVALIDATION_ID_FIELD, clientInvalidateHash.getInvalidationId()) + .encode().array(); } case CLIENT_INVALIDATE_ALL: { ClientInvalidateAll clientInvalidateAll = (ClientInvalidateAll) response; - buffer = ByteBuffer.allocate(OP_CODE_SIZE + clientInvalidateAll.getCacheId().length() * 2 + 4); - buffer.put(EhcacheEntityResponse.Type.CLIENT_INVALIDATE_ALL.getOpCode()); - CodecUtil.putStringAsCharArray(buffer, clientInvalidateAll.getCacheId()); - buffer.putInt(((ClientInvalidateAll) response).getInvalidationId()); - return buffer.array(); + return CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, clientInvalidateAll.getResponseType()) + .string(SERVER_STORE_NAME_FIELD, clientInvalidateAll.getCacheId()) + .int32(INVALIDATION_ID_FIELD, clientInvalidateAll.getInvalidationId()) + .encode().array(); } case SERVER_INVALIDATE_HASH: { ServerInvalidateHash serverInvalidateHash = (ServerInvalidateHash) response; - buffer = ByteBuffer.allocate(OP_CODE_SIZE + serverInvalidateHash.getCacheId().length() * 2 + 8); - buffer.put(EhcacheEntityResponse.Type.SERVER_INVALIDATE_HASH.getOpCode()); - CodecUtil.putStringAsCharArray(buffer, serverInvalidateHash.getCacheId()); - buffer.putLong(serverInvalidateHash.getKey()); - return buffer.array(); + return SERVER_INVALIDATE_HASH_RESPONSE_STRUCT.encoder() + 
.enm(RESPONSE_TYPE_FIELD_NAME, serverInvalidateHash.getResponseType()) + .string(SERVER_STORE_NAME_FIELD, serverInvalidateHash.getCacheId()) + .int64(KEY_FIELD, serverInvalidateHash.getKey()) + .encode().array(); } case MAP_VALUE: { MapValue mapValue = (MapValue) response; byte[] encodedMapValue = Util.marshall(mapValue.getValue()); - buffer = ByteBuffer.allocate(OP_CODE_SIZE + encodedMapValue.length); - buffer.put(EhcacheEntityResponse.Type.MAP_VALUE.getOpCode()); - buffer.put(encodedMapValue); - return buffer.array(); + return MAP_VALUE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, mapValue.getResponseType()) + .byteBuffer(MAP_VALUE_FIELD, wrap(encodedMapValue)) + .encode().array(); } default: - throw new UnsupportedOperationException("The operation is not supported : " + response.getType()); + throw new UnsupportedOperationException("The operation is not supported : " + response.getResponseType()); } } public EhcacheEntityResponse decode(byte[] payload) { - ByteBuffer buffer = ByteBuffer.wrap(payload); - byte opCode = buffer.get(); - EhcacheEntityResponse.Type type = EhcacheEntityResponse.Type.responseType(opCode); - byte[] payArr = new byte[buffer.remaining()]; - buffer.get(payArr); - switch (type) { + ByteBuffer buffer = wrap(payload); + StructDecoder decoder = SUCCESS_RESPONSE_STRUCT.decoder(buffer); + Enm opCodeEnm = decoder.enm(RESPONSE_TYPE_FIELD_NAME); + + if (!opCodeEnm.isFound()) { + throw new AssertionError("Got a response without an opCode"); + } + if (!opCodeEnm.isValid()) { + // Need to ignore the response here as we do not understand its type - coming from the future? 
+ return null; + } + + EhcacheResponseType opCode = opCodeEnm.get(); + buffer.rewind(); + switch (opCode) { case SUCCESS: return EhcacheEntityResponse.Success.INSTANCE; case FAILURE: - ClusterException exception = (ClusterException)Util.unmarshall(payArr); + decoder = FAILURE_RESPONSE_STRUCT.decoder(buffer); + ClusterException exception = exceptionCodec.decode(decoder.struct(EXCEPTION_FIELD)); return new EhcacheEntityResponse.Failure(exception.withClientStackTrace()); case GET_RESPONSE: - return new EhcacheEntityResponse.GetResponse(chainCodec.decode(payArr)); + decoder = GET_RESPONSE_STRUCT.decoder(buffer); + return new EhcacheEntityResponse.GetResponse(chainCodec.decode(decoder.struct(CHAIN_FIELD))); case HASH_INVALIDATION_DONE: { - String cacheId = ByteBuffer.wrap(payArr, 0, payArr.length - 8).asCharBuffer().toString(); - long key = ByteBuffer.wrap(payArr, payArr.length - 8, 8).getLong(); + decoder = HASH_INVALIDATION_DONE_RESPONSE_STRUCT.decoder(buffer); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + long key = decoder.int64(KEY_FIELD); return EhcacheEntityResponse.hashInvalidationDone(cacheId, key); } case ALL_INVALIDATION_DONE: { - String cacheId = ByteBuffer.wrap(payArr).asCharBuffer().toString(); + decoder = ALL_INVALIDATION_DONE_RESPONSE_STRUCT.decoder(buffer); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); return EhcacheEntityResponse.allInvalidationDone(cacheId); } case CLIENT_INVALIDATE_HASH: { - String cacheId = ByteBuffer.wrap(payArr, 0, payArr.length - 12).asCharBuffer().toString(); - ByteBuffer byteBuffer = ByteBuffer.wrap(payArr, payArr.length - 12, 12); - long key = byteBuffer.getLong(); - int invalidationId = byteBuffer.getInt(); + decoder = CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT.decoder(buffer); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + long key = decoder.int64(KEY_FIELD); + int invalidationId = decoder.int32(INVALIDATION_ID_FIELD); return EhcacheEntityResponse.clientInvalidateHash(cacheId, key, 
invalidationId); } case CLIENT_INVALIDATE_ALL: { - String cacheId = ByteBuffer.wrap(payArr, 0, payArr.length - 4).asCharBuffer().toString(); - ByteBuffer byteBuffer = ByteBuffer.wrap(payArr, payArr.length - 4, 4); - int invalidationId = byteBuffer.getInt(); + decoder = CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT.decoder(buffer); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + int invalidationId = decoder.int32(INVALIDATION_ID_FIELD); return EhcacheEntityResponse.clientInvalidateAll(cacheId, invalidationId); } case SERVER_INVALIDATE_HASH: { - String cacheId = ByteBuffer.wrap(payArr, 0, payArr.length - 8).asCharBuffer().toString(); - ByteBuffer byteBuffer = ByteBuffer.wrap(payArr, payArr.length - 8, 8); - long key = byteBuffer.getLong(); + decoder = SERVER_INVALIDATE_HASH_RESPONSE_STRUCT.decoder(buffer); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + long key = decoder.int64(KEY_FIELD); return EhcacheEntityResponse.serverInvalidateHash(cacheId, key); } case MAP_VALUE: { - return EhcacheEntityResponse.mapValue(Util.unmarshall(payArr)); + decoder = MAP_VALUE_RESPONSE_STRUCT.decoder(buffer); + return EhcacheEntityResponse.mapValue(Util.unmarshall(decoder.byteBuffer(MAP_VALUE_FIELD))); } default: - throw new UnsupportedOperationException("The operation is not supported with opCode : " + type); + throw new UnsupportedOperationException("The operation is not supported with opCode : " + opCode); } } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java index 7b376c1675..710fd86dfa 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java @@ -19,37 +19,40 @@ import 
org.ehcache.clustered.common.internal.store.Chain; import java.nio.ByteBuffer; +import java.util.UUID; public class ServerStoreMessageFactory { private final String cacheId; + private final UUID clientId; - public ServerStoreMessageFactory(String cacheId) { + public ServerStoreMessageFactory(String cacheId, UUID clientId) { this.cacheId = cacheId; + this.clientId = clientId; } - public EhcacheEntityMessage getOperation(long key) { + public ServerStoreOpMessage.GetMessage getOperation(long key) { return new ServerStoreOpMessage.GetMessage(this.cacheId, key); } - public EhcacheEntityMessage getAndAppendOperation(long key, ByteBuffer payload) { - return new ServerStoreOpMessage.GetAndAppendMessage(this.cacheId, key, payload); + public ServerStoreOpMessage.GetAndAppendMessage getAndAppendOperation(long key, ByteBuffer payload) { + return new ServerStoreOpMessage.GetAndAppendMessage(this.cacheId, key, payload, clientId); } - public EhcacheEntityMessage appendOperation(long key, ByteBuffer payload) { - return new ServerStoreOpMessage.AppendMessage(this.cacheId, key, payload); + public ServerStoreOpMessage.AppendMessage appendOperation(long key, ByteBuffer payload) { + return new ServerStoreOpMessage.AppendMessage(this.cacheId, key, payload, clientId); } - public EhcacheEntityMessage replaceAtHeadOperation(long key, Chain expect, Chain update) { - return new ServerStoreOpMessage.ReplaceAtHeadMessage(this.cacheId, key, expect, update); + public ServerStoreOpMessage.ReplaceAtHeadMessage replaceAtHeadOperation(long key, Chain expect, Chain update) { + return new ServerStoreOpMessage.ReplaceAtHeadMessage(this.cacheId, key, expect, update, clientId); } - public EhcacheEntityMessage clientInvalidationAck(int invalidationId) { + public ServerStoreOpMessage.ClientInvalidationAck clientInvalidationAck(int invalidationId) { return new ServerStoreOpMessage.ClientInvalidationAck(this.cacheId, invalidationId); } - public EhcacheEntityMessage clearOperation() { - return new 
ServerStoreOpMessage.ClearMessage(this.cacheId); + public ServerStoreOpMessage.ClearMessage clearOperation() { + return new ServerStoreOpMessage.ClearMessage(this.cacheId, clientId); } public String getCacheId() { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java index 67ee534700..e5f5a28fba 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java @@ -18,139 +18,239 @@ import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.AppendMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClearMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetAndAppendMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ReplaceAtHeadMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp; +import org.ehcache.clustered.common.internal.store.Chain; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; +import org.terracotta.runnel.encoding.StructEncoderFunction; import java.nio.ByteBuffer; +import java.util.UUID; + +import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT; +import static 
org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; + +public class ServerStoreOpCodec { + + private static final Struct GET_AND_APPEND_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .int64(KEY_FIELD, 40) + .byteBuffer("payload", 50) + .build(); -class ServerStoreOpCodec { + private static final Struct APPEND_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .int64(KEY_FIELD, 40) + .byteBuffer("payload", 50) + .build(); - private static final byte STORE_OP_CODE_SIZE = 1; - private static final byte CACHE_ID_LEN_SIZE = 4; - private static final byte KEY_SIZE = 8; - private static final byte CHAIN_LEN_SIZE = 4; - private static final byte INVALIDATION_ID_LEN_SIZE = 4; + private static final Struct REPLACE_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, 
MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .int64(KEY_FIELD, 40) + .struct("expect", 50, CHAIN_STRUCT) + .struct("update", 60, CHAIN_STRUCT) + .build(); + + private static final Struct CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .string(SERVER_STORE_NAME_FIELD, 30) + .int32("invalidationId", 40) + .build(); + + private static final Struct CLEAR_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .build(); + + private static final Struct GET_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .string(SERVER_STORE_NAME_FIELD, 30) + .int64(KEY_FIELD, 40) + .build(); private final ChainCodec chainCodec; + private final MessageCodecUtils messageCodecUtils = new MessageCodecUtils(); - ServerStoreOpCodec() { + public ServerStoreOpCodec() { this.chainCodec = new ChainCodec(); } public byte[] encode(ServerStoreOpMessage message) { - // TODO: improve data send over n/w by optimizing cache Id - ByteBuffer encodedMsg; - int cacheIdLen = message.getCacheId().length(); - switch (message.operation()) { - case GET: - GetMessage getMessage = (GetMessage)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + KEY_SIZE + 2 * cacheIdLen); - encodedMsg.put(getMessage.getOpCode()); - encodedMsg.putLong(getMessage.getKey()); - CodecUtil.putStringAsCharArray(encodedMsg, getMessage.getCacheId()); - return encodedMsg.array(); + StructEncoder encoder = null; 
+ + switch (message.getMessageType()) { + case GET_STORE: + GetMessage getMessage = (GetMessage) message; + encoder = GET_MESSAGE_STRUCT.encoder(); + return encoder + .enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .int64(MSG_ID_FIELD, message.getId()) + .string(SERVER_STORE_NAME_FIELD, getMessage.getCacheId()) + .int64(KEY_FIELD, getMessage.getKey()) + .encode() + .array(); case APPEND: - AppendMessage appendMessage = (AppendMessage)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + 2 * cacheIdLen + appendMessage - .getPayload() - .remaining()); - putCacheIdKeyAndOpCode(encodedMsg, appendMessage.getCacheId(), appendMessage.getKey(), appendMessage.getOpCode()); - encodedMsg.put(appendMessage.getPayload()); - return encodedMsg.array(); + AppendMessage appendMessage = (AppendMessage) message; + encoder = APPEND_MESSAGE_STRUCT.encoder(); + messageCodecUtils.encodeMandatoryFields(encoder, message); + return encoder + .string(SERVER_STORE_NAME_FIELD, appendMessage.getCacheId()) + .int64(KEY_FIELD, appendMessage.getKey()) + .byteBuffer("payload", appendMessage.getPayload()) + .encode() + .array(); case GET_AND_APPEND: - GetAndAppendMessage getAndAppendMessage = (GetAndAppendMessage)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + 2 * cacheIdLen + - getAndAppendMessage.getPayload().remaining()); - putCacheIdKeyAndOpCode(encodedMsg, getAndAppendMessage.getCacheId(), getAndAppendMessage.getKey(), getAndAppendMessage.getOpCode()); - encodedMsg.put(getAndAppendMessage.getPayload()); - return encodedMsg.array(); + GetAndAppendMessage getAndAppendMessage = (GetAndAppendMessage) message; + encoder = GET_AND_APPEND_MESSAGE_STRUCT.encoder(); + messageCodecUtils.encodeMandatoryFields(encoder, message); + return encoder + .string(SERVER_STORE_NAME_FIELD, getAndAppendMessage.getCacheId()) + .int64(KEY_FIELD, getAndAppendMessage.getKey()) + .byteBuffer("payload", 
getAndAppendMessage.getPayload()) + .encode() + .array(); case REPLACE: - ReplaceAtHeadMessage replaceAtHeadMessage = (ReplaceAtHeadMessage)message; - byte[] encodedExpectedChain = chainCodec.encode(replaceAtHeadMessage.getExpect()); - byte[] encodedUpdatedChain = chainCodec.encode(replaceAtHeadMessage.getUpdate()); - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + 2 * cacheIdLen + - CHAIN_LEN_SIZE + encodedExpectedChain.length + encodedUpdatedChain.length); - putCacheIdKeyAndOpCode(encodedMsg, replaceAtHeadMessage.getCacheId(), replaceAtHeadMessage.getKey(), replaceAtHeadMessage.getOpCode()); - encodedMsg.putInt(encodedExpectedChain.length); - encodedMsg.put(encodedExpectedChain); - encodedMsg.put(encodedUpdatedChain); - return encodedMsg.array(); + final ReplaceAtHeadMessage replaceAtHeadMessage = (ReplaceAtHeadMessage) message; + encoder = REPLACE_MESSAGE_STRUCT.encoder(); + messageCodecUtils.encodeMandatoryFields(encoder, message); + return encoder + .string(SERVER_STORE_NAME_FIELD, replaceAtHeadMessage.getCacheId()) + .int64(KEY_FIELD, replaceAtHeadMessage.getKey()) + .struct("expect", new StructEncoderFunction>>() { + @Override + public void encode(StructEncoder> encoder) { + Chain expect = replaceAtHeadMessage.getExpect(); + chainCodec.encode(encoder, expect); + } + }) + .struct("update", new StructEncoderFunction>>() { + @Override + public void encode(StructEncoder> encoder) { + Chain update = replaceAtHeadMessage.getUpdate(); + chainCodec.encode(encoder, update); + } + }) + .encode() + .array(); case CLIENT_INVALIDATION_ACK: - ClientInvalidationAck clientInvalidationAck = (ClientInvalidationAck)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + INVALIDATION_ID_LEN_SIZE + 2 * cacheIdLen); - encodedMsg.put(clientInvalidationAck.getOpCode()); - encodedMsg.putInt(clientInvalidationAck.getInvalidationId()); - CodecUtil.putStringAsCharArray(encodedMsg, clientInvalidationAck.getCacheId()); - return 
encodedMsg.array(); + ClientInvalidationAck clientInvalidationAckMessage = (ClientInvalidationAck) message; + encoder = CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT.encoder(); + return encoder + .enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .int64(MSG_ID_FIELD, message.getId()) + .string(SERVER_STORE_NAME_FIELD, clientInvalidationAckMessage.getCacheId()) + .int32("invalidationId", clientInvalidationAckMessage.getInvalidationId()) + .encode() + .array(); case CLEAR: - ClearMessage clearMessage = (ClearMessage)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + 2 * cacheIdLen); - encodedMsg.put(clearMessage.getOpCode()); - CodecUtil.putStringAsCharArray(encodedMsg, clearMessage.getCacheId()); - return encodedMsg.array(); + ClearMessage clearMessage = (ClearMessage) message; + encoder = CLEAR_MESSAGE_STRUCT.encoder(); + messageCodecUtils.encodeMandatoryFields(encoder, message); + return encoder + .string(SERVER_STORE_NAME_FIELD, clearMessage.getCacheId()) + .encode() + .array(); default: - throw new UnsupportedOperationException("This operation is not supported : " + message.operation()); + throw new RuntimeException("Unhandled message operation : " + message.getMessageType()); } } - // This assumes correct allocation and puts extracts common code - private static void putCacheIdKeyAndOpCode(ByteBuffer byteBuffer, String cacheId, long key, byte opcode) { - byteBuffer.put(opcode); - byteBuffer.putInt(cacheId.length()); - CodecUtil.putStringAsCharArray(byteBuffer, cacheId); - byteBuffer.putLong(key); - } - - public EhcacheEntityMessage decode(byte[] payload) { - ByteBuffer msg = ByteBuffer.wrap(payload); - byte opCode = msg.get(); - ServerStoreOp storeOp = ServerStoreOp.getServerStoreOp(opCode); - - long key; - String cacheId; - - switch (storeOp) { - case GET: - key = msg.getLong(); - cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); - return new GetMessage(cacheId, key); - case GET_AND_APPEND: - cacheId = 
readStringFromBufferWithSize(msg); - key = msg.getLong(); - return new GetAndAppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer()); - case APPEND: - cacheId = readStringFromBufferWithSize(msg); - key = msg.getLong(); - return new AppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer()); - case REPLACE: - cacheId = readStringFromBufferWithSize(msg); - key = msg.getLong(); - int expectChainLen = msg.getInt(); - byte[] encodedExpectChain = new byte[expectChainLen]; - msg.get(encodedExpectChain); - int updateChainLen = msg.remaining(); - byte[] encodedUpdateChain = new byte[updateChainLen]; - msg.get(encodedUpdateChain); - return new ReplaceAtHeadMessage(cacheId, key, chainCodec.decode(encodedExpectChain), - chainCodec.decode(encodedUpdateChain)); - case CLIENT_INVALIDATION_ACK: - int invalidationId = msg.getInt(); - cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); - return new ClientInvalidationAck(cacheId, invalidationId); - case CLEAR: - cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); - return new ClearMessage(cacheId); + public EhcacheEntityMessage decode(EhcacheMessageType opCode, ByteBuffer messageBuffer) { + StructDecoder decoder; + switch (opCode) { + case GET_STORE: { + decoder = GET_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + GetMessage message = new GetMessage(cacheId, key); + message.setId(msgId); + return message; + } + case GET_AND_APPEND: { + decoder = GET_AND_APPEND_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID uuid = messageCodecUtils.decodeUUID(decoder); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + ByteBuffer payload = decoder.byteBuffer("payload"); + GetAndAppendMessage message = new GetAndAppendMessage(cacheId, key, payload, uuid); + message.setId(msgId); + return 
message; + } + case APPEND: { + decoder = APPEND_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID uuid = messageCodecUtils.decodeUUID(decoder); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + ByteBuffer payload = decoder.byteBuffer("payload"); + AppendMessage message = new AppendMessage(cacheId, key, payload, uuid); + message.setId(msgId); + return message; + } + case REPLACE: { + decoder = REPLACE_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID uuid = messageCodecUtils.decodeUUID(decoder); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + Chain expect = chainCodec.decode(decoder.struct("expect")); + Chain update = chainCodec.decode(decoder.struct("update")); + ReplaceAtHeadMessage message = new ReplaceAtHeadMessage(cacheId, key, expect, update, uuid); + message.setId(msgId); + return message; + } + case CLIENT_INVALIDATION_ACK: { + decoder = CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Integer invalidationId = decoder.int32("invalidationId"); + ClientInvalidationAck message = new ClientInvalidationAck(cacheId, invalidationId); + message.setId(msgId); + return message; + } + case CLEAR: { + decoder = CLEAR_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID uuid = messageCodecUtils.decodeUUID(decoder); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + ClearMessage message = new ClearMessage(cacheId, uuid); + message.setId(msgId); + return message; + } default: - throw new UnsupportedOperationException("This operation code is not supported : " + opCode); + throw new RuntimeException("Unhandled message operation : " + opCode); } } - private static String readStringFromBufferWithSize(ByteBuffer buffer) { - int 
length = buffer.getInt(); - return CodecUtil.getStringFromBuffer(buffer, length); - } - } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java index 44eed62261..05f0534e48 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java @@ -19,47 +19,29 @@ import org.ehcache.clustered.common.internal.store.Chain; import java.nio.ByteBuffer; +import java.util.UUID; -public abstract class ServerStoreOpMessage extends EhcacheEntityMessage implements ConcurrentEntityMessage { - public enum ServerStoreOp { +public abstract class ServerStoreOpMessage extends EhcacheOperationMessage { - GET_AND_APPEND((byte) 11), - APPEND((byte) 12), - REPLACE((byte) 13), - CLIENT_INVALIDATION_ACK((byte) 14), - CLEAR((byte) 15), - GET((byte) 16), - ; + protected UUID clientId; + protected long id = NOT_REPLICATED; - private final byte storeOpCode; - - ServerStoreOp(byte storeOpCode) { - this.storeOpCode = storeOpCode; - } - - public byte getStoreOpCode() { - return this.storeOpCode; + @Override + public UUID getClientId() { + if (clientId == null) { + throw new AssertionError("Client Id is not supported for message type " + this.getMessageType() ); } + return this.clientId; + } - public static ServerStoreOp getServerStoreOp(byte storeOpCode) { - switch (storeOpCode) { - case 11: - return GET_AND_APPEND; - case 12: - return APPEND; - case 13: - return REPLACE; - case 14: - return CLIENT_INVALIDATION_ACK; - case 15: - return CLEAR; - case 16: - return GET; - default: - throw new IllegalArgumentException("Store operation not defined for : " + storeOpCode); - } - } + @Override + public long getId() { + return this.id; + } + @Override + public void setId(long id) { + 
this.id = id; } private final String cacheId; @@ -72,29 +54,7 @@ public String getCacheId() { return cacheId; } - @Override - public int concurrencyKey() { - return cacheId.hashCode(); - } - - @Override - public Type getType() { - return Type.SERVER_STORE_OP; - } - - public abstract ServerStoreOp operation(); - - @Override - public byte getOpCode() { - return operation().getStoreOpCode(); - } - - @Override - public String toString() { - return getType() + "#" + operation(); - } - - static abstract class KeyBasedServerStoreOpMessage extends ServerStoreOpMessage { + public static abstract class KeyBasedServerStoreOpMessage extends ServerStoreOpMessage implements ConcurrentEntityMessage { private final long key; @@ -108,8 +68,8 @@ public long getKey() { } @Override - public int concurrencyKey() { - return (int) (super.concurrencyKey() + key); + public long concurrencyKey() { + return key; } } @@ -120,8 +80,8 @@ public static class GetMessage extends KeyBasedServerStoreOpMessage { } @Override - public ServerStoreOp operation() { - return ServerStoreOp.GET; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.GET_STORE; } } @@ -129,14 +89,15 @@ public static class GetAndAppendMessage extends KeyBasedServerStoreOpMessage { private final ByteBuffer payload; - GetAndAppendMessage(String cacheId, long key, ByteBuffer payload) { + GetAndAppendMessage(String cacheId, long key, ByteBuffer payload, UUID clientId) { super(cacheId, key); this.payload = payload; + this.clientId = clientId; } @Override - public ServerStoreOp operation() { - return ServerStoreOp.GET_AND_APPEND; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.GET_AND_APPEND; } public ByteBuffer getPayload() { @@ -149,14 +110,15 @@ public static class AppendMessage extends KeyBasedServerStoreOpMessage { private final ByteBuffer payload; - AppendMessage(String cacheId, long key, ByteBuffer payload) { + AppendMessage(String cacheId, long key, ByteBuffer payload, UUID 
clientId) { super(cacheId, key); this.payload = payload; + this.clientId = clientId; } @Override - public ServerStoreOp operation() { - return ServerStoreOp.APPEND; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.APPEND; } public ByteBuffer getPayload() { @@ -170,15 +132,16 @@ public static class ReplaceAtHeadMessage extends KeyBasedServerStoreOpMessage { private final Chain expect; private final Chain update; - ReplaceAtHeadMessage(String cacheId, long key, Chain expect, Chain update) { + ReplaceAtHeadMessage(String cacheId, long key, Chain expect, Chain update, UUID clientId) { super(cacheId, key); this.expect = expect; this.update = update; + this.clientId = clientId; } @Override - public ServerStoreOp operation() { - return ServerStoreOp.REPLACE; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.REPLACE; } public Chain getExpect() { @@ -204,20 +167,21 @@ public int getInvalidationId() { } @Override - public ServerStoreOp operation() { - return ServerStoreOp.CLIENT_INVALIDATION_ACK; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CLIENT_INVALIDATION_ACK; } } - static class ClearMessage extends ServerStoreOpMessage { + public static class ClearMessage extends ServerStoreOpMessage { - ClearMessage(final String cacheId) { + ClearMessage(String cacheId, UUID clientId) { super(cacheId); + this.clientId = clientId; } @Override - public ServerStoreOp operation() { - return ServerStoreOp.CLEAR; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CLEAR; } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java index c1ae209398..c03d77cedb 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java +++ 
b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java @@ -16,26 +16,30 @@ package org.ehcache.clustered.common.internal.messages; +import java.util.UUID; + public class StateRepositoryMessageFactory { private final String cacheId; private final String mapId; + private final UUID clientId; - public StateRepositoryMessageFactory(String cacheId, String mapId) { + public StateRepositoryMessageFactory(String cacheId, String mapId, UUID clientId) { this.cacheId = cacheId; this.mapId = mapId; + this.clientId = clientId; } public StateRepositoryOpMessage getMessage(Object key) { - return new StateRepositoryOpMessage.GetMessage(cacheId, mapId, key); + return new StateRepositoryOpMessage.GetMessage(cacheId, mapId, key, clientId); } public StateRepositoryOpMessage putIfAbsentMessage(Object key, Object value) { - return new StateRepositoryOpMessage.PutIfAbsentMessage(cacheId, mapId, key, value); + return new StateRepositoryOpMessage.PutIfAbsentMessage(cacheId, mapId, key, value, clientId); } public StateRepositoryOpMessage entrySetMessage() { - return new StateRepositoryOpMessage.EntrySetMessage(cacheId, mapId); + return new StateRepositoryOpMessage.EntrySetMessage(cacheId, mapId, clientId); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java index 3a198fa3b3..249da64c47 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java @@ -17,31 +17,171 @@ package org.ehcache.clustered.common.internal.messages; import org.ehcache.clustered.common.internal.store.Util; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.decoding.StructDecoder; +import 
org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; +import java.util.UUID; + +import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; public class StateRepositoryOpCodec { - private static final byte OPCODE_SIZE = 1; + private static final String MAP_ID_FIELD = "mapId"; + private static final String VALUE_FIELD = "value"; + + private static final Struct GET_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .string(MAP_ID_FIELD, 35) + .byteBuffer(KEY_FIELD, 40) + .build(); + + private static final Struct PUT_IF_ABSENT_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .string(MAP_ID_FIELD, 35) + .byteBuffer(KEY_FIELD, 40) + .byteBuffer(VALUE_FIELD, 45) + .build(); + + private static final 
Struct ENTRY_SET_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .string(MAP_ID_FIELD, 35) + .build(); + + private final MessageCodecUtils messageCodecUtils = new MessageCodecUtils(); public byte[] encode(StateRepositoryOpMessage message) { - byte[] encodedMsg = Util.marshall(message); - ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + encodedMsg.length); - buffer.put(message.getOpCode()); - buffer.put(encodedMsg); - return buffer.array(); - } - - public StateRepositoryOpMessage decode(byte[] payload) { - ByteBuffer message = ByteBuffer.wrap(payload); - byte[] encodedMsg = new byte[message.capacity() - OPCODE_SIZE]; - byte opCode = message.get(); - if (opCode == EhcacheEntityMessage.Type.STATE_REPO_OP.getCode()) { - message.get(encodedMsg, 0, encodedMsg.length); - StateRepositoryOpMessage entityMessage = (StateRepositoryOpMessage) Util.unmarshall(encodedMsg); - return entityMessage; - } else { - throw new UnsupportedOperationException("State repository operation not defined for : " + opCode); + + switch (message.getMessageType()) { + case GET_STATE_REPO: + return encodeGetMessage((StateRepositoryOpMessage.GetMessage) message); + case PUT_IF_ABSENT: + return encodePutIfAbsentMessage((StateRepositoryOpMessage.PutIfAbsentMessage) message); + case ENTRY_SET: + return encodeEntrySetMessage((StateRepositoryOpMessage.EntrySetMessage) message); + default: + throw new IllegalArgumentException("Unsupported StateRepositoryOpMessage " + message.getClass()); } } + + private byte[] encodeEntrySetMessage(StateRepositoryOpMessage.EntrySetMessage message) { + StructEncoder encoder = ENTRY_SET_MESSAGE_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); + 
encoder.string(MAP_ID_FIELD, message.getCacheId()); + + return encoder.encode().array(); + } + + private byte[] encodePutIfAbsentMessage(StateRepositoryOpMessage.PutIfAbsentMessage message) { + StructEncoder encoder = PUT_IF_ABSENT_MESSAGE_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); + encoder.string(MAP_ID_FIELD, message.getCacheId()); + // TODO this needs to change - serialization needs to happen in the StateRepo not here, though we need the hashcode for server side comparison. + encoder.byteBuffer(KEY_FIELD, wrap(Util.marshall(message.getKey()))); + encoder.byteBuffer(VALUE_FIELD, wrap(Util.marshall(message.getValue()))); + + return encoder.encode().array(); + } + + private byte[] encodeGetMessage(StateRepositoryOpMessage.GetMessage message) { + StructEncoder encoder = GET_MESSAGE_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); + encoder.string(MAP_ID_FIELD, message.getCacheId()); + // TODO this needs to change - serialization needs to happen in the StateRepo not here, though we need the hashcode for server side comparison. 
+ encoder.byteBuffer(KEY_FIELD, wrap(Util.marshall(message.getKey()))); + + return encoder.encode().array(); + } + + public StateRepositoryOpMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { + switch (messageType) { + case GET_STATE_REPO: + return decodeGetMessage(messageBuffer); + case PUT_IF_ABSENT: + return decodePutIfAbsentMessage(messageBuffer); + case ENTRY_SET: + return decodeEntrySetMessage(messageBuffer); + default: + throw new IllegalArgumentException("Unsupported StateRepositoryOpMessage " + messageType); + } + } + + private StateRepositoryOpMessage.EntrySetMessage decodeEntrySetMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = ENTRY_SET_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + String mapId = decoder.string(MAP_ID_FIELD); + + StateRepositoryOpMessage.EntrySetMessage message = new StateRepositoryOpMessage.EntrySetMessage(storeName, mapId, clientId); + message.setId(msgId); + return message; + } + + private StateRepositoryOpMessage.PutIfAbsentMessage decodePutIfAbsentMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = PUT_IF_ABSENT_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + String mapId = decoder.string(MAP_ID_FIELD); + + ByteBuffer keyBuffer = decoder.byteBuffer(KEY_FIELD); + Object key = Util.unmarshall(keyBuffer); + + ByteBuffer valueBuffer = decoder.byteBuffer(VALUE_FIELD); + Object value = Util.unmarshall(valueBuffer); + + StateRepositoryOpMessage.PutIfAbsentMessage message = new StateRepositoryOpMessage.PutIfAbsentMessage(storeName, mapId, key, value, clientId); + message.setId(msgId); + return message; + } + + private StateRepositoryOpMessage.GetMessage 
decodeGetMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = GET_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + String mapId = decoder.string(MAP_ID_FIELD); + + ByteBuffer keyBuffer = decoder.byteBuffer(KEY_FIELD); + Object key = Util.unmarshall(keyBuffer); + + StateRepositoryOpMessage.GetMessage getMessage = new StateRepositoryOpMessage.GetMessage(storeName, mapId, key, clientId); + getMessage.setId(msgId); + return getMessage; + } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java index 141a1082bc..53412e3cb8 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java @@ -17,54 +17,54 @@ package org.ehcache.clustered.common.internal.messages; import java.io.Serializable; +import java.util.UUID; -public abstract class StateRepositoryOpMessage extends EhcacheEntityMessage implements Serializable { - - public enum StateRepositoryOp { - GET, - PUT_IF_ABSENT, - ENTRY_SET, - } +public abstract class StateRepositoryOpMessage extends EhcacheOperationMessage implements Serializable { private final String cacheId; private final String mapId; - private StateRepositoryOpMessage(String cacheId, String mapId) { + private UUID clientId; + protected long id = NOT_REPLICATED; + + private StateRepositoryOpMessage(String cacheId, String mapId, UUID clientId) { this.cacheId = cacheId; this.mapId = mapId; + this.clientId = clientId; } - public String getCacheId() { - return cacheId; + @Override + public UUID getClientId() { + if (clientId == null) { + throw new 
AssertionError("Client Id cannot be null for StateRepository messages"); + } + return this.clientId; } - public String getMapId() { - return mapId; + @Override + public long getId() { + return this.id; } @Override - public Type getType() { - return Type.STATE_REPO_OP; + public void setId(long id) { + this.id = id; } - public abstract StateRepositoryOp operation(); - - @Override - public byte getOpCode() { - return getType().getCode(); + public String getCacheId() { + return cacheId; } - @Override - public String toString() { - return getType() + "#" + operation(); + public String getMapId() { + return mapId; } private static abstract class KeyBasedMessage extends StateRepositoryOpMessage { private final Object key; - private KeyBasedMessage(final String cacheId, final String mapId, final Object key) { - super(cacheId, mapId); + private KeyBasedMessage(final String cacheId, final String mapId, final Object key, final UUID clientId) { + super(cacheId, mapId, clientId); this.key = key; } @@ -76,13 +76,13 @@ public Object getKey() { public static class GetMessage extends KeyBasedMessage { - public GetMessage(final String cacheId, final String mapId, final Object key) { - super(cacheId, mapId, key); + public GetMessage(final String cacheId, final String mapId, final Object key, final UUID clientId) { + super(cacheId, mapId, key, clientId); } @Override - public StateRepositoryOp operation() { - return StateRepositoryOp.GET; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.GET_STATE_REPO; } } @@ -90,8 +90,8 @@ public static class PutIfAbsentMessage extends KeyBasedMessage { private final Object value; - public PutIfAbsentMessage(final String cacheId, final String mapId, final Object key, final Object value) { - super(cacheId, mapId, key); + public PutIfAbsentMessage(final String cacheId, final String mapId, final Object key, final Object value, final UUID clientId) { + super(cacheId, mapId, key, clientId); this.value = value; } @@ -100,20 +100,20 
@@ public Object getValue() { } @Override - public StateRepositoryOp operation() { - return StateRepositoryOp.PUT_IF_ABSENT; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.PUT_IF_ABSENT; } } public static class EntrySetMessage extends StateRepositoryOpMessage { - public EntrySetMessage(final String cacheId, final String mapId) { - super(cacheId, mapId); + public EntrySetMessage(final String cacheId, final String mapId, final UUID clientId) { + super(cacheId, mapId, clientId); } @Override - public StateRepositoryOp operation() { - return StateRepositoryOp.ENTRY_SET; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.ENTRY_SET; } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java index 1e8fc71029..3affaf8a0f 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java @@ -16,8 +16,11 @@ package org.ehcache.clustered.common.internal.store; +import org.ehcache.clustered.common.internal.util.ByteBufferInputStream; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.Closeable; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; @@ -147,12 +150,30 @@ public ByteBuffer getPayload() { } public static Object unmarshall(byte[] payload) { + ObjectInputStream objectInputStream = null; + try { + objectInputStream = new ObjectInputStream(new ByteArrayInputStream(payload)); + return objectInputStream.readObject(); + } catch (IOException ex) { + throw new IllegalArgumentException(ex); + } catch (ClassNotFoundException ex) { + throw new IllegalArgumentException(ex); + } finally { + closeSilently(objectInputStream); + } + } + + public static Object unmarshall(ByteBuffer payload) { + 
ObjectInputStream objectInputStream = null; try { - return new ObjectInputStream(new ByteArrayInputStream(payload)).readObject(); + objectInputStream = new ObjectInputStream(new ByteBufferInputStream(payload)); + return objectInputStream.readObject(); } catch (IOException ex) { throw new IllegalArgumentException(ex); } catch (ClassNotFoundException ex) { throw new IllegalArgumentException(ex); + } finally { + closeSilently(objectInputStream); } } @@ -163,11 +184,21 @@ public static byte[] marshall(Object message) { try { oout.writeObject(message); } finally { - oout.close(); + closeSilently(oout); } } catch (IOException e) { throw new IllegalArgumentException(e); } return out.toByteArray(); } + + private static void closeSilently(Closeable closeable) { + if (closeable != null) { + try { + closeable.close(); + } catch (IOException e) { + // Ignore + } + } + } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java new file mode 100644 index 0000000000..0b0c1e60b0 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java @@ -0,0 +1,62 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.util; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; + +import static java.lang.Math.max; +import static java.lang.Math.min; + +// TODO remove once it comes with Runnel +public class ByteBufferInputStream extends InputStream { + + private final ByteBuffer buffer; + + public ByteBufferInputStream(ByteBuffer buffer) { + this.buffer = buffer.slice(); + } + + @Override + public int read() throws IOException { + if (buffer.hasRemaining()) { + return 0xff & buffer.get(); + } else { + return -1; + } + } + + @Override + public int read(byte b[], int off, int len) { + len = min(len, buffer.remaining()); + buffer.get(b, off, len); + return len; + } + + @Override + public long skip(long n) { + n = min(buffer.remaining(), max(n, 0)); + buffer.position((int) (buffer.position() + n)); + return n; + } + + @Override + public synchronized int available() { + return buffer.remaining(); + } +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java index ecf25303ef..8c5b68f8e1 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java @@ -18,7 +18,7 @@ import org.hamcrest.Matchers; import org.junit.Test; -import org.junit.internal.AssumptionViolatedException; +import org.junit.AssumptionViolatedException; import java.lang.reflect.Constructor; @@ -150,4 +150,4 @@ private void checkWithClientStack(T baseException) { assertThat(copyException.getMessage(), is(baseException.getMessage())); assertThat(copyException.getCause(), Matchers.is(baseException)); } -} \ No newline at end of file +} diff --git 
a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java new file mode 100644 index 0000000000..d8e1764146 --- /dev/null +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java @@ -0,0 +1,50 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.junit.Test; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.nio.ByteBuffer; +import java.util.Collections; + +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.*; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +public class CommonConfigCodecTest { + + private static final CommonConfigCodec CODEC = new CommonConfigCodec(); + + @Test + public void testEncodeDecodeServerSideConfiguration() throws Exception { + ServerSideConfiguration serverSideConfiguration = + new ServerSideConfiguration("foo", Collections.singletonMap("bar", new ServerSideConfiguration.Pool(1))); + Struct serverSideConfigurationStruct = CODEC.injectServerSideConfiguration(newStructBuilder(), 10).getUpdatedBuilder().build(); + StructEncoder encoder = serverSideConfigurationStruct.encoder(); + CODEC.encodeServerSideConfiguration(encoder, serverSideConfiguration); + ByteBuffer byteBuffer = encoder.encode(); + byteBuffer.rewind(); + ServerSideConfiguration decodedServerSideConfiguration = + CODEC.decodeServerSideConfiguration(serverSideConfigurationStruct.decoder(byteBuffer)); + assertThat(decodedServerSideConfiguration.getDefaultServerResource(), is("foo")); + assertThat(decodedServerSideConfiguration.getResourcePools(), hasKey("bar")); + } +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java index ef6fbc72d9..ee62af2270 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java @@ -16,77 +16,93 @@ 
package org.ehcache.clustered.common.internal.messages; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; + +import java.nio.ByteBuffer; +import java.util.UUID; -import static org.junit.Assert.*; import static org.mockito.Matchers.any; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.only; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.MockitoAnnotations.initMocks; public class EhcacheCodecTest { + private static final UUID CLIENT_ID = UUID.randomUUID(); + + @Mock + private ServerStoreOpCodec serverStoreOpCodec; + + @Mock + private LifeCycleMessageCodec lifeCycleMessageCodec; + + @Mock + private StateRepositoryOpCodec stateRepositoryOpCodec; + + private EhcacheCodec codec; + + @Before + public void setUp() { + initMocks(this); + + codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null); + } + @Test public void encodeMessage() throws Exception { - ServerStoreOpCodec serverStoreOpCodec = mock(ServerStoreOpCodec.class); - LifeCycleMessageCodec lifeCycleMessageCodec = mock(LifeCycleMessageCodec.class); - StateRepositoryOpCodec stateRepositoryOpCodec = mock(StateRepositoryOpCodec.class); - EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null); - - LifecycleMessage.DestroyServerStore lifecycleMessage = new LifecycleMessage.DestroyServerStore("foo"); + LifecycleMessage.DestroyServerStore lifecycleMessage = new LifecycleMessage.DestroyServerStore("foo", CLIENT_ID); codec.encodeMessage(lifecycleMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, never()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, 
never()).encode(any(StateRepositoryOpMessage.class)); - ServerStoreOpMessage.ClearMessage serverStoreOpMessage = new ServerStoreOpMessage.ClearMessage("foo"); + ServerStoreOpMessage.ClearMessage serverStoreOpMessage = new ServerStoreOpMessage.ClearMessage("foo", CLIENT_ID); codec.encodeMessage(serverStoreOpMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, never()).encode(any(StateRepositoryOpMessage.class)); - StateRepositoryOpMessage.EntrySetMessage stateRepositoryOpMessage = new StateRepositoryOpMessage.EntrySetMessage("foo", "bar"); + StateRepositoryOpMessage.EntrySetMessage stateRepositoryOpMessage = new StateRepositoryOpMessage.EntrySetMessage("foo", "bar", CLIENT_ID); codec.encodeMessage(stateRepositoryOpMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, only()).encode(any(StateRepositoryOpMessage.class)); + } @Test - public void decodeMessage() throws Exception { - ServerStoreOpCodec serverStoreOpCodec = mock(ServerStoreOpCodec.class); - LifeCycleMessageCodec lifeCycleMessageCodec = mock(LifeCycleMessageCodec.class); - StateRepositoryOpCodec stateRepositoryOpCodec = mock(StateRepositoryOpCodec.class); - EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null); - - byte[] payload = new byte[1]; - - for (byte i = 1; i <= EhcacheEntityMessage.Type.LIFECYCLE_OP.getCode(); i++) { - payload[0] = i; - codec.decodeMessage(payload); + public void decodeLifeCycleMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.LIFECYCLE_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + codec.decodeMessage(encodedBuffer.array()); } - 
verify(lifeCycleMessageCodec, times(10)).decode(payload); - verify(serverStoreOpCodec, never()).decode(payload); - verify(stateRepositoryOpCodec, never()).decode(payload); + verify(lifeCycleMessageCodec, times(EhcacheMessageType.LIFECYCLE_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); + verifyZeroInteractions(serverStoreOpCodec, stateRepositoryOpCodec); + } - for (byte i = 11; i <= EhcacheEntityMessage.Type.SERVER_STORE_OP.getCode(); i++) { - payload[0] = i; - codec.decodeMessage(payload); + @Test + public void decodeServerStoreMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.STORE_OPERATION_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + codec.decodeMessage(encodedBuffer.array()); } - verify(lifeCycleMessageCodec, times(10)).decode(payload); - verify(serverStoreOpCodec, times(10)).decode(payload); - verify(stateRepositoryOpCodec, never()).decode(payload); + verify(serverStoreOpCodec, times(EhcacheMessageType.STORE_OPERATION_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); + verifyZeroInteractions(lifeCycleMessageCodec, stateRepositoryOpCodec); + } - for (byte i = 21; i <= EhcacheEntityMessage.Type.STATE_REPO_OP.getCode(); i++) { - payload[0] = i; - codec.decodeMessage(payload); + @Test + public void decodeStateRepoMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.STATE_REPO_OPERATION_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + codec.decodeMessage(encodedBuffer.array()); } - verify(lifeCycleMessageCodec, times(10)).decode(payload); - verify(serverStoreOpCodec, times(10)).decode(payload); - verify(stateRepositoryOpCodec, times(10)).decode(payload); - + verify(stateRepositoryOpCodec, times(EhcacheMessageType.STATE_REPO_OPERATION_MESSAGES.size())).decode(any(EhcacheMessageType.class), 
any(ByteBuffer.class)); + verifyZeroInteractions(lifeCycleMessageCodec, serverStoreOpCodec); } -} \ No newline at end of file + +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java new file mode 100644 index 0000000000..4f1f8589ee --- /dev/null +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java @@ -0,0 +1,234 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.junit.Before; +import org.junit.Test; + +import java.util.Collections; +import java.util.UUID; + +import static java.nio.ByteBuffer.wrap; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.*; + +/** + * LifeCycleMessageCodecTest + */ +public class LifeCycleMessageCodecTest { + + private static final long MESSAGE_ID = 42L; + private static final UUID CLIENT_ID = UUID.randomUUID(); + + private final LifeCycleMessageFactory factory = new LifeCycleMessageFactory(); + private final LifeCycleMessageCodec codec = new LifeCycleMessageCodec(new CommonConfigCodec()); + + @Before + public void setUp() { + factory.setClientId(CLIENT_ID); + } + + @Test + public void testConfigureStoreManager() throws Exception { + ServerSideConfiguration configuration = getServerSideConfiguration(); + LifecycleMessage message = factory.configureStoreManager(configuration); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ConfigureStoreManager decodedMessage = (LifecycleMessage.ConfigureStoreManager) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CONFIGURE)); + assertThat(decodedMessage.getConfiguration().getDefaultServerResource(), is(configuration.getDefaultServerResource())); + assertThat(decodedMessage.getConfiguration().getResourcePools(), is(configuration.getResourcePools())); + } + + @Test + public void testValidateStoreManager() throws Exception { + ServerSideConfiguration configuration = 
getServerSideConfiguration(); + LifecycleMessage message = factory.validateStoreManager(configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ValidateStoreManager decodedMessage = (LifecycleMessage.ValidateStoreManager) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.VALIDATE)); + assertThat(decodedMessage.getConfiguration().getDefaultServerResource(), is(configuration.getDefaultServerResource())); + assertThat(decodedMessage.getConfiguration().getResourcePools(), is(configuration.getResourcePools())); + } + + @Test + public void testCreateServerStoreDedicated() throws Exception { + PoolAllocation.Dedicated dedicated = new PoolAllocation.Dedicated("dedicate", 420000L); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(dedicated, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.createServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.CreateServerStore decodedMessage = (LifecycleMessage.CreateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CREATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + PoolAllocation.Dedicated decodedPoolAllocation = (PoolAllocation.Dedicated) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourceName(), is(dedicated.getResourceName())); + assertThat(decodedPoolAllocation.getSize(), is(dedicated.getSize())); + } + + @Test + public void testCreateServerStoreShared() 
throws Exception { + PoolAllocation.Shared shared = new PoolAllocation.Shared("shared"); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(shared, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.createServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.CreateServerStore decodedMessage = (LifecycleMessage.CreateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CREATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + PoolAllocation.Shared decodedPoolAllocation = (PoolAllocation.Shared) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourcePoolName(), is(shared.getResourcePoolName())); + } + + @Test + public void testCreateServerStoreUnknown() throws Exception { + PoolAllocation.Unknown unknown = new PoolAllocation.Unknown(); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(unknown, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.createServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.CreateServerStore decodedMessage = (LifecycleMessage.CreateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CREATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + assertThat(decodedMessage.getStoreConfiguration().getPoolAllocation(), 
instanceOf(PoolAllocation.Unknown.class)); + } + + @Test + public void testValidateServerStoreDedicated() throws Exception { + PoolAllocation.Dedicated dedicated = new PoolAllocation.Dedicated("dedicate", 420000L); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(dedicated, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.validateServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ValidateServerStore decodedMessage = (LifecycleMessage.ValidateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.VALIDATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + PoolAllocation.Dedicated decodedPoolAllocation = (PoolAllocation.Dedicated) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourceName(), is(dedicated.getResourceName())); + assertThat(decodedPoolAllocation.getSize(), is(dedicated.getSize())); + } + + @Test + public void testValidateServerStoreShared() throws Exception { + PoolAllocation.Shared shared = new PoolAllocation.Shared("shared"); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(shared, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.validateServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ValidateServerStore decodedMessage = (LifecycleMessage.ValidateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), 
is(EhcacheMessageType.VALIDATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + PoolAllocation.Shared decodedPoolAllocation = (PoolAllocation.Shared) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourcePoolName(), is(shared.getResourcePoolName())); + } + + @Test + public void testValidateServerStoreUnknown() throws Exception { + PoolAllocation.Unknown unknown = new PoolAllocation.Unknown(); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(unknown, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.validateServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ValidateServerStore decodedMessage = (LifecycleMessage.ValidateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.VALIDATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + assertThat(decodedMessage.getStoreConfiguration().getPoolAllocation(), instanceOf(PoolAllocation.Unknown.class)); + } + + @Test + public void testReleaseServerStore() throws Exception { + LifecycleMessage message = factory.releaseServerStore("store1"); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ReleaseServerStore decodedMessage = (LifecycleMessage.ReleaseServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.RELEASE_SERVER_STORE)); + assertThat(decodedMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getName(), is("store1")); + } + + @Test + public void testDestroyServerStore() 
throws Exception { + LifecycleMessage message = factory.destroyServerStore("store1"); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.DestroyServerStore decodedMessage = (LifecycleMessage.DestroyServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.DESTROY_SERVER_STORE)); + assertThat(decodedMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getName(), is("store1")); + } + + private void validateCommonServerStoreConfig(LifecycleMessage.BaseServerStore decodedMessage, ServerStoreConfiguration initialConfiguration) { + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedMessage.getName(), is("store1")); + assertThat(decodedMessage.getStoreConfiguration().getStoredKeyType(), is(initialConfiguration.getStoredKeyType())); + assertThat(decodedMessage.getStoreConfiguration().getStoredValueType(), is(initialConfiguration.getStoredValueType())); + assertThat(decodedMessage.getStoreConfiguration().getActualKeyType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getActualValueType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getConsistency(), is(initialConfiguration.getConsistency())); + assertThat(decodedMessage.getStoreConfiguration().getKeySerializerType(), is(initialConfiguration.getKeySerializerType())); + assertThat(decodedMessage.getStoreConfiguration().getValueSerializerType(), is(initialConfiguration.getValueSerializerType())); + } + + private ServerSideConfiguration getServerSideConfiguration() { + return new ServerSideConfiguration("default", Collections.singletonMap("shared", new ServerSideConfiguration.Pool(100, "other"))); + } + +} diff --git 
a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java deleted file mode 100644 index 32520467e5..0000000000 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.ehcache.clustered.common.internal.messages; - -import org.hamcrest.Matchers; -import org.junit.Test; - -import java.util.HashSet; -import java.util.Set; - -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.junit.Assert.assertThat; - -public class ReconnectDataCodecTest { - - @Test - public void testCodec() { - Set cacheIds = new HashSet(); - cacheIds.add("test"); - cacheIds.add("test1"); - cacheIds.add("test2"); - - ReconnectDataCodec dataCodec = new ReconnectDataCodec(); - - Set decoded = dataCodec.decode(dataCodec.encode(cacheIds, 14)); - - assertThat(decoded, Matchers.hasSize(3)); - assertThat(decoded, containsInAnyOrder("test", "test1", "test2")); - - - } -} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java new file mode 100644 index 0000000000..8c8f8123d8 --- /dev/null +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java @@ -0,0 +1,71 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import org.junit.Test; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.UUID; + +import static org.junit.Assert.assertThat; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class ReconnectMessageCodecTest { + + @Test + public void testCodec() { + + Set caches = new HashSet(); + caches.add("test"); + caches.add("test1"); + caches.add("test2"); + + ReconnectMessage reconnectMessage = new ReconnectMessage(UUID.randomUUID(), caches); + + Set firstSetToInvalidate = new HashSet(); + firstSetToInvalidate.add(1L); + firstSetToInvalidate.add(11L); + firstSetToInvalidate.add(111L); + + Set secondSetToInvalidate = new HashSet(); + secondSetToInvalidate.add(2L); + secondSetToInvalidate.add(22L); + secondSetToInvalidate.add(222L); + secondSetToInvalidate.add(2222L); + reconnectMessage.addInvalidationsInProgress("test", firstSetToInvalidate); + reconnectMessage.addInvalidationsInProgress("test1", Collections.emptySet()); + reconnectMessage.addInvalidationsInProgress("test2", secondSetToInvalidate); + reconnectMessage.addClearInProgress("test"); + + ReconnectMessageCodec dataCodec = new ReconnectMessageCodec(); + + ReconnectMessage decoded = dataCodec.decode(dataCodec.encode(reconnectMessage)); + assertThat(decoded, notNullValue()); + assertThat(decoded.getClientId(), is(reconnectMessage.getClientId())); + assertThat(decoded.getAllCaches(), containsInAnyOrder("test", "test1", "test2")); + assertThat(decoded.getInvalidationsInProgress("test"), containsInAnyOrder(firstSetToInvalidate.toArray())); + assertThat(decoded.getInvalidationsInProgress("test1").isEmpty(), is(true)); + assertThat(decoded.getInvalidationsInProgress("test2"), containsInAnyOrder(secondSetToInvalidate.toArray())); + assertThat(decoded.isClearInProgress("test"), is(true)); + 
assertThat(decoded.isClearInProgress("test1"), is(false)); + assertThat(decoded.isClearInProgress("test2"), is(false)); + } +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java index 5199634b71..3f3a939153 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java @@ -18,6 +18,7 @@ import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; import org.ehcache.clustered.common.internal.store.Chain; +import org.hamcrest.Matchers; import org.junit.Test; import java.util.Date; @@ -28,13 +29,13 @@ import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; -/** - * - */ public class ResponseCodecTest { private static final EhcacheEntityResponseFactory RESPONSE_FACTORY = new EhcacheEntityResponseFactory(); private static final ResponseCodec RESPONSE_CODEC = new ResponseCodec(); + private static final String STORE_ID = "storeId"; + private static final long KEY = 42L; + private static final int INVALIDATION_ID = 134; @Test public void testFailureResponseCodec() { @@ -65,4 +66,68 @@ public void testMapValueCodec() throws Exception { (EhcacheEntityResponse.MapValue) RESPONSE_CODEC.decode(RESPONSE_CODEC.encode(mapValue)); assertThat(decoded.getValue(), equalTo(subject)); } + + @Test + public void testSuccess() throws Exception { + byte[] encoded = RESPONSE_CODEC.encode(EhcacheEntityResponse.Success.INSTANCE); + assertThat(RESPONSE_CODEC.decode(encoded), Matchers.sameInstance(EhcacheEntityResponse.Success.INSTANCE)); + } + + @Test + public void testHashInvalidationDone() throws Exception { + EhcacheEntityResponse.HashInvalidationDone response = new EhcacheEntityResponse.HashInvalidationDone(STORE_ID, KEY); + 
byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.HashInvalidationDone decodedResponse = (EhcacheEntityResponse.HashInvalidationDone) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.HASH_INVALIDATION_DONE)); + assertThat(decodedResponse.getCacheId(), is(STORE_ID)); + assertThat(decodedResponse.getKey(), is(KEY)); + } + + @Test + public void testAllInvalidationDone() throws Exception { + EhcacheEntityResponse.AllInvalidationDone response = new EhcacheEntityResponse.AllInvalidationDone(STORE_ID); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.AllInvalidationDone decodedResponse = (EhcacheEntityResponse.AllInvalidationDone) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.ALL_INVALIDATION_DONE)); + assertThat(decodedResponse.getCacheId(), is(STORE_ID)); + } + + @Test + public void testClientInvalidateHash() throws Exception { + EhcacheEntityResponse.ClientInvalidateHash response = new EhcacheEntityResponse.ClientInvalidateHash(STORE_ID, KEY, INVALIDATION_ID); + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ClientInvalidateHash decodedResponse = (EhcacheEntityResponse.ClientInvalidateHash) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.CLIENT_INVALIDATE_HASH)); + assertThat(decodedResponse.getCacheId(), is(STORE_ID)); + assertThat(decodedResponse.getKey(), is(KEY)); + assertThat(decodedResponse.getInvalidationId(), is(INVALIDATION_ID)); + } + + @Test + public void testClientInvalidateAll() throws Exception { + EhcacheEntityResponse.ClientInvalidateAll response = new EhcacheEntityResponse.ClientInvalidateAll(STORE_ID, INVALIDATION_ID); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ClientInvalidateAll decodedResponse = (EhcacheEntityResponse.ClientInvalidateAll) 
RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.CLIENT_INVALIDATE_ALL)); + assertThat(decodedResponse.getCacheId(), is(STORE_ID)); + assertThat(decodedResponse.getInvalidationId(), is(INVALIDATION_ID)); + } + + @Test + public void testServerInvalidateHash() throws Exception { + EhcacheEntityResponse.ServerInvalidateHash response = new EhcacheEntityResponse.ServerInvalidateHash(STORE_ID, KEY); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ServerInvalidateHash decodedResponse = (EhcacheEntityResponse.ServerInvalidateHash) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.SERVER_INVALIDATE_HASH)); + assertThat(decodedResponse.getCacheId(), is(STORE_ID)); + assertThat(decodedResponse.getKey(), is(KEY)); + } } diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactoryTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactoryTest.java index bfa0b140a3..3fab7dd997 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactoryTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactoryTest.java @@ -18,6 +18,8 @@ import org.junit.Test; +import java.util.UUID; + import static org.junit.Assert.assertThat; import static org.hamcrest.Matchers.is; import static org.ehcache.clustered.common.internal.store.Util.createPayload; @@ -26,7 +28,7 @@ public class ServerStoreMessageFactoryTest { - private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test"); + private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test", UUID.randomUUID()); @Test public void testAppendMessage() { diff --git 
a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java index a656adfbd4..0c7161bc5e 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java @@ -18,87 +18,133 @@ import org.junit.Test; + +import java.util.UUID; + +import static java.nio.ByteBuffer.wrap; import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; import static org.ehcache.clustered.common.internal.store.Util.readPayLoad; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; -/** - * - */ public class ServerStoreOpCodecTest { - private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test"); + private static final UUID CLIENT_ID = UUID.randomUUID(); + private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test", CLIENT_ID); private static final ServerStoreOpCodec STORE_OP_CODEC = new ServerStoreOpCodec(); @Test public void testAppendMessageCodec() { - EhcacheEntityMessage appendMessage = MESSAGE_FACTORY.appendOperation(1L, createPayload(1L)); + ServerStoreOpMessage.AppendMessage appendMessage = MESSAGE_FACTORY.appendOperation(1L, createPayload(1L)); + appendMessage.setId(42L); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(STORE_OP_CODEC.encode((ServerStoreOpMessage)appendMessage)); + byte[] encoded = STORE_OP_CODEC.encode(appendMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(appendMessage.getMessageType(), wrap(encoded)); 
ServerStoreOpMessage.AppendMessage decodedAppendMessage = (ServerStoreOpMessage.AppendMessage) decodedMsg; assertThat(decodedAppendMessage.getCacheId(), is("test")); assertThat(decodedAppendMessage.getKey(), is(1L)); assertThat(readPayLoad(decodedAppendMessage.getPayload()), is(1L)); + assertThat(decodedAppendMessage.getId(), is(42L)); + assertThat(decodedAppendMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedAppendMessage.getMessageType(), is(EhcacheMessageType.APPEND)); } @Test public void testGetMessageCodec() { - EhcacheEntityMessage getMessage = MESSAGE_FACTORY.getOperation(2L); + ServerStoreOpMessage getMessage = MESSAGE_FACTORY.getOperation(2L); + getMessage.setId(42L); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(STORE_OP_CODEC.encode((ServerStoreOpMessage)getMessage)); + byte[] encoded = STORE_OP_CODEC.encode(getMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(getMessage.getMessageType(), wrap(encoded)); ServerStoreOpMessage.GetMessage decodedGetMessage = (ServerStoreOpMessage.GetMessage) decodedMsg; assertThat(decodedGetMessage.getCacheId(), is("test")); assertThat(decodedGetMessage.getKey(), is(2L)); + assertThat(decodedGetMessage.getId(), is(42L)); + assertThat(decodedGetMessage.getMessageType(), is(EhcacheMessageType.GET_STORE)); + try { + decodedGetMessage.getClientId(); + fail("AssertionError expected"); + } catch (AssertionError error) { + assertThat(error.getMessage(), containsString("Client Id is not supported")); + } + } @Test public void testGetAndAppendMessageCodec() { - EhcacheEntityMessage getAndAppendMessage = MESSAGE_FACTORY.getAndAppendOperation(10L, createPayload(10L)); + ServerStoreOpMessage getAndAppendMessage = MESSAGE_FACTORY.getAndAppendOperation(10L, createPayload(10L)); + getAndAppendMessage.setId(123L); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(STORE_OP_CODEC.encode((ServerStoreOpMessage)getAndAppendMessage)); + byte[] encoded = STORE_OP_CODEC.encode(getAndAppendMessage); + 
EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(getAndAppendMessage.getMessageType(), wrap(encoded)); ServerStoreOpMessage.GetAndAppendMessage decodedGetAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage) decodedMsg; assertThat(decodedGetAndAppendMessage.getCacheId(), is("test")); assertThat(decodedGetAndAppendMessage.getKey(), is(10L)); assertThat(readPayLoad(decodedGetAndAppendMessage.getPayload()), is(10L)); + assertThat(decodedGetAndAppendMessage.getId(), is(123L)); + assertThat(decodedGetAndAppendMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedGetAndAppendMessage.getMessageType(), is(EhcacheMessageType.GET_AND_APPEND)); } @Test public void testReplaceAtHeadMessageCodec() { - EhcacheEntityMessage replaceAtHeadMessage = MESSAGE_FACTORY.replaceAtHeadOperation(10L, + ServerStoreOpMessage replaceAtHeadMessage = MESSAGE_FACTORY.replaceAtHeadOperation(10L, getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L)), getChain(false, createPayload(2000L))); + replaceAtHeadMessage.setId(42L); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(STORE_OP_CODEC.encode((ServerStoreOpMessage)replaceAtHeadMessage)); + byte[] encoded = STORE_OP_CODEC.encode(replaceAtHeadMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(replaceAtHeadMessage.getMessageType(), wrap(encoded)); ServerStoreOpMessage.ReplaceAtHeadMessage decodedReplaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage) decodedMsg; assertThat(decodedReplaceAtHeadMessage.getCacheId(), is("test")); assertThat(decodedReplaceAtHeadMessage.getKey(), is(10L)); + assertThat(decodedReplaceAtHeadMessage.getId(), is(42L)); Util.assertChainHas(decodedReplaceAtHeadMessage.getExpect(), 10L, 100L, 1000L); Util.assertChainHas(decodedReplaceAtHeadMessage.getUpdate(), 2000L); + assertThat(decodedReplaceAtHeadMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedReplaceAtHeadMessage.getMessageType(), is(EhcacheMessageType.REPLACE)); } @Test public void 
testClearMessageCodec() throws Exception { - EhcacheEntityMessage clearMessage = MESSAGE_FACTORY.clearOperation(); - byte[] encodedBytes = STORE_OP_CODEC.encode((ServerStoreOpMessage)clearMessage); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(encodedBytes); - assertThat(((ServerStoreOpMessage)decodedMsg).getCacheId(), is("test")); + ServerStoreOpMessage clearMessage = MESSAGE_FACTORY.clearOperation(); + clearMessage.setId(42L); + + byte[] encoded = STORE_OP_CODEC.encode(clearMessage); + ServerStoreOpMessage decodedMsg = (ServerStoreOpMessage) STORE_OP_CODEC.decode(clearMessage.getMessageType(), wrap(encoded)); + + assertThat(decodedMsg.getCacheId(), is("test")); + assertThat(decodedMsg.getId(), is(42L)); + assertThat(decodedMsg.getClientId(), is(CLIENT_ID)); + assertThat(decodedMsg.getMessageType(), is(EhcacheMessageType.CLEAR)); } @Test public void testClientInvalidationAckMessageCodec() throws Exception { - EhcacheEntityMessage invalidationAckMessage = MESSAGE_FACTORY.clientInvalidationAck(123); - byte[] encodedBytes = STORE_OP_CODEC.encode((ServerStoreOpMessage)invalidationAckMessage); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(encodedBytes); + ServerStoreOpMessage invalidationAckMessage = MESSAGE_FACTORY.clientInvalidationAck(123); + invalidationAckMessage.setId(456L); + + byte[] encoded = STORE_OP_CODEC.encode(invalidationAckMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(invalidationAckMessage.getMessageType(), wrap(encoded)); ServerStoreOpMessage.ClientInvalidationAck decodedInvalidationAckMessage = (ServerStoreOpMessage.ClientInvalidationAck)decodedMsg; assertThat(decodedInvalidationAckMessage.getCacheId(), is("test")); assertThat(decodedInvalidationAckMessage.getInvalidationId(), is(123)); + assertThat(decodedInvalidationAckMessage.getId(), is(456L)); + assertThat(decodedInvalidationAckMessage.getMessageType(), is(EhcacheMessageType.CLIENT_INVALIDATION_ACK)); + try { + 
decodedInvalidationAckMessage.getClientId(); + fail("AssertionError expected"); + } catch (AssertionError error) { + assertThat(error.getMessage(), containsString("Client Id is not supported")); + } } } diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java index ab0782169e..3d972313cc 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java @@ -20,6 +20,7 @@ import org.junit.Test; import java.util.Collections; +import java.util.UUID; import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; @@ -27,44 +28,33 @@ import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNot.not; -/** - * @author Ludovic Orban - */ public class ServerStoreOpMessageTest { - @Test - public void testConcurrencyKeysEqualForSameCache() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.ClearMessage("cache1"); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.ClientInvalidationAck("cache1", 1); - - assertThat(m1.concurrencyKey(), is(m2.concurrencyKey())); - } + private static final UUID CLIENT_ID = UUID.randomUUID(); @Test public void testConcurrencyKeysEqualForSameCacheAndKey() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L)); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache1", 1L, createPayload(1L)); - ServerStoreOpMessage m3 = new ServerStoreOpMessage.GetMessage("cache1", 1L); - ServerStoreOpMessage m4 = new ServerStoreOpMessage.ReplaceAtHeadMessage("cache1", 1L, getChain(Collections.emptyList()), 
getChain(Collections.emptyList())); + ConcurrentEntityMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ConcurrentEntityMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ConcurrentEntityMessage m3 = new ServerStoreOpMessage.ReplaceAtHeadMessage("cache1", 1L, getChain(Collections.emptyList()), getChain(Collections.emptyList()), CLIENT_ID); assertThat(m1.concurrencyKey(), is(m2.concurrencyKey())); assertThat(m2.concurrencyKey(), is(m3.concurrencyKey())); - assertThat(m3.concurrencyKey(), is(m4.concurrencyKey())); } @Test - public void testConcurrencyKeysNotEqualForDifferentCaches() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L)); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 1L, createPayload(1L)); + public void testConcurrencyKeysEqualForDifferentCachesSameKey() throws Exception { + ConcurrentEntityMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ConcurrentEntityMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 1L, createPayload(1L), CLIENT_ID); - assertThat(m1.concurrencyKey(), not(m2.concurrencyKey())); + assertThat(m1.concurrencyKey(), is(m2.concurrencyKey())); } @Test public void testConcurrencyKeysNotEqualForDifferentCachesAndKeys() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L)); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 1L, createPayload(1L)); - ServerStoreOpMessage m3 = new ServerStoreOpMessage.AppendMessage("cache1", 2L, createPayload(1L)); + ConcurrentEntityMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ConcurrentEntityMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 2L, createPayload(1L), CLIENT_ID); 
+ ConcurrentEntityMessage m3 = new ServerStoreOpMessage.AppendMessage("cache1", 3L, createPayload(1L), CLIENT_ID); assertThat(m1.concurrencyKey(), not(m2.concurrencyKey())); assertThat(m1.concurrencyKey(), not(m3.concurrencyKey())); diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index cf833e2461..7827be1864 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -14,13 +14,32 @@ * limitations under the License. */ +import org.gradle.internal.jvm.Jvm + +sourceCompatibility = 1.8 +targetCompatibility = 1.8 + +configurations { + serverLibs +} + dependencies { testCompile project(':dist') testCompile project(':clustered:clustered-dist') + testCompile project(':management') + testCompile "org.terracotta.management.dist:management-client:$parent.managementVersion" + testCompile "com.fasterxml.jackson.core:jackson-databind:2.8.0" testCompile group:'org.terracotta', name:'galvan-support', version: galvanVersion - testCompile group:'com.google.code.tempus-fugit', name:'tempus-fugit', version:'1.1' + testCompile (group:'com.google.code.tempus-fugit', name:'tempus-fugit', version:'1.1') { + exclude group:'junit', module:'junit' + exclude group:'org.hamcrest', module:'hamcrest-core' + } testCompile group: 'javax.cache', name: 'cache-api', version: jcacheVersion + + serverLibs ("org.terracotta.management.dist:management-server:$parent.managementVersion") { + exclude group:'org.terracotta.management.dist', module:'management-common' + } } task unzipKit(type: Copy) { @@ -29,23 +48,25 @@ task unzipKit(type: Copy) { into 'build/ehcache-kit' } -def java8 = { - JavaVersion.current().isJava8Compatible() -} - compileTestJava { - options.fork = true; - options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') + options.forkOptions.executable = Jvm.current().javacExecutable } +sourceCompatibility = 1.8 +targetCompatibility = 1.8 + test { dependsOn unzipKit 
- executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'java') - environment 'JAVA_HOME', MavenToolchain.javaHome(JavaVersion.VERSION_1_8) + executable = Jvm.current().javaExecutable + // If you want to see all mutations of the voltron monitoring tree, add to JAVA_OPTS: -Dorg.terracotta.management.service.monitoring.VoltronMonitoringService.DEBUG=true environment 'JAVA_OPTS', '-Dcom.tc.l2.lockmanager.greedy.locks.enabled=false' //If this directory does not exist, tests will fail with a cryptic assert failure systemProperty 'kitInstallationPath', "$unzipKit.destinationDir/${project(':clustered:clustered-dist').archivesBaseName}-$project.version-kit" + systemProperty 'managementPlugins', project.configurations.serverLibs.join(File.pathSeparator) // Uncomment to include client logging in console output // testLogging.showStandardStreams = true } +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java index 4fd8dcb7e2..2cffd52291 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java @@ -53,11 +53,11 @@ public class BasicClusteredCacheOpsTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + - "\n"; + "\n"; @ClassRule public static Cluster CLUSTER = diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java index dccd6ee58f..8ec25861a6 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java +++ 
b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java @@ -16,6 +16,7 @@ package org.ehcache.clustered; import java.io.File; +import java.util.Collections; import java.util.UUID; import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.junit.BeforeClass; @@ -35,8 +36,15 @@ public class BasicEntityInteractionTest { + private static final String RESOURCE_CONFIG = + "" + + "" + + "4" + + "" + + "\n"; + @ClassRule - public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1); + public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); @BeforeClass public static void waitForActive() throws Exception { diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java index 14f4648a2a..ee1d81f9b1 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java @@ -60,11 +60,11 @@ public class CacheManagerLifecycleEhcacheIntegrationTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + - "\n"; + "\n"; @ClassRule public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java index b937b7a34b..733a9ece8d 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java +++ 
b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java @@ -46,11 +46,11 @@ public class EhcacheClientEntityFactoryIntegrationTest { private static final Map EMPTY_RESOURCE_MAP = Collections.emptyMap(); private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + - "\n"; + "\n"; @ClassRule public static Cluster CLUSTER = diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java index f0c15f08f0..7b8eab483f 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java @@ -39,11 +39,11 @@ public class JCacheClusteredTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + - "\n"; + "\n"; @ClassRule public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java new file mode 100644 index 0000000000..8d8604c9aa --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java @@ -0,0 +1,110 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.config.DedicatedClusteredResourcePool; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.ServerSideConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.util.Collections; + +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +public class ResourcePoolAllocationFailureTest { + + private static final String RESOURCE_CONFIG = + "" + + "" + + "64" + + "" + + "\n"; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new 
File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @BeforeClass + public static void waitForActive() throws Exception { + CLUSTER.getClusterControl().waitForActive(); + } + + @Test + public void testTooLowResourceException() throws InterruptedException { + + DedicatedClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(10, MemoryUnit.KB); + CacheManagerBuilder cacheManagerBuilder = getPersistentCacheManagerCacheManagerBuilder(resourcePool); + + try { + cacheManagerBuilder.build(true); + fail("InvalidServerStoreConfigurationException expected"); + } catch (Exception e) { + e.printStackTrace(); + assertThat(getRootCause(e), instanceOf(InvalidServerStoreConfigurationException.class)); + assertThat(getRootCause(e).getMessage(), startsWith("Failed to create ServerStore")); + } + resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(100, MemoryUnit.KB); + cacheManagerBuilder = getPersistentCacheManagerCacheManagerBuilder(resourcePool); + PersistentCacheManager persistentCacheManager = cacheManagerBuilder.build(true); + + assertThat(persistentCacheManager, notNullValue()); + persistentCacheManager.close(); + + } + + private CacheManagerBuilder getPersistentCacheManagerCacheManagerBuilder(DedicatedClusteredResourcePool resourcePool) { + + ClusteringServiceConfigurationBuilder clusteringServiceConfigurationBuilder = ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm")); + ServerSideConfigurationBuilder serverSideConfigurationBuilder = clusteringServiceConfigurationBuilder.autoCreate() + .defaultServerResource("primary-server-resource"); + + return CacheManagerBuilder.newCacheManagerBuilder() + .with(serverSideConfigurationBuilder) + .withCache("test-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(resourcePool) + ).add(new 
ClusteredStoreConfiguration(Consistency.EVENTUAL))); + } + + private static Throwable getRootCause(Throwable e) { + Throwable current = e; + while (current.getCause() != null) { + current = current.getCause(); + } + return current; + } + + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java index 8e25f4b825..0eea41355b 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java @@ -76,6 +76,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import static org.junit.Assume.assumeNoException; /** * Provides integration tests in which the server is terminated before the Ehcache operation completes. @@ -123,11 +124,11 @@ public static void setConcurrency() { } private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + - "\n"; + "\n"; private static Map OLD_PROPERTIES; @@ -160,12 +161,21 @@ public static void restoreProperties() { } } + private static Cluster createCluster() { + try { + return new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + } catch (IllegalArgumentException e) { + assumeNoException(e); + return null; + } + } + @Rule public final TestName testName = new TestName(); // Included in 'ruleChain' below. - private final Cluster cluster = - new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + private final Cluster cluster = createCluster(); + // The TestRule.apply method is called on the inner-most Rule first with the result being passed to each // successively outer rule until the outer-most rule is reached. 
For ExternalResource rules, the before @@ -505,7 +515,7 @@ Void runTask() throws Exception { }.run(); fail("Expecting StoreAccessTimeoutException"); } catch (StoreAccessTimeoutException e) { - assertThat(e.getMessage(), containsString("Timeout exceeded for SERVER_STORE_OP#GET_AND_APPEND")); + assertThat(e.getMessage(), containsString("Timeout exceeded for GET_AND_APPEND")); } } @@ -541,7 +551,7 @@ String runTask() throws Exception { }.run(); fail("Expecting StoreAccessTimeoutException"); } catch (StoreAccessTimeoutException e) { - assertThat(e.getMessage(), containsString("Timeout exceeded for SERVER_STORE_OP#GET_AND_APPEND")); + assertThat(e.getMessage(), containsString("Timeout exceeded for GET_AND_APPEND")); } } @@ -578,7 +588,7 @@ Void runTask() throws Exception { }.run(); fail("Expecting StoreAccessTimeoutException"); } catch (StoreAccessTimeoutException e) { - assertThat(e.getMessage(), containsString("Timeout exceeded for SERVER_STORE_OP#GET_AND_APPEND")); + assertThat(e.getMessage(), containsString("Timeout exceeded for GET_AND_APPEND")); } } @@ -615,7 +625,7 @@ Void runTask() throws Exception { }.run(); fail("Expecting StoreAccessTimeoutException"); } catch (StoreAccessTimeoutException e) { - assertThat(e.getMessage(), containsString("Timeout exceeded for SERVER_STORE_OP#CLEAR")); + assertThat(e.getMessage(), containsString("Timeout exceeded for CLEAR")); } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java index 322494082b..836507bc06 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java @@ -33,7 +33,6 @@ import static 
org.ehcache.clustered.lock.VoltronReadWriteLockIntegrationTest.async; import static org.junit.Assert.fail; -@Ignore public class VoltronReadWriteLockPassiveIntegrationTest { @ClassRule diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java new file mode 100644 index 0000000000..9a4a5b6a86 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -0,0 +1,295 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.management; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import org.ehcache.CacheManager; +import org.ehcache.Status; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.rules.Timeout; +import org.terracotta.connection.Connection; +import org.terracotta.management.entity.tms.TmsAgentConfig; +import org.terracotta.management.entity.tms.client.TmsAgentEntity; +import org.terracotta.management.entity.tms.client.TmsAgentEntityFactory; +import org.terracotta.management.entity.tms.client.TmsAgentService; +import org.terracotta.management.model.cluster.Client; +import org.terracotta.management.model.cluster.ClientIdentifier; +import org.terracotta.management.model.cluster.ServerEntityIdentifier; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.message.Message; +import org.terracotta.management.model.notification.ContextualNotification; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.registry.collect.StatisticConfiguration; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.io.FileNotFoundException; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Scanner; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static java.util.Arrays.asList; +import static java.util.concurrent.TimeUnit.SECONDS; +import static 
org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredShared; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.junit.Assert.assertThat; + +public abstract class AbstractClusteringManagementTest { + + private static final String RESOURCE_CONFIG = + "" + + "" + + "64" + + "64" + + "" + + "\n"; + + protected static CacheManager cacheManager; + protected static ClientIdentifier ehcacheClientIdentifier; + protected static ServerEntityIdentifier ehcacheServerEntityIdentifier; + protected static ObjectMapper mapper = new ObjectMapper(); + + protected static TmsAgentService tmsAgentService; + protected static ServerEntityIdentifier tmsServerEntityIdentifier; + + private static final List MANAGEMENT_PLUGINS = System.getProperty("managementPlugins") == null ? 
+ Collections.emptyList() : + Stream.of(System.getProperty("managementPlugins").split(File.pathSeparator)) + .map(File::new) + .collect(Collectors.toList()); + + @ClassRule + public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, MANAGEMENT_PLUGINS, "", RESOURCE_CONFIG, ""); + + @BeforeClass + public static void beforeClass() throws Exception { + mapper.configure(SerializationFeature.INDENT_OUTPUT, true); + + CLUSTER.getClusterControl().waitForActive(); + + // simulate a TMS client + Connection managementConnection = CLUSTER.newConnection(); + TmsAgentEntityFactory entityFactory = new TmsAgentEntityFactory(managementConnection, AbstractClusteringManagementTest.class.getName()); + TmsAgentEntity tmsAgentEntity = entityFactory.retrieveOrCreate(new TmsAgentConfig() + .setStatisticConfiguration(new StatisticConfiguration( + 60, SECONDS, + 100, 1, SECONDS, + 10, SECONDS + ))); + tmsAgentService = new TmsAgentService(tmsAgentEntity); + tmsAgentService.setOperationTimeout(5, TimeUnit.SECONDS); + + tmsServerEntityIdentifier = readTopology() + .activeServerEntityStream() + .filter(serverEntity -> serverEntity.getType().equals(TmsAgentConfig.ENTITY_TYPE)) + .findFirst() + .get() // throws if not found + .getServerEntityIdentifier(); + + cacheManager = newCacheManagerBuilder() + // cluster config + .with(cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-1")) + .autoCreate() + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 28, MemoryUnit.MB, "secondary-server-resource") // <2> + .resourcePool("resource-pool-b", 16, MemoryUnit.MB)) // will take from primary-server-resource + // management config + .using(new DefaultManagementRegistryConfiguration() + .addTags("webapp-1", "server-node-1") + .setCacheManagerAlias("my-super-cache-manager") + .addConfiguration(new EhcacheStatisticsProviderConfiguration( + 1, TimeUnit.MINUTES, + 100, 1, TimeUnit.SECONDS, + 10, TimeUnit.SECONDS))) + // cache config + 
.withCache("dedicated-cache-1", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .build()) + .withCache("shared-cache-2", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredShared("resource-pool-a"))) + .build()) + .withCache("shared-cache-3", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredShared("resource-pool-b"))) + .build()) + .build(true); + + // ensure the CM is running and get its client id + assertThat(cacheManager.getStatus(), equalTo(Status.AVAILABLE)); + ehcacheClientIdentifier = readTopology().getClients().values() + .stream() + .filter(client -> client.getName().equals("Ehcache:my-server-entity-1")) + .findFirst() + .map(Client::getClientIdentifier) + .get(); + + ehcacheServerEntityIdentifier = readTopology() + .activeServerEntityStream() + .filter(serverEntity -> serverEntity.getName().equals("my-server-entity-1")) + .findFirst() + .get() // throws if not found + .getServerEntityIdentifier(); + + // test_notifs_sent_at_CM_init + List messages = readMessages(); + List notificationTypes = notificationTypes(messages); + + Map> counts = notificationTypes.stream().collect(Collectors.groupingBy(o -> o)); + assertThat(counts.keySet(), hasSize(12)); + assertThat(counts.get("CLIENT_CONNECTED"), hasSize(1)); + assertThat(counts.get("CLIENT_REGISTRY_AVAILABLE"), hasSize(1)); + assertThat(counts.get("CLIENT_TAGS_UPDATED"), hasSize(1)); + assertThat(counts.get("EHCACHE_CLIENT_VALIDATED"), hasSize(1)); + assertThat(counts.get("EHCACHE_RESOURCE_POOLS_CONFIGURED"), hasSize(1)); + assertThat(counts.get("EHCACHE_SERVER_STORE_CREATED"), hasSize(3)); + 
assertThat(counts.get("ENTITY_REGISTRY_AVAILABLE"), hasSize(2)); + assertThat(counts.get("ENTITY_REGISTRY_UPDATED"), hasSize(11)); + assertThat(counts.get("SERVER_ENTITY_CREATED"), hasSize(5)); + assertThat(counts.get("SERVER_ENTITY_DESTROYED"), hasSize(1)); + assertThat(counts.get("SERVER_ENTITY_FETCHED"), hasSize(7)); + assertThat(counts.get("SERVER_ENTITY_UNFETCHED"), hasSize(3)); + + assertThat(readMessages(), hasSize(0)); + + sendManagementCallOnEntityToCollectStats(); + } + + @AfterClass + public static void afterClass() throws Exception { + if (cacheManager != null && cacheManager.getStatus() == Status.AVAILABLE) { + cacheManager.close(); + } + } + + @Rule + public final Timeout globalTimeout = Timeout.seconds(60); + + @Before + public void init() throws Exception { + if (tmsAgentService != null) { + readMessages(); + } + } + + protected static org.terracotta.management.model.cluster.Cluster readTopology() throws Exception { + return tmsAgentService.readTopology(); + } + + protected static List readMessages() throws Exception { + return tmsAgentService.readMessages(); + } + + protected static void sendManagementCallOnClientToCollectStats(String... 
statNames) throws Exception { + Context ehcacheClient = readTopology().getClient(ehcacheClientIdentifier).get().getContext() + .with("cacheManagerName", "my-super-cache-manager"); + tmsAgentService.updateCollectedStatistics(ehcacheClient, "StatisticsCapability", asList(statNames)).waitForReturn(); + } + + protected static List waitForNextStats() throws Exception { + // uses the monitoring consumre entity to get the content of the stat buffer when some stats are collected + while (!Thread.currentThread().isInterrupted()) { + List messages = readMessages() + .stream() + .filter(message -> message.getType().equals("STATISTICS")) + .flatMap(message -> message.unwrap(ContextualStatistics.class).stream()) + .collect(Collectors.toList()); + if (messages.isEmpty()) { + Thread.yield(); + } else { + return messages; + } + } + return Collections.emptyList(); + } + + protected static List messageTypes(List messages) { + return messages.stream().map(Message::getType).collect(Collectors.toList()); + } + + protected static List notificationTypes(List messages) { + return messages + .stream() + .filter(message -> "NOTIFICATION".equals(message.getType())) + .flatMap(message -> message.unwrap(ContextualNotification.class).stream()) + .map(ContextualNotification::getType) + .collect(Collectors.toList()); + } + + protected static String read(String path) throws FileNotFoundException { + Scanner scanner = new Scanner(AbstractClusteringManagementTest.class.getResourceAsStream(path), "UTF-8"); + try { + return scanner.useDelimiter("\\A").next(); + } finally { + scanner.close(); + } + } + + protected static String normalizeForLineEndings(String stringToNormalize) { + return stringToNormalize.replace("\r\n", "\n").replace("\r", "\n"); + } + + private static void sendManagementCallOnEntityToCollectStats() throws Exception { + Context context = readTopology().getSingleStripe().getActiveServerEntity(tmsServerEntityIdentifier).get().getContext(); + 
tmsAgentService.updateCollectedStatistics(context, "PoolStatistics", asList("Pool:AllocatedSize")).waitForReturn(); + tmsAgentService.updateCollectedStatistics(context, "ServerStoreStatistics", asList( + "Store:AllocatedMemory", + "Store:DataAllocatedMemory", + "Store:OccupiedMemory", + "Store:DataOccupiedMemory", + "Store:Entries", + "Store:UsedSlotCount", + "Store:DataVitalMemory", + "Store:VitalMemory", + "Store:ReprobeLength", + "Store:RemovedSlotCount", + "Store:DataSize", + "Store:TableCapacity" + )).waitForReturn(); + tmsAgentService.updateCollectedStatistics(context, "OffHeapResourceStatistics", asList("OffHeapResource:AllocatedMemory")).waitForReturn(); + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticRateTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticRateTest.java new file mode 100755 index 0000000000..9efbf323be --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticRateTest.java @@ -0,0 +1,132 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.management; + +import static org.hamcrest.CoreMatchers.is; + +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import org.ehcache.Cache; +import org.ehcache.management.config.DefaultStatisticsProviderConfiguration; +import org.ehcache.management.providers.statistics.EhcacheStatisticsProvider; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.history.RateHistory; + + +public class ClusteredStatisticRateTest extends AbstractClusteringManagementTest { + + private static DefaultStatisticsProviderConfiguration config = new DefaultStatisticsProviderConfiguration(EhcacheStatisticsProvider.class); + private static double HIT_RATE; + private static double MISS_RATE; + + @BeforeClass + public static void initSeconds() { + long seconds; + switch (config.averageWindowUnit()) { + case SECONDS: + seconds = config.averageWindowDuration(); + HIT_RATE = 2.0d / (double)seconds; + MISS_RATE = 2.0d / (double)seconds; + break; + case MINUTES: + seconds = TimeUnit.MINUTES.toSeconds(config.averageWindowDuration()); + HIT_RATE = 2.0d / (double)seconds; + MISS_RATE = 2.0d / (double)seconds; + break; + case HOURS: + seconds = TimeUnit.HOURS.toSeconds(config.averageWindowDuration()); + HIT_RATE = 2.0d / (double)seconds; + MISS_RATE = 2.0d / (double)seconds; break; + default: + throw new IllegalArgumentException("invalid averageWindowUnit: " + config.averageWindowUnit() + " for unit test! 
You can add this TimeUnit if neccessary"); + } + } + + @Test + public void test() throws Exception { + + double cacheHitRate = 0d; + double clusteredHitRate = 0d; + double cacheMissRate = 0d; + double clusteredMissRate = 0d; + + sendManagementCallOnClientToCollectStats("Cache:HitRate", "Clustered:HitRate","Cache:MissRate","Clustered:MissRate"); + Thread.sleep(25000); + + Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); + cache.put("one", "val1"); + cache.put("two", "val2"); + + cache.get("one"); //hit + cache.get("two"); //hit + + cache.get("three"); //miss + cache.get("four"); //miss + + + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct values + do { + + // get the stats (we are getting the primitive counter, not the sample history) + List stats = waitForNextStats() + .stream() + .filter(statistics -> "dedicated-cache-1".equals(statistics.getContext().get("cacheName"))) + .collect(Collectors.toList());; + + for (ContextualStatistics stat : stats) { + if (stat.getContext().get("cacheName") != null && stat.getContext().get("cacheName").equals("dedicated-cache-1")) { + + //HIT stats + Sample[] samplesCacheHitRate = stat.getStatistic(RateHistory.class, "Cache:HitRate").getValue(); + if(samplesCacheHitRate.length > 0) { + cacheHitRate = samplesCacheHitRate[samplesCacheHitRate.length - 1].getValue(); + } + + Sample[] samplesClusteredHitRate = stat.getStatistic(RateHistory.class, "Clustered:HitRate").getValue(); + if(samplesClusteredHitRate.length > 0) { + clusteredHitRate = samplesClusteredHitRate[samplesClusteredHitRate.length - 1].getValue(); + } + + //MISS stats + Sample[] samplesCacheMissRate = stat.getStatistic(RateHistory.class, "Cache:MissRate").getValue(); + if(samplesCacheMissRate.length > 0) { + cacheMissRate = samplesCacheMissRate[samplesCacheMissRate.length - 1].getValue(); + } + + Sample[] samplesClusteredMissRate = 
stat.getStatistic(RateHistory.class, "Clustered:MissRate").getValue(); + if(samplesClusteredMissRate.length > 0) { + clusteredMissRate = samplesClusteredMissRate[samplesClusteredMissRate.length - 1].getValue(); + } + } + } + } while(!Thread.currentThread().isInterrupted() && + (cacheHitRate == 0d) && (clusteredHitRate == 0d) && + (cacheMissRate == 0d) && (clusteredMissRate == 0d)); + + Assert.assertThat(cacheHitRate,is(HIT_RATE)); + Assert.assertThat(clusteredHitRate,is(HIT_RATE)); + + Assert.assertThat(cacheMissRate,is(MISS_RATE)); + Assert.assertThat(clusteredMissRate,is(MISS_RATE)); + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java new file mode 100755 index 0000000000..d495e429a1 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java @@ -0,0 +1,98 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.management; + +import static org.hamcrest.CoreMatchers.is; + +import java.util.List; +import org.ehcache.Cache; +import org.junit.Assert; +import org.junit.Test; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.history.CounterHistory; + +public class ClusteredStatisticsCountTest extends AbstractClusteringManagementTest { + + private static final long CACHE_HIT_COUNT = 2L; + private static final long CLUSTERED_HIT_COUNT = 2L; + private static final long CACHE_MISS_COUNT = 2L; + private static final long CLUSTERED_MISS_COUNT = 2L; + + @Test + public void countTest() throws Exception { + sendManagementCallOnClientToCollectStats("Cache:HitCount","Clustered:HitCount","Cache:MissCount","Clustered:MissCount"); + + Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); + cache.put("one", "val1"); + cache.put("two", "val2"); + + cache.get("one"); //hit + cache.get("two"); //hit + + cache.get("three"); //miss + cache.get("four"); //miss + + + long cacheHitCount = 0; + long clusteredHitCount = 0; + long cacheMissCount = 0; + long clusteredMissCount = 0; + + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct values + do { + + // get the stats (we are getting the primitive counter, not the sample history) + List stats = waitForNextStats(); + for (ContextualStatistics stat : stats) { + if (stat.getContext().contains("cacheName") && stat.getContext().get("cacheName").equals("dedicated-cache-1")) { + + Sample[] samplesCacheHitCount = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + if(samplesCacheHitCount.length > 0) { + cacheHitCount = samplesCacheHitCount[samplesCacheHitCount.length - 1].getValue(); + } + + Sample[] samplesClusteredHitCount = stat.getStatistic(CounterHistory.class, 
"Clustered:HitCount").getValue(); + if(samplesClusteredHitCount.length > 0) { + clusteredHitCount = samplesClusteredHitCount[samplesClusteredHitCount.length - 1].getValue(); + } + + Sample[] samplesClusteredMissCount = stat.getStatistic(CounterHistory.class, "Clustered:MissCount").getValue(); + if(samplesClusteredMissCount.length > 0) { + clusteredMissCount = samplesClusteredMissCount[samplesClusteredMissCount.length - 1].getValue(); + } + + Sample[] samplesCacheMissCount = stat.getStatistic(CounterHistory.class, "Cache:MissCount").getValue(); + if(samplesCacheMissCount.length > 0) { + cacheMissCount = samplesCacheMissCount[samplesCacheMissCount.length - 1].getValue(); + } + } + } + } while(!Thread.currentThread().isInterrupted() && + (cacheHitCount != CACHE_HIT_COUNT) && (clusteredHitCount != CLUSTERED_HIT_COUNT) && + (cacheMissCount != CACHE_MISS_COUNT) && (clusteredMissCount != CLUSTERED_MISS_COUNT)); + + Assert.assertThat(cacheHitCount,is(CACHE_HIT_COUNT)); + Assert.assertThat(clusteredHitCount,is(CLUSTERED_HIT_COUNT)); + Assert.assertThat(cacheMissCount,is(CACHE_MISS_COUNT)); + Assert.assertThat(clusteredMissCount,is(CLUSTERED_MISS_COUNT)); + + } + + + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java new file mode 100755 index 0000000000..de7eea3f68 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java @@ -0,0 +1,141 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.management; + + +import java.util.List; +import java.util.stream.Collectors; +import org.ehcache.Cache; +import org.hamcrest.Matchers; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.Assert; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.history.AverageHistory; +import org.terracotta.management.model.stats.history.DurationHistory; + +@Ignore +public class ClusteredStatisticsLatencyTest extends AbstractClusteringManagementTest { + + @Test + public void test() throws Exception { + + long cacheHitLatencyMin=0L; + long cacheHitLatencyMax=0L; + double cacheHitLatencyAvg=Double.NaN; + + long clusteredHitLatencyMin=0L; + long clusteredHitLatencyMax=0L; + double clusteredHitLatencyAvg=Double.NaN; + + long cacheMissLatencyMin=0L; + long cacheMissLatencyMax=0L; + double cacheMissLatencyAvg=Double.NaN; + + long clusteredMissLatencyMin=0L; + long clusteredMissLatencyMax=0L; + double clusteredMissLatencyAvg=Double.NaN; + + sendManagementCallOnClientToCollectStats("Cache:HitLatencyMinimum","Cache:HitLatencyMaximum","Cache:HitLatencyAverage", + "Clustered:HitLatencyMinimum","Clustered:HitLatencyMaximum","Clustered:HitLatencyAverage", + "Cache:MissLatencyMinimum","Cache:MissLatencyMaximum","Cache:MissLatencyAverage", + "Clustered:MissLatencyMinimum","Clustered:MissLatencyMaximum","Clustered:MissLatencyAverage"); + Thread.sleep(25000); + + Cache cache = 
cacheManager.getCache("dedicated-cache-1", String.class, String.class); + cache.put("one", "val1"); + cache.put("two", "val2"); + + cache.get("one"); //hit + cache.get("two"); //hit + + cache.get("three"); //miss + cache.get("four"); //miss + + + //It may take several seconds for the sampled stat values to be available and correct. + //In the meantime the default values will be available. + //Thus let's loop until the correct value we are expecting is available. + do { + + // get the stats (we are getting the primitive counter, not the sample history) + List stats = waitForNextStats() + .stream() + .filter(statistics -> "dedicated-cache-1".equals(statistics.getContext().get("cacheName"))) + .collect(Collectors.toList());; + + for (ContextualStatistics stat : stats) { + if (stat.getContext().get("cacheName") != null && stat.getContext().get("cacheName").equals("dedicated-cache-1")) { + + //Cache HIT stats + cacheHitLatencyMin = getDurationHistorySampleValue(stat, "Cache:HitLatencyMinimum"); + cacheHitLatencyMax = getDurationHistorySampleValue(stat, "Cache:HitLatencyMaximum"); + cacheHitLatencyAvg = getAverageHistorySampleValue(stat, "Cache:HitLatencyAverage"); + + //Clustered HIT stats + clusteredHitLatencyMin = getDurationHistorySampleValue(stat, "Clustered:HitLatencyMinimum"); + clusteredHitLatencyMax = getDurationHistorySampleValue(stat, "Clustered:HitLatencyMaximum"); + clusteredHitLatencyAvg = getAverageHistorySampleValue(stat, "Clustered:HitLatencyAverage"); + + //Cache MISS stats + cacheMissLatencyMin = getDurationHistorySampleValue(stat, "Cache:MissLatencyMinimum"); + cacheMissLatencyMax = getDurationHistorySampleValue(stat, "Cache:MissLatencyMaximum"); + cacheMissLatencyAvg = getAverageHistorySampleValue(stat, "Cache:MissLatencyAverage"); + + //Clustered MISS stats + clusteredMissLatencyMin = getDurationHistorySampleValue(stat, "Clustered:MissLatencyMinimum"); + clusteredMissLatencyMax = getDurationHistorySampleValue(stat, "Clustered:MissLatencyMaximum"); + 
clusteredMissLatencyAvg = getAverageHistorySampleValue(stat, "Clustered:MissLatencyAverage"); + + } + } + } while(!Thread.currentThread().isInterrupted() && + ((cacheHitLatencyMin == 0L) || (cacheHitLatencyMax == 0L) || Double.isNaN(cacheHitLatencyAvg) || + (clusteredHitLatencyMin == 0L) || (clusteredHitLatencyMax == 0L) || Double.isNaN(clusteredHitLatencyAvg) || + (cacheMissLatencyMin == 0L) || (cacheMissLatencyMax == 0L) || Double.isNaN(cacheMissLatencyAvg) || + (clusteredMissLatencyMin == 0L) || (clusteredMissLatencyMax == 0L) || Double.isNaN(clusteredMissLatencyAvg))); + + + Assert.assertThat((double)cacheHitLatencyMin,Matchers.lessThanOrEqualTo(cacheHitLatencyAvg)); + Assert.assertThat((double)cacheHitLatencyMax,Matchers.greaterThanOrEqualTo(cacheHitLatencyAvg)); + Assert.assertThat((double)clusteredHitLatencyMin,Matchers.lessThanOrEqualTo(clusteredHitLatencyAvg)); + Assert.assertThat((double)clusteredHitLatencyMax,Matchers.greaterThanOrEqualTo(clusteredHitLatencyAvg)); + Assert.assertThat((double)cacheMissLatencyMin,Matchers.lessThanOrEqualTo(cacheMissLatencyAvg)); + Assert.assertThat((double)cacheMissLatencyMax,Matchers.greaterThanOrEqualTo(cacheMissLatencyAvg)); + Assert.assertThat((double)clusteredMissLatencyMin,Matchers.lessThanOrEqualTo(clusteredMissLatencyAvg)); + Assert.assertThat((double)clusteredMissLatencyMax,Matchers.greaterThanOrEqualTo(clusteredMissLatencyAvg)); + } + + private static long getDurationHistorySampleValue(ContextualStatistics stat, String statName) { + Sample[] samplesCacheHitLatencyMin = stat.getStatistic(DurationHistory.class, statName).getValue(); + if(samplesCacheHitLatencyMin.length > 0 && samplesCacheHitLatencyMin[samplesCacheHitLatencyMin.length - 1].getValue() != null) { + return samplesCacheHitLatencyMin[samplesCacheHitLatencyMin.length - 1].getValue(); + } + + return 0L; + } + + private static double getAverageHistorySampleValue(ContextualStatistics stat, String statName) { + Sample[] samplesCacheHitLatencyAvg = 
stat.getStatistic(AverageHistory.class, statName).getValue(); + if(samplesCacheHitLatencyAvg.length > 0 && !Double.isNaN(samplesCacheHitLatencyAvg[samplesCacheHitLatencyAvg.length - 1].getValue())) { + return samplesCacheHitLatencyAvg[samplesCacheHitLatencyAvg.length - 1].getValue(); + } + + return Double.NaN; + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java new file mode 100755 index 0000000000..dd05c4207d --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java @@ -0,0 +1,89 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.management; + +import org.ehcache.Cache; +import org.hamcrest.collection.IsArray; +import org.junit.Test; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.history.RatioHistory; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.array; +import static org.junit.Assert.assertThat; + +public class ClusteredStatisticsRatioTest extends AbstractClusteringManagementTest { + + @Test + public void ratioTest() throws Exception { + String[] statNames = {"Cache:HitRatio", "Clustered:HitRatio", "Cache:MissRatio", "Clustered:MissRatio"}; + sendManagementCallOnClientToCollectStats(statNames); + + // When testing ratios, we need to wait for the first computation (we do not have any choice) to happen because ratio depends on 2 other sampled statistics. + // If you do not wait, then you'll always get some NaN because the hits will be done within the 1st second, and the hits won't be done in the right "window". + // A ratio is computed by dividing a rate with another rate. See CompoundOperationImpl.ratioOf(). + // And a rate is computed with values aggregated into a EventRateSimpleMovingAverage. + // The call to EventRateSimpleMovingAverage.rateUsingSeconds() will return 0 during the fist second (until first computation did happen). + // So the hits must be after the first second so that values get accumulated into the partitions of EventRateSimpleMovingAverage. + + // Also, we have to take in consideration that in clustered, there is a collector that is scheduled at 75% of the TTD to collect and send stats. + // So the delay can be greater than just the duration of the first sampling. 
+ Thread.sleep(25000); + + Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); + cache.put("one", "val1"); + cache.put("two", "val2"); + + cache.get("one"); //hit + cache.get("two"); //hit + + cache.get("three"); //miss + cache.get("four"); //miss + + Double[] ratios = new Double[statNames.length]; + + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct values + do { + + // get the stats (we are getting the primitive counter, not the sample history) + // only keep CM stats for the following checks + List stats = waitForNextStats() + .stream() + .filter(statistics -> "dedicated-cache-1".equals(statistics.getContext().get("cacheName"))) + .collect(Collectors.toList()); + + for (ContextualStatistics stat : stats) { + for (int i = 0; i < statNames.length; i++) { + String statName = statNames[i]; + Sample[] samples = stat.getStatistic(RatioHistory.class, statName).getValue(); + ratios[i] = samples.length > 0 ? samples[samples.length - 1].getValue() : 0d; + } + } + } while (!Thread.currentThread().isInterrupted() && !Arrays.equals(ratios, new Double[]{.5d, .5d, .5d, .5d})); + + @SuppressWarnings("unchecked") + IsArray array = array(equalTo(.5d), equalTo(.5d), equalTo(.5d), equalTo(.5d)); + assertThat(ratios, is(array)); + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java new file mode 100644 index 0000000000..6113181093 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -0,0 +1,417 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.management; + +import org.ehcache.Cache; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.BeforeClass; +import org.junit.FixMethodOrder; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.runners.MethodSorters; +import org.terracotta.management.model.capabilities.Capability; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.Settings; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.cluster.Cluster; +import org.terracotta.management.model.context.ContextContainer; +import org.terracotta.management.model.message.Message; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.StatisticType; +import org.terracotta.management.model.stats.history.CounterHistory; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.TreeSet; +import java.util.stream.Collectors; + +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static 
org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.junit.Assert.assertThat; + +@FixMethodOrder(MethodSorters.NAME_ASCENDING) +public class ClusteringManagementServiceTest extends AbstractClusteringManagementTest { + + private static final Collection ONHEAP_DESCRIPTORS = new ArrayList<>(); + private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList<>(); + private static final Collection DISK_DESCRIPTORS = new ArrayList<>(); + private static final Collection CLUSTERED_DESCRIPTORS = new ArrayList<>(); + private static final Collection CACHE_DESCRIPTORS = new ArrayList<>(); + private static final Collection POOL_DESCRIPTORS = new ArrayList<>(); + private static final Collection SERVER_STORE_DESCRIPTORS = new ArrayList<>(); + private static final Collection OFFHEAP_RES_DESCRIPTORS = new ArrayList<>(); + + @Test + @Ignore("This is not a test, but something useful to show a json print of a cluster topology with all management metadata inside") + public void test_A_topology() throws Exception { + Cluster cluster = tmsAgentService.readTopology(); + String json = mapper.writeValueAsString(cluster.toMap()); + System.out.println(json); + } + + @Test + public void test_A_client_tags_exposed() throws Exception { + String[] tags = readTopology().getClient(ehcacheClientIdentifier).get().getTags().toArray(new String[0]); + assertThat(tags, equalTo(new String[]{"server-node-1", "webapp-1"})); + } + + @Test + public void test_B_client_contextContainer_exposed() throws Exception { + ContextContainer contextContainer = readTopology().getClient(ehcacheClientIdentifier).get().getManagementRegistry().get().getContextContainer(); + assertThat(contextContainer.getValue(), equalTo("my-super-cache-manager")); + Collection subContexts = contextContainer.getSubContexts(); + TreeSet cacheNames = 
subContexts.stream().map(ContextContainer::getValue).collect(Collectors.toCollection(TreeSet::new)); + assertThat(subContexts, hasSize(3)); + assertThat(cacheNames, hasSize(3)); + assertThat(cacheNames, equalTo(new TreeSet<>(Arrays.asList("dedicated-cache-1", "shared-cache-2", "shared-cache-3")))); + } + + @Test + public void test_C_client_capabilities_exposed() throws Exception { + Capability[] capabilities = readTopology().getClient(ehcacheClientIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); + assertThat(capabilities.length, equalTo(5)); + assertThat(capabilities[0].getName(), equalTo("ActionsCapability")); + assertThat(capabilities[1].getName(), equalTo("ManagementAgentService")); + assertThat(capabilities[2].getName(), equalTo("SettingsCapability")); + assertThat(capabilities[3].getName(), equalTo("StatisticCollectorCapability")); + assertThat(capabilities[4].getName(), equalTo("StatisticsCapability")); + + assertThat(capabilities[0].getDescriptors(), hasSize(4)); + + Collection descriptors = capabilities[4].getDescriptors(); + Collection allDescriptors = new ArrayList<>(); + allDescriptors.addAll(CACHE_DESCRIPTORS); + allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(OFFHEAP_DESCRIPTORS); + allDescriptors.addAll(CLUSTERED_DESCRIPTORS); + + assertThat(descriptors, containsInAnyOrder(allDescriptors.toArray())); + assertThat(descriptors, hasSize(allDescriptors.size())); + } + + @Test + public void test_D_server_capabilities_exposed() throws Exception { + Capability[] capabilities = readTopology().getSingleStripe().getActiveServerEntity(ehcacheServerEntityIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); + + assertThat(capabilities.length, equalTo(5)); + + assertThat(capabilities[0].getName(), equalTo("ClientStateSettings")); + assertThat(capabilities[1].getName(), equalTo("PoolSettings")); + assertThat(capabilities[2].getName(), equalTo("PoolStatistics")); 
+ assertThat(capabilities[3].getName(), equalTo("ServerStoreSettings")); + assertThat(capabilities[4].getName(), equalTo("ServerStoreStatistics")); + + assertThat(capabilities[3].getDescriptors(), hasSize(4)); // time descriptor + 3 dedicated store + + // stats + + assertThat(capabilities[4].getDescriptors(), containsInAnyOrder(SERVER_STORE_DESCRIPTORS.toArray())); + assertThat(capabilities[4].getDescriptors(), hasSize(SERVER_STORE_DESCRIPTORS.size())); + assertThat(capabilities[2].getDescriptors(), containsInAnyOrder(POOL_DESCRIPTORS.toArray())); + assertThat(capabilities[2].getDescriptors(), hasSize(POOL_DESCRIPTORS.size())); + + // ClientStateSettings + + assertThat(capabilities[0].getDescriptors(), hasSize(1)); + Settings settings = (Settings) capabilities[0].getDescriptors().iterator().next(); + assertThat(settings.get("attachedStores"), equalTo(new String[]{"dedicated-cache-1", "shared-cache-2", "shared-cache-3"})); + + // ServerStoreSettings + + List descriptors = new ArrayList<>(capabilities[1].getDescriptors()); + assertThat(descriptors, hasSize(4)); + + settings = (Settings) descriptors.get(0); + assertThat(settings.get("alias"), equalTo("resource-pool-b")); + assertThat(settings.get("type"), equalTo("Pool")); + assertThat(settings.get("serverResource"), equalTo("primary-server-resource")); + assertThat(settings.get("size"), equalTo(16 * 1024 * 1024L)); + assertThat(settings.get("allocationType"), equalTo("shared")); + + settings = (Settings) descriptors.get(1); + assertThat(settings.get("alias"), equalTo("resource-pool-a")); + assertThat(settings.get("type"), equalTo("Pool")); + assertThat(settings.get("serverResource"), equalTo("secondary-server-resource")); + assertThat(settings.get("size"), equalTo(28 * 1024 * 1024L)); + assertThat(settings.get("allocationType"), equalTo("shared")); + + settings = (Settings) descriptors.get(2); + assertThat(settings.get("alias"), equalTo("dedicated-cache-1")); + assertThat(settings.get("type"), equalTo("Pool")); + 
assertThat(settings.get("serverResource"), equalTo("primary-server-resource")); + assertThat(settings.get("size"), equalTo(4 * 1024 * 1024L)); + assertThat(settings.get("allocationType"), equalTo("dedicated")); + + settings = (Settings) descriptors.get(3); + assertThat(settings.get("type"), equalTo("PoolSettings")); + assertThat(settings.get("defaultServerResource"), equalTo("primary-server-resource")); + + // tms entity + + capabilities = readTopology().activeServerEntityStream().filter(serverEntity -> serverEntity.is(tmsServerEntityIdentifier)).findFirst().get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); + assertThat(capabilities.length, equalTo(3)); + + assertThat(capabilities[0].getName(), equalTo("OffHeapResourceSettings")); + assertThat(capabilities[1].getName(), equalTo("OffHeapResourceStatistics")); + assertThat(capabilities[2].getName(), equalTo("StatisticCollectorCapability")); + + assertThat(capabilities[0].getDescriptors(), hasSize(3)); // time + 2 resources + + assertThat(capabilities[1].getDescriptors(), containsInAnyOrder(OFFHEAP_RES_DESCRIPTORS.toArray())); + assertThat(capabilities[1].getDescriptors(), hasSize(OFFHEAP_RES_DESCRIPTORS.size())); + } + + @Test + public void test_E_notifs_on_add_cache() throws Exception { + cacheManager.createCache("cache-2", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .build()); + + ContextContainer contextContainer = readTopology().getClient(ehcacheClientIdentifier).get().getManagementRegistry().get().getContextContainer(); + assertThat(contextContainer.getSubContexts(), hasSize(4)); + + TreeSet cNames = contextContainer.getSubContexts().stream().map(ContextContainer::getValue).collect(Collectors.toCollection(TreeSet::new)); + assertThat(cNames, equalTo(new TreeSet<>(Arrays.asList("cache-2", 
"dedicated-cache-1", "shared-cache-2", "shared-cache-3")))); + + List messages = readMessages(); + assertThat(notificationTypes(messages), equalTo(Arrays.asList( + "ENTITY_REGISTRY_UPDATED", "EHCACHE_SERVER_STORE_CREATED", "ENTITY_REGISTRY_UPDATED", + "CLIENT_REGISTRY_UPDATED", "CACHE_ADDED"))); + assertThat(readMessages(), hasSize(0)); + } + + @Test + public void test_F_notifs_on_remove_cache() throws Exception { + cacheManager.removeCache("cache-2"); + + List messages = readMessages(); + assertThat(notificationTypes(messages), equalTo(Arrays.asList("CLIENT_REGISTRY_UPDATED", "CACHE_REMOVED", "ENTITY_REGISTRY_UPDATED"))); + assertThat(readMessages(), hasSize(0)); + } + + @Test + public void test_G_stats_collection() throws Exception { + + sendManagementCallOnClientToCollectStats("Cache:HitCount"); + + Cache cache1 = cacheManager.getCache("dedicated-cache-1", String.class, String.class); + cache1.put("key1", "val"); + cache1.put("key2", "val"); + + cache1.get("key1"); // hit + cache1.get("key2"); // hit + + List allStats = new ArrayList<>(); + long val = 0; + + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct value : 2 + do { + + // get the stats (we are getting the primitive counter, not the sample history) + List stats = waitForNextStats(); + allStats.addAll(stats); + + // only keep CM stats for the following checks + stats = stats.stream() + .filter(statistics -> "dedicated-cache-1".equals(statistics.getContext().get("cacheName"))) + .collect(Collectors.toList()); + + for (ContextualStatistics stat : stats) { + Sample[] samples = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + if(samples.length > 0) { + val = samples[samples.length - 1].getValue(); + } + } + } while(!Thread.currentThread().isInterrupted() && val != 2); + + // do some other operations + cache1.get("key1"); + cache1.get("key2"); + + do { + + List stats = waitForNextStats(); + allStats.addAll(stats); + // 
only keep CM stats for the following checks + stats = stats.stream() + .filter(statistics -> "dedicated-cache-1".equals(statistics.getContext().get("cacheName"))) + .collect(Collectors.toList()); + + for (ContextualStatistics stat : stats) { + Sample[] samples = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + if(samples.length > 0) { + val = samples[samples.length - 1].getValue(); + } + } + + } while(!Thread.currentThread().isInterrupted() && val != 4); + + // wait until we have some stats coming from the server entity + while (!Thread.currentThread().isInterrupted() && !allStats.stream().filter(statistics -> statistics.getContext().contains("consumerId")).findFirst().isPresent()) { + allStats.addAll(waitForNextStats()); + } + List serverStats = allStats.stream().filter(statistics -> statistics.getContext().contains("consumerId")).collect(Collectors.toList()); + + // server-side stats + + assertThat( + serverStats.stream() + .map(ContextualStatistics::getCapability) + .collect(Collectors.toCollection(TreeSet::new)), + equalTo(new TreeSet<>(Arrays.asList("PoolStatistics", "ServerStoreStatistics", "OffHeapResourceStatistics")))); + + // ensure we collect stats from all registered objects (pools and stores) + + assertThat( + serverStats.stream() + .filter(statistics -> statistics.getCapability().equals("PoolStatistics")) + .map(statistics -> statistics.getContext().get("alias")) + .collect(Collectors.toSet()), + equalTo(new HashSet<>(Arrays.asList("resource-pool-b", "resource-pool-a", "dedicated-cache-1", "cache-2")))); + + assertThat( + serverStats.stream() + .filter(statistics -> statistics.getCapability().equals("ServerStoreStatistics")) + .map(statistics -> statistics.getContext().get("alias")) + .collect(Collectors.toSet()), + equalTo(new HashSet<>(Arrays.asList("shared-cache-3", "shared-cache-2", "dedicated-cache-1", "cache-2")))); + + assertThat( + serverStats.stream() + .filter(statistics -> 
statistics.getCapability().equals("OffHeapResourceStatistics")) + .map(statistics -> statistics.getContext().get("alias")) + .collect(Collectors.toSet()), + equalTo(new HashSet<>(Arrays.asList("primary-server-resource", "secondary-server-resource")))); + + // ensure we collect all the stat names + + assertThat( + serverStats.stream() + .filter(statistics -> statistics.getCapability().equals("PoolStatistics")) + .flatMap(statistics -> statistics.getStatistics().keySet().stream()) + .collect(Collectors.toSet()), + equalTo(POOL_DESCRIPTORS.stream().map(StatisticDescriptor::getName).collect(Collectors.toSet()))); + + assertThat( + serverStats.stream() + .filter(statistics -> statistics.getCapability().equals("ServerStoreStatistics")) + .flatMap(statistics -> statistics.getStatistics().keySet().stream()) + .collect(Collectors.toSet()), + equalTo(SERVER_STORE_DESCRIPTORS.stream().map(StatisticDescriptor::getName).collect(Collectors.toSet()))); + + assertThat( + serverStats.stream() + .filter(statistics -> statistics.getCapability().equals("OffHeapResourceStatistics")) + .flatMap(statistics -> statistics.getStatistics().keySet().stream()) + .collect(Collectors.toSet()), + equalTo(OFFHEAP_RES_DESCRIPTORS.stream().map(StatisticDescriptor::getName).collect(Collectors.toSet()))); + } + + @BeforeClass + public static void initDescriptors() throws ClassNotFoundException { + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionRate" , StatisticType.RATE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , 
StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedByteSize", StatisticType.SIZE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRate" , StatisticType.RATE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRate" , StatisticType.RATE_HISTORY)); + + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionRate", StatisticType.RATE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRate", StatisticType.RATE_HISTORY)); + + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedByteSize", StatisticType.SIZE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedByteSize", StatisticType.SIZE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MaxMappingCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitCount", StatisticType.COUNTER_HISTORY)); + + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MaxMappingCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRate", StatisticType.RATE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedByteSize", StatisticType.SIZE_HISTORY)); + 
DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionRate", StatisticType.RATE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedByteSize", StatisticType.SIZE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRate", StatisticType.RATE_HISTORY)); + + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MaxMappingCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitRate", StatisticType.RATE_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissRate", StatisticType.RATE_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:OccupiedByteSize", StatisticType.SIZE_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitRatio", StatisticType.RATIO_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissRatio", StatisticType.RATIO_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:AllocatedByteSize", StatisticType.SIZE_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new 
StatisticDescriptor("Clustered:MappingCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionRate", StatisticType.RATE_HISTORY)); + + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRatio", StatisticType.RATIO_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatio", StatisticType.RATIO_HISTORY)); + + POOL_DESCRIPTORS.add(new StatisticDescriptor("Pool:AllocatedSize", StatisticType.SIZE_HISTORY)); + + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:AllocatedMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataAllocatedMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:OccupiedMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataOccupiedMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:Entries", StatisticType.COUNTER_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:UsedSlotCount", StatisticType.COUNTER_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataVitalMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:VitalMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new 
StatisticDescriptor("Store:ReprobeLength", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:RemovedSlotCount", StatisticType.COUNTER_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataSize", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:TableCapacity", StatisticType.SIZE_HISTORY)); + + OFFHEAP_RES_DESCRIPTORS.add(new StatisticDescriptor("OffHeapResource:AllocatedMemory", StatisticType.SIZE_HISTORY)); + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java new file mode 100644 index 0000000000..40a31887ac --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java @@ -0,0 +1,109 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.management; + +import org.ehcache.CacheManager; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredShared; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; + +public class EhcacheConfigWithManagementTest { + + private static final String RESOURCE_CONFIG = + "" + + "" + + "64" + + "64" + + "" + + "\n"; + + private static final List MANAGEMENT_PLUGINS = Stream.of(System.getProperty("managementPlugins", "").split(File.pathSeparator)) + .map(File::new) + .collect(Collectors.toList()); + + @ClassRule + public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, MANAGEMENT_PLUGINS, "", RESOURCE_CONFIG, ""); + + @BeforeClass + public static void beforeClass() throws Exception { + CLUSTER.getClusterControl().waitForActive(); + } + + @Test + public void create_cache_manager() throws Exception { + CacheManager 
cacheManager = newCacheManagerBuilder() + // cluster config + .with(cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-3")) + .autoCreate() + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 28, MemoryUnit.MB, "secondary-server-resource") // <2> + .resourcePool("resource-pool-b", 16, MemoryUnit.MB)) // will take from primary-server-resource + // management config + .using(new DefaultManagementRegistryConfiguration() + .addTags("webapp-1", "server-node-1") + .setCacheManagerAlias("my-super-cache-manager") + .addConfiguration(new EhcacheStatisticsProviderConfiguration( + 1, TimeUnit.MINUTES, + 100, 1, TimeUnit.SECONDS, + 2, TimeUnit.SECONDS))) // TTD reduce to 2 seconds so that the stat collector runs faster + // cache config + .withCache("dedicated-cache-1", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .build()) + .withCache("shared-cache-2", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredShared("resource-pool-a"))) + .build()) + .withCache("shared-cache-3", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredShared("resource-pool-b"))) + .build()) + .build(true); + + cacheManager.close(); + } + +} diff --git a/management/src/test/java/org/ehcache/core/EhcacheManagerToStringTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java similarity index 53% rename from management/src/test/java/org/ehcache/core/EhcacheManagerToStringTest.java rename to clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java index 
7126a17d34..a3269aca78 100644 --- a/management/src/test/java/org/ehcache/core/EhcacheManagerToStringTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java @@ -14,43 +14,26 @@ * limitations under the License. */ -package org.ehcache.core; +package org.ehcache.clustered.management; import org.ehcache.CacheManager; -import org.ehcache.Status; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.clustered.client.internal.EhcacheClientEntityService; -import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; -import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; -import org.ehcache.clustered.server.EhcacheServerEntityService; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.WriteBehindConfigurationBuilder; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.HumanReadable; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; import org.junit.Test; -import org.terracotta.management.entity.management.client.ManagementAgentEntityClientService; -import org.terracotta.management.entity.management.server.ManagementAgentEntityServerService; -import org.terracotta.management.service.monitoring.IMonitoringConsumer; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; -import 
org.terracotta.offheapresource.OffHeapResourcesProvider; -import org.terracotta.offheapresource.config.OffheapResourcesType; -import org.terracotta.offheapresource.config.ResourceType; -import org.terracotta.passthrough.PassthroughClusterControl; -import org.terracotta.passthrough.PassthroughServer; import java.io.File; import java.io.FileNotFoundException; -import java.math.BigInteger; import java.net.URI; import java.util.Map; import java.util.Scanner; @@ -59,11 +42,9 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.junit.Assert.*; -public class EhcacheManagerToStringTest { - - static IMonitoringConsumer consumer; - static PassthroughClusterControl stripeControl; +public class EhcacheManagerToStringTest extends AbstractClusteringManagementTest { @Test public void simpleOnHeapToString() throws Exception { @@ -93,23 +74,28 @@ public boolean adviseAgainstEviction(String key, String value) { .build()) .build(true); - String actual = ((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString(); - String expected = read("/simpleConfiguration.txt"); - - // only testing part of the string, to avoid collections ordering clashes - Assert.assertThat( - actual.substring(actual.indexOf("resourcePools")).replace(" ", "").replace("\n", "").replaceAll("\\\\|/", "|"), - equalTo( - expected.substring(expected.indexOf("resourcePools")).replace(" ", "").replace("\n", "").replaceAll("\\\\|/", "|") - ) - ); + try { + String actual = normalizeForLineEndings(((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString()); + String expected = normalizeForLineEndings(read("/simpleConfiguration.txt")); + + // only testing part of the string, to avoid collections ordering clashes + assertThat( + actual.substring(actual.indexOf("resourcePools")).replace(" ", "").replace("\n", "").replaceAll("\\\\|/", "|"), + 
equalTo( + expected.substring(expected.indexOf("resourcePools")).replace(" ", "").replace("\n", "").replaceAll("\\\\|/", "|") + ) + ); + } finally { + cacheManager.close(); + } } @Test public void clusteredToString() throws Exception { + URI uri = CLUSTER.getConnectionURI().resolve("/my-server-entity-2"); CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() // cluster config - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("passthrough://server-1:9510/my-server-entity-1")) + .with(ClusteringServiceConfigurationBuilder.cluster(uri) .autoCreate() .defaultServerResource("primary-server-resource")) // management config @@ -126,68 +112,30 @@ public void clusteredToString() throws Exception { newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .offheap(1, MemoryUnit.MB) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .build()) .build(true); - String actual = ((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString(); - String expected = read("/clusteredConfiguration.txt"); - - System.out.println(actual); - - // only testing part of the string, to avoid collections ordering clashes - Assert.assertThat( - actual.substring(actual.indexOf("resourcePools")).replace(" ", "").replace("\n", ""), - equalTo( - expected.substring(expected.indexOf("resourcePools")).replace(" ", "").replace("\n", "") - ) - ); - - Assert.assertThat(actual.indexOf("serviceConfigurations: None"), greaterThan(1)); - Assert.assertThat(actual.indexOf("evictionAdvisor: None"), greaterThan(1)); - - if (cacheManager != null && cacheManager.getStatus() == Status.AVAILABLE) { + try { + String actual = normalizeForLineEndings(((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString()); + String expected = normalizeForLineEndings(read("/clusteredConfiguration.txt")); + + // only testing 
part of the string, to avoid collections ordering clashes + assertThat( + actual.substring(actual.indexOf("resourcePools")).replace(" ", "").replace("\n", ""), + equalTo( + expected.substring(expected.indexOf("resourcePools")).replace(" ", "").replace("\n", "").replace("server-1:9510", uri.getAuthority()) + ) + ); + + assertThat(actual.indexOf("serviceConfigurations: None"), greaterThan(1)); + assertThat(actual.indexOf("evictionAdvisor: None"), greaterThan(1)); + } finally { cacheManager.close(); } } - @BeforeClass - public static void beforeClass() throws Exception { - PassthroughServer activeServer = new PassthroughServer(); - activeServer.setServerName("server-1"); - activeServer.setBindPort(9510); - activeServer.setGroupPort(9610); - - // management agent entity - activeServer.registerServerEntityService(new ManagementAgentEntityServerService()); - activeServer.registerClientEntityService(new ManagementAgentEntityClientService()); - - // ehcache entity - activeServer.registerServerEntityService(new EhcacheServerEntityService()); - activeServer.registerClientEntityService(new EhcacheClientEntityService()); - - // RW lock entity (required by ehcache) - activeServer.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); - activeServer.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - - // off-heap service - OffheapResourcesType offheapResourcesType = new OffheapResourcesType(); - ResourceType resourceType = new ResourceType(); - resourceType.setName("primary-server-resource"); - resourceType.setUnit(org.terracotta.offheapresource.config.MemoryUnit.MB); - resourceType.setValue(BigInteger.TEN); - offheapResourcesType.getResource().add(resourceType); - activeServer.registerServiceProvider(new OffHeapResourcesProvider(), new OffHeapResourcesConfiguration(offheapResourcesType)); - - stripeControl = new PassthroughClusterControl("server-1", activeServer); - } - - @AfterClass - public static void afterClass() throws 
Exception { - stripeControl.tearDown(); - } - public static class SampleLoaderWriter implements CacheLoaderWriter { @Override @@ -221,13 +169,4 @@ public void deleteAll(Iterable keys) throws Exception { } } - private String read(String path) throws FileNotFoundException { - Scanner scanner = new Scanner(getClass().getResourceAsStream(path), "UTF-8"); - try { - return scanner.useDelimiter("\\A").next(); - } finally { - scanner.close(); - } - } - } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java new file mode 100644 index 0000000000..a7cd8dca36 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java @@ -0,0 +1,277 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.replication; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import static java.util.Arrays.asList; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThat; + +/** + * This test asserts Active-Passive fail-over with + * multi-threaded/multi-client scenarios. 
+ * Note that fail-over is happening while client threads are still writing + * Finally the same key set correctness is asserted. + */ +@RunWith(Parameterized.class) +public class BasicClusteredCacheOpsReplicationMultiThreadedTest { + + private static final int NUM_OF_THREADS = 10; + private static final int JOB_SIZE = 100; + private static final String RESOURCE_CONFIG = + "" + + "" + + "16" + + "" + + "\n"; + + private static PersistentCacheManager CACHE_MANAGER1; + private static PersistentCacheManager CACHE_MANAGER2; + private static Cache CACHE1; + private static Cache CACHE2; + + @Parameters(name = "consistency={0}") + public static Consistency[] data() { + return Consistency.values(); + } + + @Parameter + public Consistency cacheConsistency; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @Before + public void startServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); + CLUSTER.getClusterControl().waitForActive(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm-replication")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + CACHE_MANAGER1 = clusteredCacheManagerBuilder.build(true); + CACHE_MANAGER2 = clusteredCacheManagerBuilder.build(true); + CacheConfiguration config = CacheConfigurationBuilder + .newCacheConfigurationBuilder(Long.class, BlobValue.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(500, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .add(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) + .build(); + + CACHE1 = CACHE_MANAGER1.createCache("clustered-cache", 
config); + CACHE2 = CACHE_MANAGER2.createCache("clustered-cache", config); + } + + @After + public void tearDown() throws Exception { + CACHE_MANAGER1.close(); + CACHE_MANAGER2.close(); + CACHE_MANAGER2.destroy(); + } + + @Test(timeout=180000) + public void testCRUD() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + Random random = new Random(); + Set universalSet = Collections.newSetFromMap(new ConcurrentHashMap<>()); + + ExecutorService executorService = Executors.newWorkStealingPool(NUM_OF_THREADS); + + List futures = new ArrayList<>(); + + caches.forEach(cache -> { + for (int i = 0; i < NUM_OF_THREADS; i++) { + futures.add(executorService.submit(() -> random.longs().limit(JOB_SIZE).forEach(x -> { + cache.put(x, new BlobValue()); + universalSet.add(x); + }))); + } + }); + + //This step is to add values in local tier randomly to test invalidations happen correctly + futures.add(executorService.submit(() -> universalSet.forEach(x -> { + CACHE1.get(x); + CACHE2.get(x); + }))); + + CLUSTER.getClusterControl().terminateActive(); + + for (Future f : futures ) { + f.get(); + } + + Set readKeysByCache1AfterFailOver = new HashSet<>(); + Set readKeysByCache2AfterFailOver = new HashSet<>(); + universalSet.forEach(x -> { + if (CACHE1.get(x) != null) { + readKeysByCache1AfterFailOver.add(x); + } + if (CACHE2.get(x) != null) { + readKeysByCache2AfterFailOver.add(x); + } + }); + + assertThat(readKeysByCache2AfterFailOver.size(), equalTo(readKeysByCache1AfterFailOver.size())); + + readKeysByCache2AfterFailOver.stream().forEach(y -> assertThat(readKeysByCache1AfterFailOver.contains(y), is(true))); + + } + + @Test(timeout=180000) + public void testBulkOps() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + Random random = new Random(); + Set universalSet = Collections.newSetFromMap(new ConcurrentHashMap<>()); + + ExecutorService executorService = 
Executors.newWorkStealingPool(NUM_OF_THREADS); + + List futures = new ArrayList<>(); + + caches.forEach(cache -> { + for (int i = 0; i < NUM_OF_THREADS; i++) { + Map map = random.longs().limit(JOB_SIZE).collect(HashMap::new, (hashMap, x) -> hashMap.put(x, new BlobValue()), HashMap::putAll); + futures.add(executorService.submit(() -> { + cache.putAll(map); + universalSet.addAll(map.keySet()); + })); + } + }); + + //This step is to add values in local tier randomly to test invalidations happen correctly + futures.add(executorService.submit(() -> { + universalSet.forEach(x -> { + CACHE1.get(x); + CACHE2.get(x); + }); + })); + + CLUSTER.getClusterControl().terminateActive(); + + for (Future f : futures ) { + f.get(); + } + + Set readKeysByCache1AfterFailOver = new HashSet<>(); + Set readKeysByCache2AfterFailOver = new HashSet<>(); + universalSet.forEach(x -> { + if (CACHE1.get(x) != null) { + readKeysByCache1AfterFailOver.add(x); + } + if (CACHE2.get(x) != null) { + readKeysByCache2AfterFailOver.add(x); + } + }); + + assertThat(readKeysByCache2AfterFailOver.size(), equalTo(readKeysByCache1AfterFailOver.size())); + + readKeysByCache2AfterFailOver.stream().forEach(y -> assertThat(readKeysByCache1AfterFailOver.contains(y), is(true))); + + } + + @Test(timeout=180000) + public void testClear() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + Random random = new Random(); + Set universalSet = Collections.newSetFromMap(new ConcurrentHashMap<>()); + + ExecutorService executorService = Executors.newWorkStealingPool(NUM_OF_THREADS); + + List futures = new ArrayList<>(); + + caches.forEach(cache -> { + for (int i = 0; i < NUM_OF_THREADS; i++) { + Map map = random.longs().limit(JOB_SIZE).collect(HashMap::new, (hashMap, x) -> hashMap.put(x, new BlobValue()), HashMap::putAll); + futures.add(executorService.submit(() -> { + cache.putAll(map); + universalSet.addAll(map.keySet()); + })); + } + }); + + for (Future f : futures ) { + 
f.get(); + } + + universalSet.forEach(x -> { + CACHE1.get(x); + CACHE2.get(x); + }); + + Future clearFuture = executorService.submit(() -> CACHE1.clear()); + + CLUSTER.getClusterControl().terminateActive(); + + clearFuture.get(); + + universalSet.forEach(x -> assertThat(CACHE2.get(x), nullValue())); + + } + + private static class BlobValue implements Serializable { + private final byte[] data = new byte[10 * 1024]; + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java new file mode 100644 index 0000000000..8002f649b2 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java @@ -0,0 +1,226 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.replication; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static java.util.Arrays.asList; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThat; + +@RunWith(Parameterized.class) +public class BasicClusteredCacheOpsReplicationTest { + + private static final String RESOURCE_CONFIG = + "" + + "" + + "16" + + "" + + "\n"; + + private static PersistentCacheManager CACHE_MANAGER; + private static Cache CACHE1; + private static Cache CACHE2; + + @Parameters(name = "consistency={0}") + public static Consistency[] data() { + return Consistency.values(); + } + + @Parameter 
+ public Consistency cacheConsistency; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @Before + public void startServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); + CLUSTER.getClusterControl().waitForActive(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/cm-replication")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + CACHE_MANAGER = clusteredCacheManagerBuilder.build(true); + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .add(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) + .build(); + + CACHE1 = CACHE_MANAGER.createCache("clustered-cache", config); + CACHE2 = CACHE_MANAGER.createCache("another-cache", config); + } + + @After + public void tearDown() throws Exception { + CACHE_MANAGER.close(); + CACHE_MANAGER.destroy(); + } + + @Test + public void testCRUD() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + caches.forEach(x -> { + x.put(1L, "The one"); + x.put(2L, "The two"); + x.put(1L, "Another one"); + x.put(3L, "The three"); + x.put(4L, "The four"); + assertThat(x.get(1L), equalTo("Another one")); + assertThat(x.get(2L), equalTo("The two")); + assertThat(x.get(3L), equalTo("The three")); + x.remove(4L); + }); + + CLUSTER.getClusterControl().terminateActive(); + + caches.forEach(x -> { + assertThat(x.get(1L), equalTo("Another one")); + assertThat(x.get(2L), equalTo("The 
two")); + assertThat(x.get(3L), equalTo("The three")); + assertThat(x.get(4L), nullValue()); + }); + } + + @Test + public void testBulkOps() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + + Map entriesMap = new HashMap<>(); + entriesMap.put(1L, "one"); + entriesMap.put(2L, "two"); + entriesMap.put(3L, "three"); + entriesMap.put(4L, "four"); + entriesMap.put(5L, "five"); + entriesMap.put(6L, "six"); + caches.forEach(cache -> cache.putAll(entriesMap)); + + CLUSTER.getClusterControl().terminateActive(); + + Set keySet = entriesMap.keySet(); + caches.forEach(cache -> { + Map all = cache.getAll(keySet); + assertThat(all.get(1L), is("one")); + assertThat(all.get(2L), is("two")); + assertThat(all.get(3L), is("three")); + assertThat(all.get(4L), is("four")); + assertThat(all.get(5L), is("five")); + assertThat(all.get(6L), is("six")); + }); + + } + + @Test + public void testCAS() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + caches.forEach(cache -> { + assertThat(cache.putIfAbsent(1L, "one"), nullValue()); + assertThat(cache.putIfAbsent(2L, "two"), nullValue()); + assertThat(cache.putIfAbsent(3L, "three"), nullValue()); + assertThat(cache.replace(3L, "another one", "yet another one"), is(false)); + }); + + CLUSTER.getClusterControl().terminateActive(); + + caches.forEach(cache -> { + assertThat(cache.putIfAbsent(1L, "another one"), is("one")); + assertThat(cache.remove(2L, "not two"), is(false)); + assertThat(cache.replace(3L, "three", "another three"), is(true)); + assertThat(cache.replace(2L, "new two"), is("two")); + }); + } + + @Test + public void testClear() throws Exception { + + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + + Map entriesMap = new HashMap<>(); + entriesMap.put(1L, "one"); + entriesMap.put(2L, "two"); + entriesMap.put(3L, "three"); + entriesMap.put(4L, "four"); + entriesMap.put(5L, "five"); + entriesMap.put(6L, 
"six"); + caches.forEach(cache -> cache.putAll(entriesMap)); + + Set keySet = entriesMap.keySet(); + caches.forEach(cache -> { + Map all = cache.getAll(keySet); + assertThat(all.get(1L), is("one")); + assertThat(all.get(2L), is("two")); + assertThat(all.get(3L), is("three")); + assertThat(all.get(4L), is("four")); + assertThat(all.get(5L), is("five")); + assertThat(all.get(6L), is("six")); + }); + + CACHE1.clear(); + CACHE2.clear(); + + CLUSTER.getClusterControl().terminateActive(); + + keySet.forEach(x -> assertThat(CACHE1.get(x), nullValue())); + keySet.forEach(x -> assertThat(CACHE2.get(x), nullValue())); + + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java new file mode 100644 index 0000000000..d7f58afae2 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java @@ -0,0 +1,224 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.replication; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.stream.LongStream; + +import static java.util.Arrays.asList; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThat; + +/** + * The point of this test is to assert proper data read after fail-over handling. 
+ */ +@RunWith(Parameterized.class) +public class BasicClusteredCacheOpsReplicationWithMultipleClientsTest { + + private static final String RESOURCE_CONFIG = + "" + + "" + + "16" + + "" + + "\n"; + + private static PersistentCacheManager CACHE_MANAGER1; + private static PersistentCacheManager CACHE_MANAGER2; + private static Cache CACHE1; + private static Cache CACHE2; + + @Parameters(name = "consistency={0}") + public static Consistency[] data() { + return Consistency.values(); + } + + @Parameter + public Consistency cacheConsistency; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @Before + public void startServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); + CLUSTER.getClusterControl().waitForActive(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm-replication")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + CACHE_MANAGER1 = clusteredCacheManagerBuilder.build(true); + CACHE_MANAGER2 = clusteredCacheManagerBuilder.build(true); + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, BlobValue.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(500, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .add(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) + .build(); + + CACHE1 = CACHE_MANAGER1.createCache("clustered-cache", config); + CACHE2 = CACHE_MANAGER2.createCache("clustered-cache", config); + } + + @After + public void tearDown() throws Exception { + CACHE_MANAGER1.close(); + CACHE_MANAGER2.close(); + CACHE_MANAGER2.destroy(); + } + + 
@Test(timeout=180000) + public void testCRUD() throws Exception { + Random random = new Random(); + LongStream longStream = random.longs(1000); + Set added = new HashSet<>(); + longStream.forEach(x -> { + CACHE1.put(x, new BlobValue()); + added.add(x); + }); + + Set readKeysByCache2BeforeFailOver = new HashSet<>(); + added.forEach(x -> { + if (CACHE2.get(x) != null) { + readKeysByCache2BeforeFailOver.add(x); + } + }); + + CLUSTER.getClusterControl().terminateActive(); + + Set readKeysByCache1AfterFailOver = new HashSet<>(); + added.forEach(x -> { + if (CACHE1.get(x) != null) { + readKeysByCache1AfterFailOver.add(x); + } + }); + + assertThat(readKeysByCache2BeforeFailOver.size(), greaterThanOrEqualTo(readKeysByCache1AfterFailOver.size())); + + readKeysByCache1AfterFailOver.stream().filter(readKeysByCache2BeforeFailOver::contains).forEach(y -> assertThat(CACHE2.get(y), notNullValue())); + + } + + @Test(timeout=180000) + public void testBulkOps() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + + Map entriesMap = new HashMap<>(); + + Random random = new Random(); + LongStream longStream = random.longs(1000); + + longStream.forEach(x -> entriesMap.put(x, new BlobValue())); + caches.forEach(cache -> cache.putAll(entriesMap)); + + Set keySet = entriesMap.keySet(); + + Set readKeysByCache2BeforeFailOver = new HashSet<>(); + keySet.forEach(x -> { + if (CACHE2.get(x) != null) { + readKeysByCache2BeforeFailOver.add(x); + } + }); + + CLUSTER.getClusterControl().terminateActive(); + + Set readKeysByCache1AfterFailOver = new HashSet<>(); + keySet.forEach(x -> { + if (CACHE1.get(x) != null) { + readKeysByCache1AfterFailOver.add(x); + } + }); + + assertThat(readKeysByCache2BeforeFailOver.size(), greaterThanOrEqualTo(readKeysByCache1AfterFailOver.size())); + + readKeysByCache1AfterFailOver.stream().filter(readKeysByCache2BeforeFailOver::contains).forEach(y -> assertThat(CACHE2.get(y), notNullValue())); + + } + + 
@Test(timeout=180000) + public void testClear() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + + Map entriesMap = new HashMap<>(); + + Random random = new Random(); + LongStream longStream = random.longs(1000); + + longStream.forEach(x -> entriesMap.put(x, new BlobValue())); + caches.forEach(cache -> cache.putAll(entriesMap)); + + Set keySet = entriesMap.keySet(); + + Set readKeysByCache2BeforeFailOver = new HashSet<>(); + keySet.forEach(x -> { + if (CACHE2.get(x) != null) { + readKeysByCache2BeforeFailOver.add(x); + } + }); + + CACHE1.clear(); + + CLUSTER.getClusterControl().terminateActive(); + + readKeysByCache2BeforeFailOver.forEach(x -> assertThat(CACHE2.get(x), nullValue())); + + } + + private static class BlobValue implements Serializable { + private final byte[] data = new byte[10 * 1024]; + } +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicLifeCyclePassiveReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java similarity index 56% rename from clustered/integration-test/src/test/java/org/ehcache/clustered/BasicLifeCyclePassiveReplicationTest.java rename to clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java index 04cce2b581..efb71f5c44 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicLifeCyclePassiveReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java @@ -14,26 +14,35 @@ * limitations under the License. 
*/ -package org.ehcache.clustered; +package org.ehcache.clustered.replication; +import org.ehcache.CachePersistenceException; +import org.ehcache.PersistentCacheManager; import org.ehcache.clustered.client.config.ClusteredResourcePool; import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.clustered.client.internal.EhcacheClientEntity; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLock; import org.ehcache.clustered.client.internal.service.ClusteredTierCreationException; import org.ehcache.clustered.client.internal.service.ClusteredTierDestructionException; import org.ehcache.clustered.client.internal.service.ClusteredTierManagerConfigurationException; +import org.ehcache.clustered.client.internal.service.ClusteredTierManagerValidationException; import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; import org.ehcache.clustered.client.service.ClusteringService; import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreManagerException; +import org.ehcache.clustered.common.internal.exceptions.LifecycleException; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.impl.serialization.CompactJavaSerializer; +import org.ehcache.spi.service.MaintainableService; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; +import org.junit.Ignore; import org.junit.Test; import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; @@ -42,20 +51,27 @@ import 
java.lang.reflect.Field; import java.util.Collections; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; public class BasicLifeCyclePassiveReplicationTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + - "\n"; + "\n"; @ClassRule public static Cluster CLUSTER = @@ -63,6 +79,7 @@ public class BasicLifeCyclePassiveReplicationTest { @Before public void startServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); CLUSTER.getClusterControl().waitForActive(); CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); } @@ -70,7 +87,6 @@ public void startServers() throws Exception { @After public void tearDown() throws Exception { CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().startAllServers(); } @Test @@ -100,7 +116,7 @@ public void testCreateCacheReplication() throws Exception { } service.stop(); - + cleanUpCluster(service); } @Test @@ -133,7 +149,7 @@ public void testDestroyCacheReplication() throws Exception { } service.stop(); - + cleanUpCluster(service); } @Test @@ -160,8 +176,109 @@ public void testConfigureReplication() throws Exception { } service.stop(); + cleanUpCluster(service); } + @Test + public void testValidateReplication() throws Exception 
{ + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI()) + .autoCreate() + .build(); + + ClusteringService service = new ClusteringServiceFactory().create(configuration); + + service.start(null); + + EhcacheClientEntity clientEntity = getEntity(service); + + CLUSTER.getClusterControl().terminateActive(); + + try { + clientEntity.validate(configuration.getServerConfiguration()); + fail("LifecycleException Expected."); + } catch (ClusteredTierManagerValidationException e) { + assertThat(e.getCause(), instanceOf(LifecycleException.class)); + assertThat(e.getCause().getMessage(), containsString("is already being tracked with Client Id")); + } + + service.stop(); + cleanUpCluster(service); + } + + @Test + public void testDestroyServerStoreIsNotReplicatedIfFailsOnActive() throws Exception { + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI()) + .autoCreate() + .build(); + + ClusteringService service1 = new ClusteringServiceFactory().create(configuration); + + ClusteringService service2 = new ClusteringServiceFactory().create(configuration); + + service1.start(null); + service2.start(null); + + EhcacheClientEntity clientEntity1 = getEntity(service1); + EhcacheClientEntity clientEntity2 = getEntity(service2); + + clientEntity1.createCache("testCache", getServerStoreConfiguration("primary-server-resource")); + clientEntity2.validateCache("testCache", getServerStoreConfiguration("primary-server-resource")); + + clientEntity1.releaseCache("testCache"); + try { + clientEntity1.destroyCache("testCache"); + fail("ClusteredTierDestructionException Expected"); + } catch (ClusteredTierDestructionException e) { + //nothing to do + } + + CLUSTER.getClusterControl().terminateActive(); + + clientEntity2.releaseCache("testCache"); + clientEntity2.destroyCache("testCache"); + + service1.stop(); + service2.stop(); + cleanUpCluster(service1); + } + + 
@Test + public void testDestroyCacheManager() throws Exception { + CacheManagerBuilder configBuilder = newCacheManagerBuilder().with(cluster(CLUSTER.getConnectionURI().resolve("/destroy-CM")) + .autoCreate().defaultServerResource("primary-server-resource")); + PersistentCacheManager cacheManager1 = configBuilder.build(true); + PersistentCacheManager cacheManager2 = configBuilder.build(true); + + cacheManager2.close(); + + try { + cacheManager2.destroy(); + fail("Exception expected"); + } catch (Exception e) { + e.printStackTrace(); + } + + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().waitForActive(); + + cacheManager1.createCache("test", newCacheConfigurationBuilder(Long.class, String.class, heap(10).with(clusteredDedicated(10, MB)))); + } + + @Test + public void testDestroyLockEntity() throws Exception { + VoltronReadWriteLock lock1 = new VoltronReadWriteLock(CLUSTER.newConnection(), "my-lock"); + VoltronReadWriteLock.Hold hold1 = lock1.tryReadLock(); + + VoltronReadWriteLock lock2 = new VoltronReadWriteLock(CLUSTER.newConnection(), "my-lock"); + assertThat(lock2.tryWriteLock(), nullValue()); + + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().waitForActive(); + + hold1.unlock(); + } private static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { Field entity = clusteringService.getClass().getDeclaredField("entity"); @@ -169,6 +286,12 @@ private static EhcacheClientEntity getEntity(ClusteringService clusteringService return (EhcacheClientEntity)entity.get(clusteringService); } + private void cleanUpCluster(ClusteringService service) throws CachePersistenceException { + service.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); + service.destroyAll(); + service.stop(); + } + private static ServerStoreConfiguration getServerStoreConfiguration(String resourceName) { ClusteredResourcePool resourcePool = 
ClusteredResourcePoolBuilder.clusteredDedicated(resourceName, 4, MB); return new ServerStoreConfiguration(resourcePool.getPoolAllocation(), diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java new file mode 100644 index 0000000000..a6da1f2a9a --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java @@ -0,0 +1,160 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.sync; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Ignore; +import org.junit.Test; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import com.google.code.tempusfugit.temporal.Timeout; + +import java.io.File; +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; + +import static com.google.code.tempusfugit.temporal.Duration.seconds; +import static com.google.code.tempusfugit.temporal.Timeout.timeout; +import static com.google.code.tempusfugit.temporal.WaitFor.waitOrTimeout; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertThat; + +public class PassiveSyncTest { + private static final String RESOURCE_CONFIG = + "" + + "" + + "16" + + "" + + "\n"; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @Before + public void startServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); + CLUSTER.getClusterControl().waitForActive(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + } + + @Test(timeout = 150000) + public void testSync() throws Exception { + CLUSTER.getClusterControl().terminateOnePassive(); + + final CacheManagerBuilder clusteredCacheManagerBuilder + = 
CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/op-sync")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + cacheManager.init(); + + try { + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))).build(); + + Cache cache = cacheManager.createCache("clustered-cache", config); + + for (long i = -5; i < 5; i++) { + cache.put(i, "value" + i); + } + + CLUSTER.getClusterControl().startOneServer(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().waitForActive(); + + // Sometimes the new passive believes there is a second connection and we have to wait for the full reconnect window before getting a result + waitOrTimeout(() -> "value-5".equals(cache.get(-5L)), timeout(seconds(130))); + + for (long i = -4; i < 5; i++) { + assertThat(cache.get(i), equalTo("value" + i)); + } + } finally { + cacheManager.close(); + } + } + + @Ignore + @Test + public void testLifeCycleOperationsOnSync() throws Exception { + CLUSTER.getClusterControl().terminateOnePassive(); + + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/lifecycle-sync")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); + + try { + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + 
ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))).build(); + + final Cache cache = cacheManager.createCache("clustered-cache", config); + + for (long i = 0; i < 100; i++) { + cache.put(i, "value" + i); + } + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicBoolean complete = new AtomicBoolean(false); + Thread lifeCycleThread = new Thread(new Runnable() { + @Override + public void run() { + while (!complete.get()) { + try { + latch.await(); + clusteredCacheManagerBuilder.build(true); + Thread.sleep(200); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + } + }); + lifeCycleThread.start(); + CLUSTER.getClusterControl().startOneServer(); + latch.countDown(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateActive(); + complete.set(true); + + for (long i = 0; i < 100; i++) { + assertThat(cache.get(i), equalTo("value" + i)); + } + } finally { + cacheManager.close(); + } + } +} diff --git a/management/src/test/resources/clusteredConfiguration.txt b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt similarity index 78% rename from management/src/test/resources/clusteredConfiguration.txt rename to clustered/integration-test/src/test/resources/clusteredConfiguration.txt index 7e5a0f0cb4..240f5ea51e 100644 --- a/management/src/test/resources/clusteredConfiguration.txt +++ b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt @@ -14,11 +14,11 @@ caches: size: 1 MB tierHeight: 1000 clustered-dedicated: - size: 1 MB (persistent) + size: 2 MB (persistent) tierHeight: 10 services: - org.ehcache.clustered.client.config.ClusteringServiceConfiguration: - clusterUri: passthrough://server-1:9510/my-server-entity-1 - readOperationTimeout: TimeoutDuration{5 SECONDS} + clusterUri: terracotta://server-1:9510/my-server-entity-2 + 
readOperationTimeout: TimeoutDuration{20 SECONDS} autoCreate: true - - org.ehcache.management.registry.DefaultManagementRegistryConfiguration \ No newline at end of file + - org.ehcache.management.registry.DefaultManagementRegistryConfiguration diff --git a/management/src/test/resources/simpleConfiguration.txt b/clustered/integration-test/src/test/resources/simpleConfiguration.txt similarity index 81% rename from management/src/test/resources/simpleConfiguration.txt rename to clustered/integration-test/src/test/resources/simpleConfiguration.txt index 449a883f16..09765c1a48 100644 --- a/management/src/test/resources/simpleConfiguration.txt +++ b/clustered/integration-test/src/test/resources/simpleConfiguration.txt @@ -5,7 +5,7 @@ caches: serviceConfigurations: - org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration - org.ehcache.impl.config.loaderwriter.writebehind.DefaultWriteBehindConfiguration - evictionAdvisor: org.ehcache.core.EhcacheManagerToStringTest$1 + evictionAdvisor: org.ehcache.clustered.management.EhcacheManagerToStringTest$1 expiry: NoExpiry resourcePools: pools: @@ -15,9 +15,9 @@ caches: offheap: size: 1 MB tierHeight: 1000 - disk: + disk: size: 2 MB (persistent) tierHeight: 100 services: - org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration: - rootDirectory: build/tmp/EhcacheManagerToStringTest \ No newline at end of file + rootDirectory: build/tmp/EhcacheManagerToStringTest diff --git a/clustered/integration-test/src/test/resources/simplelogger.properties b/clustered/integration-test/src/test/resources/simplelogger.properties new file mode 100644 index 0000000000..f6142f6c45 --- /dev/null +++ b/clustered/integration-test/src/test/resources/simplelogger.properties @@ -0,0 +1,18 @@ +# +# Copyright Terracotta, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +org.slf4j.simpleLogger.showDateTime=true +org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss.SSS diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index c7f9df290b..3bbba7ed2c 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -14,27 +14,41 @@ * limitations under the License. */ +import org.gradle.internal.jvm.Jvm + apply plugin: EhDeploy -configurations { - provided -} +sourceCompatibility = 1.8 +targetCompatibility = 1.8 dependencies { - compile project(':clustered:common') - compile group: 'org.terracotta', name: 'offheap-resource', version: parent.offheapResourceVersion + compile project(':clustered:common'), "org.slf4j:slf4j-api:$parent.slf4jVersion" + compile("org.terracotta:offheap-resource:$parent.offheapResourceVersion") { + transitive = false + } compile group: 'org.terracotta', name: 'offheap-store', version: parent.offheapVersion compile group: 'org.slf4j', name: 'slf4j-api', version: parent.slf4jVersion + compile("org.terracotta.management:monitoring-service-api:$parent.managementVersion") { + transitive = false + } + compile"org.terracotta.management.dist:management-common:$parent.managementVersion" provided "org.terracotta:entity-server-api:$parent.entityApiVersion" provided "org.terracotta:standard-cluster-services:$parent.terracottaApisVersion" + provided "org.terracotta:runnel:$parent.terracottaPlatformVersion" +} + +compileJava { + options.forkOptions.executable = Jvm.current().javacExecutable +} + +compileTestJava { + options.forkOptions.executable = 
Jvm.current().javacExecutable +} + +test { + executable = Jvm.current().javaExecutable } -sourceSets { - main { - compileClasspath += configurations.provided - } - test { - compileClasspath += configurations.provided - runtimeClasspath += configurations.provided - } +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java index bcb32fc9e7..98451e0ad1 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java @@ -47,9 +47,6 @@ public void endSyncConcurrencyKey(int concurrencyKey) {} @Override public void createNew() {} - @Override - public void loadExisting() {} - @Override public void destroy() {} } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java index 90a8b1ac49..5ae6adc23a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java @@ -21,6 +21,7 @@ import org.ehcache.clustered.common.internal.lock.LockMessaging.LockOperation; import org.ehcache.clustered.common.internal.lock.LockMessaging.LockTransition; +import org.ehcache.clustered.lock.server.messages.LockSyncMessaging; import org.terracotta.entity.ActiveServerEntity; import org.terracotta.entity.ClientCommunicator; import org.terracotta.entity.ConcurrencyStrategy; @@ -80,7 +81,7 @@ public MessageCodec getMessageCodec() { @Override public 
SyncMessageCodec getSyncMessageCodec() { - return LockMessaging.syncCodec(); + return LockSyncMessaging.syncCodec(); } private static final ServiceConfiguration config(final Class klazz) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java new file mode 100644 index 0000000000..354d7b2e1d --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java @@ -0,0 +1,44 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.lock.server.messages; + +import org.ehcache.clustered.common.internal.lock.LockMessaging; +import org.terracotta.entity.MessageCodecException; +import org.terracotta.entity.SyncMessageCodec; + +/** + * LockSyncMessaging + */ +public class LockSyncMessaging { + + public static SyncMessageCodec syncCodec() { + return SYNC_CODEC; + } + + private static final SyncMessageCodec SYNC_CODEC = new SyncMessageCodec() { + @Override + public byte[] encode(int i, LockMessaging.LockOperation message) throws MessageCodecException { + throw new AssertionError(); + } + + @Override + public LockMessaging.LockOperation decode(int i, byte[] bytes) throws MessageCodecException { + throw new AssertionError(); + } + }; + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ClientState.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ClientState.java new file mode 100644 index 0000000000..6b4c141a33 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ClientState.java @@ -0,0 +1,55 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +/** + * Represents a client's state against an {@link EhcacheActiveEntity}. 
+ */ +public class ClientState { + /** + * Indicates if the client has either configured or validated with clustered store manager. + */ + private boolean attached = false; + + /** + * The set of stores to which the client has attached. + */ + private final Set attachedStores = new HashSet(); + + public boolean isAttached() { + return attached; + } + + void attach() { + this.attached = true; + } + + boolean addStore(String storeName) { + return this.attachedStores.add(storeName); + } + + boolean removeStore(String storeName) { + return this.attachedStores.remove(storeName); + } + + public Set getAttachedStores() { + return Collections.unmodifiableSet(new HashSet(this.attachedStores)); + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java index 453d9c8848..b591337b59 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java @@ -16,10 +16,11 @@ package org.ehcache.clustered.server; import java.util.Collections; -import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.Set; import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.terracotta.entity.ConcurrencyStrategy; import org.terracotta.entity.EntityMessage; @@ -29,32 +30,36 @@ public final class ConcurrencyStrategies { private ConcurrencyStrategies() { } - public static final ConcurrencyStrategy defaultConcurrency(int bucketCount) { - return new DefaultConcurrencyStrategy(bucketCount); + public static final ConcurrencyStrategy defaultConcurrency(KeySegmentMapper mapper) { + return new DefaultConcurrencyStrategy(mapper); } - static class DefaultConcurrencyStrategy implements ConcurrencyStrategy { + public static 
class DefaultConcurrencyStrategy implements ConcurrencyStrategy { public static final int DEFAULT_KEY = 1; + public static final int DATA_CONCURRENCY_KEY_OFFSET = DEFAULT_KEY + 1; - private final int bucketCount; + private final KeySegmentMapper mapper; - public DefaultConcurrencyStrategy(int bucketCount) { - this.bucketCount = bucketCount; + public DefaultConcurrencyStrategy(KeySegmentMapper mapper) { + this.mapper = mapper; } @Override public int concurrencyKey(EntityMessage entityMessage) { - if (entityMessage instanceof ConcurrentEntityMessage) { + if (entityMessage instanceof ServerStoreOpMessage.GetMessage) { + return UNIVERSAL_KEY; + } else if (entityMessage instanceof ConcurrentEntityMessage) { ConcurrentEntityMessage concurrentEntityMessage = (ConcurrentEntityMessage) entityMessage; - return DEFAULT_KEY + Math.abs(concurrentEntityMessage.concurrencyKey() % bucketCount); + return DATA_CONCURRENCY_KEY_OFFSET + mapper.getSegmentForKey(concurrentEntityMessage.concurrencyKey()); + } else { + return DEFAULT_KEY; } - return DEFAULT_KEY; } @Override public Set getKeysForSynchronization() { - Set result = new HashSet(); - for (int i = 0; i < bucketCount; i++) { + Set result = new LinkedHashSet<>(); + for (int i = 0; i <= mapper.getSegments(); i++) { result.add(DEFAULT_KEY + i); } return Collections.unmodifiableSet(result); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 82a0ba342f..2a65ec7d52 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -15,46 +15,69 @@ */ package org.ehcache.clustered.server; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.List; import java.util.Map; import 
java.util.Map.Entry; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.internal.exceptions.ClusterException; -import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; +import org.ehcache.clustered.common.internal.exceptions.InvalidClientIdException; import org.ehcache.clustered.common.internal.exceptions.InvalidOperationException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.exceptions.ResourceBusyException; -import org.ehcache.clustered.common.internal.exceptions.ServerMisconfigurationException; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponseFactory; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; -import org.ehcache.clustered.common.internal.messages.ReconnectDataCodec; +import org.ehcache.clustered.common.internal.store.Element; +import 
org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; +import org.ehcache.clustered.common.internal.messages.ReconnectMessage; +import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.KeyBasedServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.ServerStore; +import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheStateSyncMessage; +import org.ehcache.clustered.server.management.Management; import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.InvalidationTracker; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.entity.ActiveServerEntity; +import org.terracotta.entity.BasicServiceConfiguration; import org.terracotta.entity.ClientCommunicator; import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.IEntityMessenger; import org.terracotta.entity.MessageCodecException; import org.terracotta.entity.PassiveSynchronizationChannel; import org.terracotta.entity.ServiceConfiguration; @@ -67,39 +90,50 @@ import static 
org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.hashInvalidationDone; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.serverInvalidateHash; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveReplicationMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.ReleaseServerStore; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateServerStore; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; +import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.DATA_CONCURRENCY_KEY_OFFSET; +import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.DEFAULT_KEY; -// TODO: Provide some mechanism to report on storage utilization -- PageSource provides little visibility -// TODO: Ensure proper operations for concurrent requests class EhcacheActiveEntity implements ActiveServerEntity { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheActiveEntity.class); + static final String SYNC_DATA_SIZE_PROP = "ehcache.sync.data.size.threshold"; + private static final long DEFAULT_SYNC_DATA_SIZE_THRESHOLD = 4 * 1024 * 1024; private final UUID identity; - private final Set offHeapResourceIdentifiers; /** * 
Tracks the state of a connected client. An entry is added to this map when the * {@link #connected(ClientDescriptor)} method is invoked for a client and removed when the * {@link #disconnected(ClientDescriptor)} method is invoked for the client. */ - private final Map clientStateMap = new HashMap(); + private final Map clientStateMap = new ConcurrentHashMap<>(); private final ConcurrentHashMap> storeClientMap = - new ConcurrentHashMap>(); + new ConcurrentHashMap<>(); - private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); + private final ConcurrentHashMap clientIdMap = new ConcurrentHashMap<>(); + private final Set trackedClients = Collections.newSetFromMap(new ConcurrentHashMap<>()); + private final ReconnectMessageCodec reconnectMessageCodec = new ReconnectMessageCodec(); private final ServerStoreCompatibility storeCompatibility = new ServerStoreCompatibility(); private final EhcacheEntityResponseFactory responseFactory; - private final ConcurrentMap clientsWaitingForInvalidation = new ConcurrentHashMap(); + private final ConcurrentMap clientsWaitingForInvalidation = new ConcurrentHashMap<>(); private final AtomicInteger invalidationIdGenerator = new AtomicInteger(); private final ClientCommunicator clientCommunicator; private final EhcacheStateService ehcacheStateService; + private final IEntityMessenger entityMessenger; + private volatile ConcurrentHashMap> inflightInvalidations; + private final Management management; + private final AtomicBoolean reconnectComplete = new AtomicBoolean(true); static class InvalidationHolder { final ClientDescriptor clientDescriptorWaitingForInvalidation; @@ -138,20 +172,19 @@ public Class getServiceType() { } - EhcacheActiveEntity(ServiceRegistry services, byte[] config) { + EhcacheActiveEntity(ServiceRegistry services, byte[] config, final KeySegmentMapper mapper) { this.identity = ClusteredEhcacheIdentity.deserialize(config); this.responseFactory = new EhcacheEntityResponseFactory(); 
this.clientCommunicator = services.getService(new CommunicatorServiceConfiguration()); - OffHeapResources offHeapResources = services.getService(new OffHeapResourcesServiceConfiguration()); - if (offHeapResources == null) { - this.offHeapResourceIdentifiers = Collections.emptySet(); - } else { - this.offHeapResourceIdentifiers = offHeapResources.getAllIdentifiers(); - } - ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, this.offHeapResourceIdentifiers)); + ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, mapper)); if (ehcacheStateService == null) { throw new AssertionError("Server failed to retrieve EhcacheStateService."); } + entityMessenger = services.getService(new BasicServiceConfiguration<>(IEntityMessenger.class)); + if (entityMessenger == null) { + throw new AssertionError("Server failed to retrieve IEntityMessenger service."); + } + this.management = new Management(services, ehcacheStateService, true); } /** @@ -162,7 +195,7 @@ public Class getServiceType() { */ // This method is intended for unit test use; modifications are likely needed for other (monitoring) purposes Map> getConnectedClients() { - final HashMap> clientMap = new HashMap>(); + final HashMap> clientMap = new HashMap<>(); for (Entry entry : clientStateMap.entrySet()) { clientMap.put(entry.getKey(), entry.getValue().getAttachedStores()); } @@ -177,9 +210,9 @@ Map> getConnectedClients() { */ // This method is intended for unit test use; modifications are likely needed for other (monitoring) purposes Map> getInUseStores() { - final HashMap> storeMap = new HashMap>(); + final HashMap> storeMap = new HashMap<>(); for (Map.Entry> entry : storeClientMap.entrySet()) { - storeMap.put(entry.getKey(), Collections.unmodifiableSet(new HashSet(entry.getValue()))); + storeMap.put(entry.getKey(), Collections.unmodifiableSet(new HashSet<>(entry.getValue()))); } return Collections.unmodifiableMap(storeMap); } @@ -188,7 +221,9 @@ Map> 
getInUseStores() { public void connected(ClientDescriptor clientDescriptor) { if (!clientStateMap.containsKey(clientDescriptor)) { LOGGER.info("Connecting {}", clientDescriptor); - clientStateMap.put(clientDescriptor, new ClientState()); + ClientState clientState = new ClientState(); + clientStateMap.put(clientDescriptor, clientState); + management.clientConnected(clientDescriptor, clientState); } else { // This is logically an AssertionError LOGGER.error("Client {} already registered as connected", clientDescriptor); @@ -207,7 +242,8 @@ public void disconnected(ClientDescriptor clientDescriptor) { Iterator> it = clientsWaitingForInvalidation.entrySet().iterator(); while (it.hasNext()) { Entry next = it.next(); - if (next.getValue().clientDescriptorWaitingForInvalidation.equals(clientDescriptor)) { + ClientDescriptor clientDescriptorWaitingForInvalidation = next.getValue().clientDescriptorWaitingForInvalidation; + if (clientDescriptorWaitingForInvalidation != null && clientDescriptorWaitingForInvalidation.equals(clientDescriptor)) { it.remove(); } } @@ -221,27 +257,39 @@ public void disconnected(ClientDescriptor clientDescriptor) { for (String storeId : clientState.getAttachedStores()) { detachStore(clientDescriptor, storeId); } + management.clientDisconnected(clientDescriptor, clientState); + } + UUID clientId = clientIdMap.remove(clientDescriptor); + if (clientId != null) { + try { + entityMessenger.messageSelf(new ClientIDTrackerMessage(clientId)); + } catch (MessageCodecException mce) { + throw new AssertionError("Codec error", mce); + } + trackedClients.remove(clientId); + ehcacheStateService.getClientMessageTracker().remove(clientId); } } @Override public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEntityMessage message) { try { - if (this.offHeapResourceIdentifiers.isEmpty()) { - throw new ServerMisconfigurationException("Server started without any offheap resources defined." 
+ - " Check your server configuration and define at least one offheap resource."); - } + clearClientTrackedAtReconnectComplete(); - switch (message.getType()) { - case LIFECYCLE_OP: - return invokeLifeCycleOperation(clientDescriptor, (LifecycleMessage) message); - case SERVER_STORE_OP: + if (message instanceof EhcacheOperationMessage) { + EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; + EhcacheMessageType messageType = operationMessage.getMessageType(); + if (isStoreOperationMessage(messageType)) { return invokeServerStoreOperation(clientDescriptor, (ServerStoreOpMessage) message); - case STATE_REPO_OP: + } else if (isLifecycleMessage(messageType)) { + return invokeLifeCycleOperation(clientDescriptor, (LifecycleMessage) message); + } else if (isStateRepoOperationMessage(messageType)) { return invokeStateRepositoryOperation(clientDescriptor, (StateRepositoryOpMessage) message); - default: - throw new IllegalMessageException("Unknown message : " + message); + } else if (isPassiveReplicationMessage(messageType)) { + return responseFactory.success(); + } } + throw new AssertionError("Unsupported message : " + message.getClass()); } catch (ClusterException e) { return responseFactory.failure(e); } catch (Exception e) { @@ -250,63 +298,166 @@ public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEn } } + private void clearClientTrackedAtReconnectComplete() { + + if (!reconnectComplete.get()) { + if (reconnectComplete.compareAndSet(false, true)) { + ehcacheStateService.getClientMessageTracker().reconcileTrackedClients(trackedClients); + } + } + + } + @Override public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedReconnectData) { + if (inflightInvalidations == null) { + throw new AssertionError("Load existing was not invoked before handleReconnect"); + } ClientState clientState = this.clientStateMap.get(clientDescriptor); if (clientState == null) { throw new AssertionError("Client "+ 
clientDescriptor +" trying to reconnect is not connected to entity"); } clientState.attach(); - Set cacheIds = reconnectDataCodec.decode(extendedReconnectData); + ReconnectMessage reconnectMessage = reconnectMessageCodec.decode(extendedReconnectData); + addClientId(clientDescriptor, reconnectMessage.getClientId()); + Set cacheIds = reconnectMessage.getAllCaches(); for (final String cacheId : cacheIds) { - ServerStoreImpl serverStore = ehcacheStateService.getStore(cacheId); + ServerSideServerStore serverStore = ehcacheStateService.getStore(cacheId); if (serverStore == null) { //Client removes the cache's reference only when destroy has successfully completed //This happens only when client thinks destroy is still not complete LOGGER.warn("ServerStore '{}' does not exist as expected by Client '{}'.", cacheId, clientDescriptor); continue; } - serverStore.setEvictionListener(new ServerStoreEvictionListener() { - @Override - public void onEviction(long key) { - invalidateHashAfterEviction(cacheId, key); - } - }); + addInflightInvalidationsForStrongCache(clientDescriptor, reconnectMessage, cacheId, serverStore); + + serverStore.setEvictionListener(key -> invalidateHashAfterEviction(cacheId, key)); attachStore(clientDescriptor, cacheId); } LOGGER.info("Client '{}' successfully reconnected to newly promoted ACTIVE after failover.", clientDescriptor); + management.clientReconnected(clientDescriptor, clientState); + } + + private void addInflightInvalidationsForStrongCache(ClientDescriptor clientDescriptor, ReconnectMessage reconnectMessage, String cacheId, ServerSideServerStore serverStore) { + if (serverStore.getStoreConfiguration().getConsistency().equals(Consistency.STRONG)) { + Set invalidationsInProgress = reconnectMessage.getInvalidationsInProgress(cacheId); + LOGGER.debug("Number of Inflight Invalidations from client ID {} for cache {} is {}.", reconnectMessage.getClientId(), cacheId, invalidationsInProgress + .size()); + inflightInvalidations.compute(cacheId, (s, 
tuples) -> { + if (tuples == null) { + tuples = new ArrayList<>(); + } + tuples.add(new InvalidationTuple(clientDescriptor, invalidationsInProgress, reconnectMessage.isClearInProgress(cacheId))); + return tuples; + }); + } } @Override public void synchronizeKeyToPassive(PassiveSynchronizationChannel syncChannel, int concurrencyKey) { - throw new UnsupportedOperationException("Active/passive is not supported yet"); + LOGGER.info("Sync started for concurrency key {}.", concurrencyKey); + if (concurrencyKey == DEFAULT_KEY) { + ServerSideConfiguration configuration; + if (ehcacheStateService.getDefaultServerResource() == null) { + configuration = new ServerSideConfiguration(ehcacheStateService.getSharedResourcePools()); + } else { + configuration = new ServerSideConfiguration(ehcacheStateService.getDefaultServerResource(), ehcacheStateService.getSharedResourcePools()); + } + + Map storeConfigs = new HashMap<>(); + for (String storeName : ehcacheStateService.getStores()) { + ServerSideServerStore store = ehcacheStateService.getStore(storeName); + storeConfigs.put(storeName, store.getStoreConfiguration()); + } + + syncChannel.synchronizeToPassive(new EhcacheStateSyncMessage(configuration, storeConfigs)); + } else { + Long dataSizeThreshold = Long.getLong(SYNC_DATA_SIZE_PROP, DEFAULT_SYNC_DATA_SIZE_THRESHOLD); + AtomicLong size = new AtomicLong(0); + ehcacheStateService.getStores() + .forEach(name -> { + ServerSideServerStore store = ehcacheStateService.getStore(name); + final AtomicReference> mappingsToSend = new AtomicReference<>(new HashMap<>()); + store.getSegments().get(concurrencyKey - DATA_CONCURRENCY_KEY_OFFSET).keySet() + .forEach(key -> { + final Chain chain; + try { + chain = store.get(key); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } + for (Element element : chain) { + size.addAndGet(element.getPayload().remaining()); + } + mappingsToSend.get().put(key, chain); + if 
(size.get() > dataSizeThreshold) { + syncChannel.synchronizeToPassive(new EhcacheDataSyncMessage(name, mappingsToSend.get())); + mappingsToSend.set(new HashMap<>()); + size.set(0); + } + }); + if (!mappingsToSend.get().isEmpty()) { + syncChannel.synchronizeToPassive(new EhcacheDataSyncMessage(name, mappingsToSend.get())); + mappingsToSend.set(new HashMap<>()); + size.set(0); + } + }); + } + LOGGER.info("Sync complete for concurrency key {}.", concurrencyKey); } @Override public void createNew() { - //nothing to do + management.init(); } @Override public void loadExisting() { - //nothing to do + ehcacheStateService.loadExisting(); + LOGGER.debug("Preparing for handling Inflight Invalidations and independent Passive Evictions in loadExisting"); + inflightInvalidations = new ConcurrentHashMap<>(); + addInflightInvalidationsForEventualCaches(); + reconnectComplete.set(false); + } + + private void addInflightInvalidationsForEventualCaches() { + Set caches = ehcacheStateService.getStores(); + caches.forEach(cacheId -> { + InvalidationTracker invalidationTracker = ehcacheStateService.removeInvalidationtracker(cacheId); + if (invalidationTracker != null) { + inflightInvalidations.compute(cacheId, (s, invalidationTuples) -> { + if (invalidationTuples == null) { + invalidationTuples = new ArrayList<>(); + } + invalidationTuples.add(new InvalidationTuple(null, invalidationTracker.getInvalidationMap() + .keySet(), invalidationTracker.isClearInProgress())); + return invalidationTuples; + }); + invalidationTracker.getInvalidationMap().clear(); + } + }); } private void validateClientConnected(ClientDescriptor clientDescriptor) throws ClusterException { ClientState clientState = this.clientStateMap.get(clientDescriptor); - if (clientState == null) { - throw new LifecycleException("Client " + clientDescriptor + " is not connected to the Clustered Tier Manager"); - } + validateClientConnected(clientDescriptor, clientState); } private void validateClientAttached(ClientDescriptor 
clientDescriptor) throws ClusterException { - validateClientConnected(clientDescriptor); - if (!clientStateMap.get(clientDescriptor).isAttached()) { + ClientState clientState = this.clientStateMap.get(clientDescriptor); + validateClientConnected(clientDescriptor, clientState); + if (!clientState.isAttached()) { throw new LifecycleException("Client " + clientDescriptor + " is not attached to the Clustered Tier Manager"); } } + private static void validateClientConnected(ClientDescriptor clientDescriptor, ClientState clientState) throws LifecycleException { + if (clientState == null) { + throw new LifecycleException("Client " + clientDescriptor + " is not connected to the Clustered Tier Manager"); + } + } + private void validateClusteredTierManagerConfigured(ClientDescriptor clientDescriptor) throws ClusterException { validateClientAttached(clientDescriptor); if (!ehcacheStateService.isConfigured()) { @@ -315,7 +466,7 @@ private void validateClusteredTierManagerConfigured(ClientDescriptor clientDescr } private EhcacheEntityResponse invokeLifeCycleOperation(ClientDescriptor clientDescriptor, LifecycleMessage message) throws ClusterException { - switch (message.operation()) { + switch (message.getMessageType()) { case CONFIGURE: configure(clientDescriptor, (ConfigureStoreManager) message); break; @@ -335,7 +486,7 @@ private EhcacheEntityResponse invokeLifeCycleOperation(ClientDescriptor clientDe destroyServerStore(clientDescriptor, (DestroyServerStore) message); break; default: - throw new IllegalMessageException("Unknown LifeCycle operation " + message); + throw new AssertionError("Unsupported LifeCycle operation " + message); } return responseFactory.success(); } @@ -343,7 +494,7 @@ private EhcacheEntityResponse invokeLifeCycleOperation(ClientDescriptor clientDe private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor clientDescriptor, ServerStoreOpMessage message) throws ClusterException { validateClusteredTierManagerConfigured(clientDescriptor); - 
ServerStoreImpl cacheStore = ehcacheStateService.getStore(message.getCacheId()); + ServerSideServerStore cacheStore = ehcacheStateService.getStore(message.getCacheId()); if (cacheStore == null) { // An operation on a non-existent store should never get out of the client throw new LifecycleException("Clustered tier does not exist : '" + message.getCacheId() + "'"); @@ -354,22 +505,70 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client throw new LifecycleException("Client not attached to clustered tier '" + message.getCacheId() + "'"); } - switch (message.operation()) { - case GET: { + // This logic totally counts on the fact that invokes will only happen + // after all handleReconnects are done, else this is flawed. + if (inflightInvalidations != null && inflightInvalidations.containsKey(message.getCacheId())) { + inflightInvalidations.computeIfPresent(message.getCacheId(), (cacheId, tuples) -> { + LOGGER.debug("Stalling all operations for cache {} for firing inflight invalidations again.", cacheId); + tuples.forEach(invalidationState -> { + if (invalidationState.isClearInProgress()) { + invalidateAll(invalidationState.getClientDescriptor(), cacheId); + } + invalidationState.getInvalidationsInProgress() + .forEach(hashInvalidationToBeResent -> invalidateHashForClient(invalidationState.getClientDescriptor(), cacheId, hashInvalidationToBeResent)); + }); + return null; + }); + } + + switch (message.getMessageType()) { + case GET_STORE: { ServerStoreOpMessage.GetMessage getMessage = (ServerStoreOpMessage.GetMessage) message; - return responseFactory.response(cacheStore.get(getMessage.getKey())); + try { + return responseFactory.response(cacheStore.get(getMessage.getKey())); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } } case APPEND: { - ServerStoreOpMessage.AppendMessage appendMessage = (ServerStoreOpMessage.AppendMessage)message; - 
cacheStore.append(appendMessage.getKey(), appendMessage.getPayload()); - invalidateHashForClient(clientDescriptor, appendMessage.getCacheId(), appendMessage.getKey()); + if (!isMessageDuplicate(message)) { + ServerStoreOpMessage.AppendMessage appendMessage = (ServerStoreOpMessage.AppendMessage)message; + final Chain newChain; + try { + cacheStore.append(appendMessage.getKey(), appendMessage.getPayload()); + newChain = cacheStore.get(appendMessage.getKey()); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } + sendMessageToSelfAndDeferRetirement(appendMessage, newChain); + invalidateHashForClient(clientDescriptor, appendMessage.getCacheId(), appendMessage.getKey()); + } return responseFactory.success(); } case GET_AND_APPEND: { ServerStoreOpMessage.GetAndAppendMessage getAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage)message; - EhcacheEntityResponse response = responseFactory.response(cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload())); - invalidateHashForClient(clientDescriptor, getAndAppendMessage.getCacheId(), getAndAppendMessage.getKey()); - return response; + LOGGER.trace("Message {} : GET_AND_APPEND on key {} from client {}", message, getAndAppendMessage.getKey(), getAndAppendMessage.getClientId()); + if (!isMessageDuplicate(message)) { + LOGGER.trace("Message {} : is not duplicate", message); + final Chain result; + final Chain newChain; + try { + result = cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); + newChain = cacheStore.get(getAndAppendMessage.getKey()); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } + sendMessageToSelfAndDeferRetirement(getAndAppendMessage, newChain); + EhcacheEntityResponse response = responseFactory.response(result); + LOGGER.debug("Send invalidations for key {}", 
getAndAppendMessage.getKey()); + invalidateHashForClient(clientDescriptor, getAndAppendMessage.getCacheId(), getAndAppendMessage.getKey()); + return response; + } + try { + return responseFactory.response(cacheStore.get(getAndAppendMessage.getKey())); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } } case REPLACE: { ServerStoreOpMessage.ReplaceAtHeadMessage replaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage) message; @@ -385,13 +584,27 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client return responseFactory.success(); } case CLEAR: { - String cacheId = message.getCacheId(); - cacheStore.clear(); - invalidateAll(clientDescriptor, cacheId); + if (!isMessageDuplicate(message)) { + String cacheId = message.getCacheId(); + try { + cacheStore.clear(); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } + invalidateAll(clientDescriptor, cacheId); + } return responseFactory.success(); } default: - throw new IllegalMessageException("Unknown ServerStore operation : " + message); + throw new AssertionError("Unsupported ServerStore operation : " + message); + } + } + + private void sendMessageToSelfAndDeferRetirement(KeyBasedServerStoreOpMessage message, Chain result) { + try { + entityMessenger.messageSelfAndDeferRetirement(message, new ChainReplicationMessage(message.getCacheId(), message.getKey(), result, message.getId(), message.getClientId())); + } catch (MessageCodecException e) { + throw new AssertionError("Codec error", e); } } @@ -409,8 +622,7 @@ private void invalidateHashAfterEviction(String cacheId, long key) { try { clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, serverInvalidateHash(cacheId, key)); } catch (MessageCodecException mce) { - //TODO: what should be done here? 
- LOGGER.error("Codec error", mce); + throw new AssertionError("Codec error", mce); } } } @@ -419,26 +631,24 @@ private void invalidateHashForClient(ClientDescriptor originatingClientDescripto int invalidationId = invalidationIdGenerator.getAndIncrement(); Set clientsToInvalidate = Collections.newSetFromMap(new ConcurrentHashMap()); clientsToInvalidate.addAll(storeClientMap.get(cacheId)); - clientsToInvalidate.remove(originatingClientDescriptor); - - InvalidationHolder invalidationHolder = null; - if (ehcacheStateService.getStore(cacheId).getStoreConfiguration().getConsistency() == Consistency.STRONG) { - invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId, key); - clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); + if (originatingClientDescriptor != null) { + clientsToInvalidate.remove(originatingClientDescriptor); } + InvalidationHolder invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId, key); + clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); + LOGGER.debug("SERVER: requesting {} client(s) invalidation of hash {} in cache {} (ID {})", clientsToInvalidate.size(), key, cacheId, invalidationId); for (ClientDescriptor clientDescriptorThatHasToInvalidate : clientsToInvalidate) { LOGGER.debug("SERVER: asking client {} to invalidate hash {} from cache {} (ID {})", clientDescriptorThatHasToInvalidate, key, cacheId, invalidationId); try { clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, clientInvalidateHash(cacheId, key, invalidationId)); } catch (MessageCodecException mce) { - //TODO: what should be done here? 
- LOGGER.error("Codec error", mce); + throw new AssertionError("Codec error", mce); } } - if (invalidationHolder != null && clientsToInvalidate.isEmpty()) { + if (clientsToInvalidate.isEmpty()) { clientInvalidated(invalidationHolder.clientDescriptorWaitingForInvalidation, invalidationId); } } @@ -447,26 +657,24 @@ private void invalidateAll(ClientDescriptor originatingClientDescriptor, String int invalidationId = invalidationIdGenerator.getAndIncrement(); Set clientsToInvalidate = Collections.newSetFromMap(new ConcurrentHashMap()); clientsToInvalidate.addAll(storeClientMap.get(cacheId)); - clientsToInvalidate.remove(originatingClientDescriptor); - - InvalidationHolder invalidationHolder = null; - if (ehcacheStateService.getStore(cacheId).getStoreConfiguration().getConsistency() == Consistency.STRONG) { - invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId); - clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); + if (originatingClientDescriptor != null) { + clientsToInvalidate.remove(originatingClientDescriptor); } + InvalidationHolder invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId); + clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); + LOGGER.debug("SERVER: requesting {} client(s) invalidation of all in cache {} (ID {})", clientsToInvalidate.size(), cacheId, invalidationId); for (ClientDescriptor clientDescriptorThatHasToInvalidate : clientsToInvalidate) { LOGGER.debug("SERVER: asking client {} to invalidate all from cache {} (ID {})", clientDescriptorThatHasToInvalidate, cacheId, invalidationId); try { clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, clientInvalidateAll(cacheId, invalidationId)); } catch (MessageCodecException mce) { - //TODO: what should be done here? 
- LOGGER.error("Codec error", mce); + throw new AssertionError("Codec error", mce); } } - if (invalidationHolder != null && clientsToInvalidate.isEmpty()) { + if (clientsToInvalidate.isEmpty()) { clientInvalidated(invalidationHolder.clientDescriptorWaitingForInvalidation, invalidationId); } } @@ -474,23 +682,34 @@ private void invalidateAll(ClientDescriptor originatingClientDescriptor, String private void clientInvalidated(ClientDescriptor clientDescriptor, int invalidationId) { InvalidationHolder invalidationHolder = clientsWaitingForInvalidation.get(invalidationId); - if (ehcacheStateService.getStore(invalidationHolder.cacheId).getStoreConfiguration().getConsistency() == Consistency.STRONG) { - invalidationHolder.clientsHavingToInvalidate.remove(clientDescriptor); - if (invalidationHolder.clientsHavingToInvalidate.isEmpty()) { - if (clientsWaitingForInvalidation.remove(invalidationId) != null) { - try { - Long key = invalidationHolder.key; - if (key == null) { + if (invalidationHolder == null) { // Happens when client is re-sending/sending invalidations for which server has lost track since fail-over happened. 
+ LOGGER.debug("Ignoring invalidation from client {} " + clientDescriptor); + return; + } + + invalidationHolder.clientsHavingToInvalidate.remove(clientDescriptor); + if (invalidationHolder.clientsHavingToInvalidate.isEmpty()) { + if (clientsWaitingForInvalidation.remove(invalidationId) != null) { + try { + Long key = invalidationHolder.key; + boolean isStrong = ehcacheStateService.getStore(invalidationHolder.cacheId).getStoreConfiguration().getConsistency() == Consistency.STRONG; + if (key == null) { + if (isStrong) { clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, allInvalidationDone(invalidationHolder.cacheId)); LOGGER.debug("SERVER: notifying originating client that all other clients invalidated all in cache {} from {} (ID {})", invalidationHolder.cacheId, clientDescriptor, invalidationId); } else { + entityMessenger.messageSelf(new ClearInvalidationCompleteMessage(invalidationHolder.cacheId)); + } + } else { + if (isStrong) { clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, hashInvalidationDone(invalidationHolder.cacheId, key)); LOGGER.debug("SERVER: notifying originating client that all other clients invalidated key {} in cache {} from {} (ID {})", key, invalidationHolder.cacheId, clientDescriptor, invalidationId); + } else { + entityMessenger.messageSelf(new InvalidationCompleteMessage(invalidationHolder.cacheId, key)); } - } catch (MessageCodecException mce) { - //TODO: what should be done here? - LOGGER.error("Codec error", mce); } + } catch (MessageCodecException mce) { + throw new AssertionError("Codec error", mce); } } } @@ -507,7 +726,6 @@ private void clientInvalidated(ClientDescriptor clientDescriptor, int invalidati */ @Override public void destroy() { - /* * Ensure the allocated stores are closed out. 
*/ @@ -535,8 +753,11 @@ public void destroy() { */ private void configure(ClientDescriptor clientDescriptor, ConfigureStoreManager message) throws ClusterException { validateClientConnected(clientDescriptor); - ehcacheStateService.configure(message); + if (ehcacheStateService.getClientMessageTracker().isConfigureApplicable(message.getClientId(), message.getId())) { + ehcacheStateService.configure(message.getConfiguration()); + } this.clientStateMap.get(clientDescriptor).attach(); + management.sharedPoolsConfigured(); } /** @@ -549,8 +770,22 @@ private void configure(ClientDescriptor clientDescriptor, ConfigureStoreManager */ private void validate(ClientDescriptor clientDescriptor, ValidateStoreManager message) throws ClusterException { validateClientConnected(clientDescriptor); - ehcacheStateService.validate(message); + if (trackedClients.contains(message.getClientId())) { + throw new InvalidClientIdException("Client ID : " + message.getClientId() + " is already being tracked by Active paired with Client : " + clientDescriptor); + } else if (clientIdMap.get(clientDescriptor) != null) { + throw new LifecycleException("Client : " + clientDescriptor + " is already being tracked with Client Id : " + clientIdMap.get(clientDescriptor)); + } + + addClientId(clientDescriptor, message.getClientId()); + ehcacheStateService.validate(message.getConfiguration()); this.clientStateMap.get(clientDescriptor).attach(); + management.clientValidated(clientDescriptor, this.clientStateMap.get(clientDescriptor)); + } + + private void addClientId(ClientDescriptor clientDescriptor, UUID clientId) { + LOGGER.info("Adding Client {} with client ID : {} ", clientDescriptor, clientId); + clientIdMap.put(clientDescriptor, clientId); + trackedClients.add(clientId); } /** @@ -576,21 +811,29 @@ private void createServerStore(ClientDescriptor clientDescriptor, CreateServerSt if(createServerStore.getStoreConfiguration().getPoolAllocation() instanceof PoolAllocation.Unknown) { throw new 
LifecycleException("Clustered tier can't be created with an Unknown resource pool"); } - + boolean isDuplicate = isMessageDuplicate(createServerStore); final String name = createServerStore.getName(); // client cache identifier/name + ServerSideServerStore serverStore; + if (!isDuplicate) { - LOGGER.info("Client {} creating new clustered tier '{}'", clientDescriptor, name); + LOGGER.info("Client {} creating new clustered tier '{}'", clientDescriptor, name); - ServerStoreConfiguration storeConfiguration = createServerStore.getStoreConfiguration(); + ServerStoreConfiguration storeConfiguration = createServerStore.getStoreConfiguration(); - ServerStoreImpl serverStore = ehcacheStateService.createStore(name, storeConfiguration); - serverStore.setEvictionListener(new ServerStoreEvictionListener() { - @Override - public void onEviction(long key) { - invalidateHashAfterEviction(name, key); - } - }); + serverStore = ehcacheStateService.createStore(name, storeConfiguration); + + management.serverStoreCreated(name); + } else { + serverStore = ehcacheStateService.getStore(name); + } + + serverStore.setEvictionListener(key -> invalidateHashAfterEviction(name, key)); attachStore(clientDescriptor, name); + try { + entityMessenger.messageSelfAndDeferRetirement(createServerStore, new PassiveReplicationMessage.CreateServerStoreReplicationMessage(createServerStore)); + } catch (MessageCodecException e) { + throw new AssertionError("Codec error", e); + } } /** @@ -611,7 +854,7 @@ private void validateServerStore(ClientDescriptor clientDescriptor, ValidateServ ServerStoreConfiguration clientConfiguration = validateServerStore.getStoreConfiguration(); LOGGER.info("Client {} validating clustered tier '{}'", clientDescriptor, name); - ServerStoreImpl store = ehcacheStateService.getStore(name); + ServerSideServerStore store = ehcacheStateService.getStore(name); if (store != null) { storeCompatibility.verify(store.getStoreConfiguration(), clientConfiguration); attachStore(clientDescriptor, 
name); @@ -635,7 +878,7 @@ private void releaseServerStore(ClientDescriptor clientDescriptor, ReleaseServer String name = releaseServerStore.getName(); LOGGER.info("Client {} releasing clustered tier '{}'", clientDescriptor, name); - ServerStoreImpl store = ehcacheStateService.getStore(name); + ServerSideServerStore store = ehcacheStateService.getStore(name); if (store != null) { boolean removedFromClient = clientState.removeStore(name); @@ -643,6 +886,8 @@ private void releaseServerStore(ClientDescriptor clientDescriptor, ReleaseServer if (!removedFromClient) { throw new InvalidStoreException("Clustered tier '" + name + "' is not in use by client"); + } else { + management.storeReleased(clientDescriptor, clientStateMap.get(clientDescriptor), name); } } else { throw new InvalidStoreException("Clustered tier '" + name + "' does not exist"); @@ -666,9 +911,24 @@ private void destroyServerStore(ClientDescriptor clientDescriptor, DestroyServer throw new ResourceBusyException("Cannot destroy clustered tier '" + name + "': in use by " + clients.size() + " other client(s)"); } - LOGGER.info("Client {} destroying clustered tier '{}'", clientDescriptor, name); - ehcacheStateService.destroyServerStore(name); + boolean isDuplicate = isMessageDuplicate(destroyServerStore); + + if (!isDuplicate) { + LOGGER.info("Client {} destroying clustered tier '{}'", clientDescriptor, name); + management.serverStoreDestroyed(name); + ehcacheStateService.destroyServerStore(name); + } + storeClientMap.remove(name); + try { + entityMessenger.messageSelfAndDeferRetirement(destroyServerStore, new PassiveReplicationMessage.DestroyServerStoreReplicationMessage(destroyServerStore)); + } catch (MessageCodecException e) { + throw new AssertionError("Codec error", e); + } + } + + private boolean isMessageDuplicate(EhcacheEntityMessage message) { + return ehcacheStateService.getClientMessageTracker().isDuplicate(message.getId(), message.getClientId()); } /** @@ -687,12 +947,12 @@ private void 
attachStore(ClientDescriptor clientDescriptor, String storeId) { Set clients = storeClientMap.get(storeId); Set newClients; if (clients == null) { - newClients = new HashSet(); + newClients = new HashSet<>(); newClients.add(clientDescriptor); updated = (storeClientMap.putIfAbsent(storeId, newClients) == null); } else if (!clients.contains(clientDescriptor)) { - newClients = new HashSet(clients); + newClients = new HashSet<>(clients); newClients.add(clientDescriptor); updated = storeClientMap.replace(storeId, clients, newClients); @@ -706,6 +966,8 @@ private void attachStore(ClientDescriptor clientDescriptor, String storeId) { clientState.addStore(storeId); LOGGER.info("Client {} attached to clustered tier '{}'", clientDescriptor, storeId); + + management.storeAttached(clientDescriptor, clientState, storeId); } /** @@ -725,7 +987,7 @@ private boolean detachStore(ClientDescriptor clientDescriptor, String storeId) { Set clients = storeClientMap.get(storeId); if (clients != null && clients.contains(clientDescriptor)) { wasRegistered = true; - Set newClients = new HashSet(clients); + Set newClients = new HashSet<>(clients); newClients.remove(clientDescriptor); updated = storeClientMap.replace(storeId, clients, newClients); } else { @@ -744,38 +1006,27 @@ ConcurrentMap getClientsWaitingForInvalidation() { return clientsWaitingForInvalidation; } - /** - * Represents a client's state against an {@link EhcacheActiveEntity}. - */ - private static class ClientState { - /** - * Indicates if the client has either configured or validated with clustered store manager. - */ - private boolean attached = false; - - /** - * The set of stores to which the client has attached. 
- */ - private final Set attachedStores = new HashSet(); - - boolean isAttached() { - return attached; - } + private static class InvalidationTuple { + private final ClientDescriptor clientDescriptor; + private final Set invalidationsInProgress; + private final boolean isClearInProgress; - void attach() { - this.attached = true; + InvalidationTuple(ClientDescriptor clientDescriptor, Set invalidationsInProgress, boolean isClearInProgress) { + this.clientDescriptor = clientDescriptor; + this.invalidationsInProgress = invalidationsInProgress; + this.isClearInProgress = isClearInProgress; } - boolean addStore(String storeName) { - return this.attachedStores.add(storeName); + ClientDescriptor getClientDescriptor() { + return clientDescriptor; } - boolean removeStore(String storeName) { - return this.attachedStores.remove(storeName); + Set getInvalidationsInProgress() { + return invalidationsInProgress; } - Set getAttachedStores() { - return Collections.unmodifiableSet(new HashSet(this.attachedStores)); + boolean isClearInProgress() { + return isClearInProgress; } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java new file mode 100644 index 0000000000..a4a81c0ffb --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java @@ -0,0 +1,64 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server; + +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; +import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessage; +import org.terracotta.entity.ExecutionStrategy; + +/** + * EhcacheExecutionStrategy + */ +class EhcacheExecutionStrategy implements ExecutionStrategy { + @Override + public Location getExecutionLocation(EhcacheEntityMessage message) { + if (message instanceof ServerStoreOpMessage.ReplaceAtHeadMessage || message instanceof ServerStoreOpMessage.ClearMessage) { + // Server store operation needing replication + return Location.BOTH; + } else if (message instanceof ServerStoreOpMessage) { + // Server store operation not needing replication + return Location.ACTIVE; + } else if (message instanceof LifecycleMessage.ConfigureStoreManager) { + return Location.BOTH; + } else if (message instanceof LifecycleMessage.ValidateStoreManager) { + return Location.ACTIVE; + } else if (message instanceof LifecycleMessage.CreateServerStore) { + return Location.BOTH; + } else if (message instanceof LifecycleMessage.ValidateServerStore) { + return Location.ACTIVE; + } else if (message instanceof LifecycleMessage.ReleaseServerStore) { + return Location.ACTIVE; + } else if (message instanceof LifecycleMessage.DestroyServerStore) { + return Location.BOTH; + } else if (message instanceof StateRepositoryOpMessage.PutIfAbsentMessage) { + // State repository operation needing replication + return Location.BOTH; + } else if (message instanceof StateRepositoryOpMessage) { + // State 
repository operation not needing replication + return Location.ACTIVE; + } else if (message instanceof PassiveReplicationMessage) { + return Location.PASSIVE; + } else if (message instanceof EhcacheSyncMessage) { + throw new AssertionError("Unexpected use of ExecutionStrategy for sync messages"); + } + throw new AssertionError("Unknown message type: " + message.getClass()); + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index d271e4bc19..d91468c8e4 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.server; +import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; @@ -25,13 +26,23 @@ import org.ehcache.clustered.common.internal.exceptions.ServerMisconfigurationException; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; +import 
org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheStateSyncMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessage; +import org.ehcache.clustered.server.management.Management; +import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.InvalidationTracker; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,164 +52,275 @@ import org.terracotta.offheapresource.OffHeapResources; import java.util.Collections; +import java.util.Map; import java.util.Set; import java.util.UUID; +import java.util.concurrent.TimeoutException; + +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveReplicationMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; class EhcachePassiveEntity implements PassiveServerEntity { private static final Logger LOGGER = LoggerFactory.getLogger(EhcachePassiveEntity.class); private final UUID identity; - private final Set offHeapResourceIdentifiers; private final 
EhcacheStateService ehcacheStateService; + private final Management management; @Override public void invoke(EhcacheEntityMessage message) { try { - if (this.offHeapResourceIdentifiers.isEmpty()) { - throw new ServerMisconfigurationException("Server started without any offheap resources defined." + - " Check your server configuration and define at least one offheap resource."); - } - - switch (message.getType()) { - case LIFECYCLE_OP: + if (message instanceof EhcacheOperationMessage) { + EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; + EhcacheMessageType messageType = operationMessage.getMessageType(); + if (isStoreOperationMessage(messageType)) { + try { + invokeServerStoreOperation((ServerStoreOpMessage)message); + } catch (ClusterException e) { + // Store operation should not be critical enough to fail a passive + LOGGER.error("Unexpected exception raised during operation: " + message, e); + } + } else if (isLifecycleMessage(messageType)) { invokeLifeCycleOperation((LifecycleMessage) message); - break; - case SERVER_STORE_OP: - invokeServerStoreOperation((ServerStoreOpMessage)message); - break; - case STATE_REPO_OP: - ehcacheStateService.getStateRepositoryManager().invoke((StateRepositoryOpMessage)message); - break; - default: - throw new IllegalMessageException("Unknown message : " + message); + } else if (isStateRepoOperationMessage(messageType)) { + try { + ehcacheStateService.getStateRepositoryManager().invoke((StateRepositoryOpMessage)message); + } catch (ClusterException e) { + // State repository operations should not be critical enough to fail a passive + LOGGER.error("Unexpected exception raised during operation: " + message, e); + } + } else if (isPassiveReplicationMessage(messageType)) { + try { + invokeRetirementMessages((PassiveReplicationMessage)message); + } catch (ClusterException e) { + LOGGER.error("Unexpected exception raised during operation: " + message, e); + } + } else { + throw new 
AssertionError("Unsupported EhcacheOperationMessage: " + operationMessage.getMessageType()); + } + } else if (message instanceof EhcacheSyncMessage) { + invokeSyncOperation((EhcacheSyncMessage) message); + } else { + throw new AssertionError("Unsupported EhcacheEntityMessage: " + message.getClass()); } - } catch (Exception e) { - LOGGER.error("Unexpected exception raised during operation: " + message, e); + } catch (ClusterException e) { + // Reaching here means a lifecycle or sync operation failed + throw new IllegalStateException("A lifecycle or sync operation failed", e); } - } - EhcachePassiveEntity(ServiceRegistry services, byte[] config) { + EhcachePassiveEntity(ServiceRegistry services, byte[] config, final KeySegmentMapper mapper) { this.identity = ClusteredEhcacheIdentity.deserialize(config); - OffHeapResources offHeapResources = services.getService(new BasicServiceConfiguration(OffHeapResources.class)); - if (offHeapResources == null) { - this.offHeapResourceIdentifiers = Collections.emptySet(); - } else { - this.offHeapResourceIdentifiers = offHeapResources.getAllIdentifiers(); - } - ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, this.offHeapResourceIdentifiers)); + ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, mapper)); if (ehcacheStateService == null) { throw new AssertionError("Server failed to retrieve EhcacheStateService."); } + management = new Management(services, ehcacheStateService, false); + } + + private void invokeRetirementMessages(PassiveReplicationMessage message) throws ClusterException { + + switch (message.getMessageType()) { + case CHAIN_REPLICATION_OP: + LOGGER.debug("Chain Replication message for msgId {} & client Id {}", message.getId(), message.getClientId()); + ChainReplicationMessage retirementMessage = (ChainReplicationMessage)message; + ServerSideServerStore cacheStore = ehcacheStateService.getStore(retirementMessage.getCacheId()); + if (cacheStore == null) 
{ + // An operation on a non-existent store should never get out of the client + throw new LifecycleException("Clustered tier does not exist : '" + retirementMessage.getCacheId() + "'"); + } + cacheStore.put(retirementMessage.getKey(), retirementMessage.getChain()); + applyMessage(message); + trackHashInvalidationForEventualCache(retirementMessage); + break; + case INVALIDATION_COMPLETE: + untrackHashInvalidationForEventualCache((InvalidationCompleteMessage)message); + break; + case CLEAR_INVALIDATION_COMPLETE: + ehcacheStateService.getInvalidationTracker(((ClearInvalidationCompleteMessage)message).getCacheId()).setClearInProgress(false); + break; + case CREATE_SERVER_STORE_REPLICATION: + ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); + PassiveReplicationMessage.CreateServerStoreReplicationMessage createMessage = (PassiveReplicationMessage.CreateServerStoreReplicationMessage) message; + createServerStore(createMessage.getStoreName(), createMessage.getStoreConfiguration()); + break; + case DESTROY_SERVER_STORE_REPLICATION: + ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); + PassiveReplicationMessage.DestroyServerStoreReplicationMessage destroyMessage = (PassiveReplicationMessage.DestroyServerStoreReplicationMessage) message; + destroyServerStore(destroyMessage.getStoreName()); + break; + case CLIENT_ID_TRACK_OP: + ehcacheStateService.getClientMessageTracker().remove(message.getClientId()); + break; + default: + throw new AssertionError("Unsupported Retirement Message : " + message); + } + } + + private void untrackHashInvalidationForEventualCache(InvalidationCompleteMessage message) { + ehcacheStateService.getInvalidationTracker(message.getCacheId()).getInvalidationMap().computeIfPresent(message.getKey(), (key, count) -> { + if (count == 1) { + return null; + } + return count - 1; + }); + } + + private void trackHashInvalidationForEventualCache(ChainReplicationMessage 
retirementMessage) { + InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(retirementMessage.getCacheId()); + if (invalidationTracker != null) { + invalidationTracker.getInvalidationMap().compute(retirementMessage.getKey(), (key, count) -> { + if (count == null) { + return 1; + } else { + return count + 1; + } + }); + } } private void invokeServerStoreOperation(ServerStoreOpMessage message) throws ClusterException { - ServerStoreImpl cacheStore = ehcacheStateService.getStore(message.getCacheId()); + ServerSideServerStore cacheStore = ehcacheStateService.getStore(message.getCacheId()); if (cacheStore == null) { // An operation on a non-existent store should never get out of the client throw new LifecycleException("Clustered tier does not exist : '" + message.getCacheId() + "'"); } - switch (message.operation()) { - //TODO: check if append and getandappend can be combined - case APPEND: { - ServerStoreOpMessage.AppendMessage appendMessage = (ServerStoreOpMessage.AppendMessage)message; - cacheStore.append(appendMessage.getKey(), appendMessage.getPayload()); - break; - } - case GET_AND_APPEND: { - ServerStoreOpMessage.GetAndAppendMessage getAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage)message; - cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); - break; - } + switch (message.getMessageType()) { case REPLACE: { - ServerStoreOpMessage.ReplaceAtHeadMessage replaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage) message; + ServerStoreOpMessage.ReplaceAtHeadMessage replaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage)message; cacheStore.replaceAtHead(replaceAtHeadMessage.getKey(), replaceAtHeadMessage.getExpect(), replaceAtHeadMessage.getUpdate()); break; } case CLEAR: { - cacheStore.clear(); + try { + cacheStore.clear(); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } + 
InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(message.getCacheId()); + if (invalidationTracker != null) { + invalidationTracker.setClearInProgress(true); + } break; } default: - throw new IllegalMessageException("Unknown ServerStore operation : " + message); + throw new AssertionError("Unsupported ServerStore operation : " + message.getMessageType()); } } - private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterException{ - switch (message.operation()) { + private void invokeSyncOperation(EhcacheSyncMessage message) throws ClusterException { + switch (message.getMessageType()) { + case STATE: + EhcacheStateSyncMessage stateSyncMessage = (EhcacheStateSyncMessage) message; + + ehcacheStateService.configure(stateSyncMessage.getConfiguration()); + management.sharedPoolsConfigured(); + + for (Map.Entry entry : stateSyncMessage.getStoreConfigs().entrySet()) { + ehcacheStateService.createStore(entry.getKey(), entry.getValue()); + if(entry.getValue().getConsistency() == Consistency.EVENTUAL) { + ehcacheStateService.addInvalidationtracker(entry.getKey()); + } + management.serverStoreCreated(entry.getKey()); + } + break; + case DATA: + EhcacheDataSyncMessage dataSyncMessage = (EhcacheDataSyncMessage) message; + ServerSideServerStore store = ehcacheStateService.getStore(dataSyncMessage.getCacheId()); + dataSyncMessage.getChainMap().entrySet().forEach(entry -> { + store.put(entry.getKey(), entry.getValue()); + + }); + break; + default: + throw new AssertionError("Unsupported Sync operation " + message.getMessageType()); + } + } + + private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterException { + switch (message.getMessageType()) { case CONFIGURE: - ehcacheStateService.configure((ConfigureStoreManager) message); + configure((ConfigureStoreManager) message); break; - case CREATE_SERVER_STORE: - createServerStore((CreateServerStore) message); + case VALIDATE: + applyMessage(message); break; + 
case CREATE_SERVER_STORE: case DESTROY_SERVER_STORE: - destroyServerStore((DestroyServerStore) message); + ehcacheStateService.getClientMessageTracker().track(message.getId(), message.getClientId()); break; default: - throw new IllegalMessageException("Unknown LifeCycle operation " + message); + throw new AssertionError("Unsupported LifeCycle operation " + message.getMessageType()); } } - private void createServerStore(CreateServerStore createServerStore) throws ClusterException { + private void configure(ConfigureStoreManager message) throws ClusterException { + ehcacheStateService.configure(message.getConfiguration()); + ehcacheStateService.getClientMessageTracker().setEntityConfiguredStamp(message.getClientId(), message.getId()); + management.sharedPoolsConfigured(); + } + + private void applyMessage(EhcacheOperationMessage message) { + ClientMessageTracker clientMessageTracker = ehcacheStateService.getClientMessageTracker(); + clientMessageTracker.applied(message.getId(), message.getClientId()); + } + + private void createServerStore(String storeName, ServerStoreConfiguration configuration) throws ClusterException { if (!ehcacheStateService.isConfigured()) { throw new LifecycleException("Clustered Tier Manager is not configured"); } - if(createServerStore.getStoreConfiguration().getPoolAllocation() instanceof PoolAllocation.Unknown) { + if(configuration.getPoolAllocation() instanceof PoolAllocation.Unknown) { throw new LifecycleException("Clustered tier can't be created with an Unknown resource pool"); } - final String name = createServerStore.getName(); // client cache identifier/name - - LOGGER.info("Creating new clustered tier '{}'", name); - - ServerStoreConfiguration storeConfiguration = createServerStore.getStoreConfiguration(); - ehcacheStateService.createStore(name, storeConfiguration); + LOGGER.info("Creating new clustered tier '{}'", storeName); + ehcacheStateService.createStore(storeName, configuration); + if(configuration.getConsistency() == 
Consistency.EVENTUAL) { + ehcacheStateService.addInvalidationtracker(storeName); + } + management.serverStoreCreated(storeName); } - private void destroyServerStore(DestroyServerStore destroyServerStore) throws ClusterException { + private void destroyServerStore(String storeName) throws ClusterException { if (!ehcacheStateService.isConfigured()) { throw new LifecycleException("Clustered Tier Manager is not configured"); } - String name = destroyServerStore.getName(); - - LOGGER.info("Destroying clustered tier '{}'", name); - ehcacheStateService.destroyServerStore(name); + LOGGER.info("Destroying clustered tier '{}'", storeName); + management.serverStoreDestroyed(storeName); + ehcacheStateService.destroyServerStore(storeName); + ehcacheStateService.removeInvalidationtracker(storeName); } @Override public void startSyncEntity() { - + LOGGER.info("Sync started."); } @Override public void endSyncEntity() { - + LOGGER.info("Sync completed."); } @Override public void startSyncConcurrencyKey(int concurrencyKey) { - + LOGGER.info("Sync started for concurrency key {}.", concurrencyKey); } @Override public void endSyncConcurrencyKey(int concurrencyKey) { - + LOGGER.info("Sync complete for concurrency key {}.", concurrencyKey); } @Override public void createNew() { - - } - - @Override - public void loadExisting() { - + management.init(); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java index 1999aa9cd1..44bc49b0bc 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java @@ -15,13 +15,23 @@ */ package org.ehcache.clustered.server; +import org.ehcache.clustered.common.internal.messages.CommonConfigCodec; +import org.ehcache.clustered.common.internal.messages.ConfigCodec; 
import org.ehcache.clustered.common.internal.messages.EhcacheCodec; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.LifeCycleMessageCodec; +import org.ehcache.clustered.common.internal.messages.ResponseCodec; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpCodec; +import org.ehcache.clustered.common.internal.messages.StateRepositoryOpCodec; +import org.ehcache.clustered.server.internal.messages.EhcacheServerCodec; +import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessageCodec; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessageCodec; +import org.terracotta.entity.CommonServerEntity; import org.terracotta.entity.ConcurrencyStrategy; import org.terracotta.entity.EntityServerService; +import org.terracotta.entity.ExecutionStrategy; import org.terracotta.entity.MessageCodec; -import org.terracotta.entity.PassiveServerEntity; import org.terracotta.entity.ServiceRegistry; import static org.ehcache.clustered.server.ConcurrencyStrategies.defaultConcurrency; @@ -30,7 +40,9 @@ public class EhcacheServerEntityService implements EntityServerService { private static final long ENTITY_VERSION = 1L; - private static final int DEFAULT_CONCURRENCY = 1024; + private static final int DEFAULT_CONCURRENCY = 16; + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(DEFAULT_CONCURRENCY); + private static final ConfigCodec CONFIG_CODEC = new CommonConfigCodec(); @Override public long getVersion() { @@ -44,26 +56,38 @@ public boolean handlesEntityType(String typeName) { @Override public EhcacheActiveEntity createActiveEntity(ServiceRegistry registry, byte[] configuration) { - return new EhcacheActiveEntity(registry, configuration); + return new EhcacheActiveEntity(registry, configuration, DEFAULT_MAPPER); } @Override - public PassiveServerEntity 
createPassiveEntity(ServiceRegistry registry, byte[] configuration) { - return new EhcachePassiveEntity(registry, configuration); + public EhcachePassiveEntity createPassiveEntity(ServiceRegistry registry, byte[] configuration) { + return new EhcachePassiveEntity(registry, configuration, DEFAULT_MAPPER); } @Override public ConcurrencyStrategy getConcurrencyStrategy(byte[] config) { - return defaultConcurrency(DEFAULT_CONCURRENCY); + return defaultConcurrency(DEFAULT_MAPPER); } @Override public MessageCodec getMessageCodec() { - return EhcacheCodec.messageCodec(); + EhcacheCodec ehcacheCodec = new EhcacheCodec(new ServerStoreOpCodec(), + new LifeCycleMessageCodec(CONFIG_CODEC), new StateRepositoryOpCodec(), new ResponseCodec()); + return new EhcacheServerCodec(ehcacheCodec, new PassiveReplicationMessageCodec(CONFIG_CODEC)); } @Override public SyncMessageCodec getSyncMessageCodec() { - return null; + return new EhcacheSyncMessageCodec(CONFIG_CODEC); + } + + @Override + public > AP reconfigureEntity(ServiceRegistry registry, AP oldEntity, byte[] configuration) { + throw new UnsupportedOperationException("Reconfigure not supported in Ehcache"); + } + + @Override + public ExecutionStrategy getExecutionStrategy(byte[] configuration) { + return new EhcacheExecutionStrategy(); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index 4189092850..8b3789f505 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -16,30 +16,24 @@ package org.ehcache.clustered.server; +import java.util.Arrays; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import 
org.ehcache.clustered.common.internal.exceptions.ClusterException; +import org.ehcache.clustered.common.internal.exceptions.*; import org.ehcache.clustered.server.repo.StateRepositoryManager; +import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.InvalidationTracker; +import org.ehcache.clustered.server.state.ResourcePageSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; -import org.ehcache.clustered.common.internal.exceptions.InvalidServerSideConfigurationException; -import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; -import org.ehcache.clustered.common.internal.exceptions.InvalidStoreManagerException; -import org.ehcache.clustered.common.internal.exceptions.LifecycleException; -import org.ehcache.clustered.common.internal.exceptions.ResourceConfigurationException; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; -import org.terracotta.entity.ServiceRegistry; +import org.terracotta.context.TreeNode; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; -import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; -import org.terracotta.offheapstore.paging.OffHeapStorageArea; -import org.terracotta.offheapstore.paging.Page; +import org.terracotta.offheapresource.OffHeapResources; import org.terracotta.offheapstore.paging.PageSource; -import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; +import org.terracotta.statistics.StatisticsManager; import java.util.Collections; import java.util.HashMap; @@ -47,46 +41,80 @@ import java.util.Iterator; import java.util.Map; import java.util.Set; +import 
java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.Callable; +import java.util.function.Function; + +import static java.util.stream.Collectors.toMap; +import static org.terracotta.offheapresource.OffHeapResourceIdentifier.identifier; -import static org.terracotta.offheapstore.util.MemoryUnit.GIGABYTES; -import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; public class EhcacheStateServiceImpl implements EhcacheStateService { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheStateServiceImpl.class); - private final ServiceRegistry services; - private final Set offHeapResourceIdentifiers; + private static final String STATISTICS_STORE_TAG = "Store"; + private static final String STATISTICS_POOL_TAG = "Pool"; + private static final String PROPERTY_STORE_KEY = "storeName"; + private static final String PROPERTY_POOL_KEY = "poolName"; + + private static final Map> STAT_STORE_METHOD_REFERENCES = new HashMap<>(); + private static final Map> STAT_POOL_METHOD_REFERENCES = new HashMap<>(); + + static { + STAT_STORE_METHOD_REFERENCES.put("allocatedMemory", ServerStoreImpl::getAllocatedMemory); + STAT_STORE_METHOD_REFERENCES.put("dataAllocatedMemory", ServerStoreImpl::getDataAllocatedMemory); + STAT_STORE_METHOD_REFERENCES.put("occupiedMemory", ServerStoreImpl::getOccupiedMemory); + STAT_STORE_METHOD_REFERENCES.put("dataOccupiedMemory", ServerStoreImpl::getDataOccupiedMemory); + STAT_STORE_METHOD_REFERENCES.put("entries", ServerStoreImpl::getSize); + STAT_STORE_METHOD_REFERENCES.put("usedSlotCount", ServerStoreImpl::getUsedSlotCount); + STAT_STORE_METHOD_REFERENCES.put("dataVitalMemory", ServerStoreImpl::getDataVitalMemory); + STAT_STORE_METHOD_REFERENCES.put("vitalMemory", ServerStoreImpl::getVitalMemory); + STAT_STORE_METHOD_REFERENCES.put("reprobeLength", ServerStoreImpl::getReprobeLength); + STAT_STORE_METHOD_REFERENCES.put("removedSlotCount", 
ServerStoreImpl::getRemovedSlotCount); + STAT_STORE_METHOD_REFERENCES.put("dataSize", ServerStoreImpl::getDataSize); + STAT_STORE_METHOD_REFERENCES.put("tableCapacity", ServerStoreImpl::getTableCapacity); + + STAT_POOL_METHOD_REFERENCES.put("allocatedSize", ResourcePageSource::getAllocatedSize); + } + + private final OffHeapResources offHeapResources; + private volatile boolean configured = false; /** * The name of the resource to use for dedicated resource pools not identifying a resource from which * space for the pool is obtained. This value may be {@code null}; */ - private String defaultServerResource; + private volatile String defaultServerResource; /** * The clustered shared resource pools specified by the CacheManager creating this {@code EhcacheActiveEntity}. * The index is the name assigned to the shared resource pool in the cache manager configuration. */ - private Map sharedResourcePools; + private final Map sharedResourcePools = new ConcurrentHashMap<>(); /** * The clustered dedicated resource pools specified by caches defined in CacheManagers using this * {@code EhcacheActiveEntity}. The index is the cache identifier (alias). */ - private Map dedicatedResourcePools = new HashMap(); + private final Map dedicatedResourcePools = new ConcurrentHashMap<>(); /** * The clustered stores representing the server-side of a {@code ClusterStore}. * The index is the cache alias/identifier. 
*/ - private Map stores = Collections.emptyMap(); + private final Map stores = new ConcurrentHashMap<>(); + private final ClientMessageTracker messageTracker = new ClientMessageTracker(); + private final ConcurrentMap invalidationMap = new ConcurrentHashMap<>(); private final StateRepositoryManager stateRepositoryManager; + private final KeySegmentMapper mapper; - public EhcacheStateServiceImpl(ServiceRegistry services, Set offHeapResourceIdentifiers) { - this.services = services; - this.offHeapResourceIdentifiers = offHeapResourceIdentifiers; + + public EhcacheStateServiceImpl(OffHeapResources offHeapResources, final KeySegmentMapper mapper) { + this.offHeapResources = offHeapResources; + this.mapper = mapper; this.stateRepositoryManager = new StateRepositoryManager(); } @@ -99,25 +127,45 @@ public Set getStores() { } Set getSharedResourcePoolIds() { - return sharedResourcePools == null ? new HashSet() : Collections.unmodifiableSet(sharedResourcePools.keySet()); + return Collections.unmodifiableSet(sharedResourcePools.keySet()); } Set getDedicatedResourcePoolIds() { return Collections.unmodifiableSet(dedicatedResourcePools.keySet()); } - String getDefaultServerResource() { + public String getDefaultServerResource() { return this.defaultServerResource; } - public void validate(ValidateStoreManager message) throws ClusterException { + @Override + public Map getSharedResourcePools() { + return sharedResourcePools.entrySet().stream().collect(toMap(Map.Entry::getKey, e -> e.getValue().getPool())); + } + + @Override + public ResourcePageSource getSharedResourcePageSource(String name) { + return sharedResourcePools.get(name); + } + + @Override + public ServerSideConfiguration.Pool getDedicatedResourcePool(String name) { + ResourcePageSource resourcePageSource = dedicatedResourcePools.get(name); + return resourcePageSource == null ? 
null : resourcePageSource.getPool(); + } + + @Override + public ResourcePageSource getDedicatedResourcePageSource(String name) { + return dedicatedResourcePools.get(name); + } + + public void validate(ServerSideConfiguration configuration) throws ClusterException { if (!isConfigured()) { throw new LifecycleException("Clustered Tier Manager is not configured"); } - ServerSideConfiguration incomingConfig = message.getConfiguration(); - if (incomingConfig != null) { - checkConfigurationCompatibility(incomingConfig); + if (configuration != null) { + checkConfigurationCompatibility(configuration); } } @@ -150,7 +198,7 @@ private void checkConfigurationCompatibility(ServerSideConfiguration incomingCon } private static Map resolveResourcePools(ServerSideConfiguration configuration) throws InvalidServerSideConfigurationException { - Map pools = new HashMap(); + Map pools = new HashMap<>(); for (Map.Entry e : configuration.getResourcePools().entrySet()) { ServerSideConfiguration.Pool pool = e.getValue(); if (pool.getServerResource() == null) { @@ -166,29 +214,27 @@ private static Map resolveResourcePools(Se return Collections.unmodifiableMap(pools); } - public void configure(ConfigureStoreManager message) throws ClusterException { + public void configure(ServerSideConfiguration configuration) throws ClusterException { if (!isConfigured()) { LOGGER.info("Configuring server-side clustered tier manager"); - ServerSideConfiguration configuration = message.getConfiguration(); this.defaultServerResource = configuration.getDefaultServerResource(); if (this.defaultServerResource != null) { - if (!offHeapResourceIdentifiers.contains(this.defaultServerResource)) { + if (!offHeapResources.getAllIdentifiers().contains(identifier(this.defaultServerResource))) { throw new ResourceConfigurationException("Default server resource '" + this.defaultServerResource - + "' is not defined. Available resources are: " + offHeapResourceIdentifiers); + + "' is not defined. 
Available resources are: " + offHeapResources.getAllIdentifiers()); } } - this.sharedResourcePools = createPools(resolveResourcePools(configuration)); - this.stores = new HashMap(); - + this.sharedResourcePools.putAll(createPools(resolveResourcePools(configuration))); + configured = true; } else { throw new InvalidStoreManagerException("Clustered Tier Manager already configured"); } } private Map createPools(Map resourcePools) throws ResourceConfigurationException { - Map pools = new HashMap(); + Map pools = new HashMap<>(); try { for (Map.Entry e : resourcePools.entrySet()) { pools.put(e.getKey(), createPageSource(e.getKey(), e.getValue())); @@ -217,13 +263,14 @@ private Map createPools(Map + registerStatistic(store, storeName, entry.getKey(), STATISTICS_STORE_TAG, PROPERTY_STORE_KEY, () -> entry.getValue().apply(store) )); + } + + private void registerPoolStatistics(String poolName, ResourcePageSource pageSource) { + STAT_POOL_METHOD_REFERENCES.entrySet().stream().forEach((entry)-> + registerStatistic(pageSource, poolName, entry.getKey(), STATISTICS_POOL_TAG, PROPERTY_POOL_KEY, () -> entry.getValue().apply(pageSource)) + ); + } + + private void unRegisterStoreStatistics(ServerStoreImpl store) { + TreeNode node = StatisticsManager.nodeFor(store); + + if(node != null) { + node.clean(); + } + } + + private void unRegisterPoolStatistics(ResourcePageSource pageSource) { + TreeNode node = StatisticsManager.nodeFor(pageSource); + + if(node != null) { + node.clean(); + } + } + + private void registerStatistic(Object context, String name, String observerName, String tag, String propertyKey, Callable callable) { + Set tags = new HashSet(Arrays.asList(tag,"tier")); + Map properties = new HashMap(); + properties.put("discriminator", tag); + properties.put(propertyKey, name); + + StatisticsManager.createPassThroughStatistic(context, observerName, tags, properties, callable); + } + private void releaseDedicatedPool(String name, PageSource pageSource) { /* * A ServerStore using 
a dedicated resource pool is the only referent to that pool. When such a @@ -253,6 +336,7 @@ private void releaseDedicatedPool(String name, PageSource pageSource) { public void destroy() { for (Map.Entry storeEntry: stores.entrySet()) { + unRegisterStoreStatistics(storeEntry.getValue()); storeEntry.getValue().close(); } stores.clear(); @@ -263,7 +347,9 @@ public void destroy() { releasePools("shared", this.sharedResourcePools); releasePools("dedicated", this.dedicatedResourcePools); - this.sharedResourcePools = null; + this.sharedResourcePools.clear(); + invalidationMap.clear(); + this.configured = false; } private void releasePools(String poolType, Map resourcePools) { @@ -280,8 +366,9 @@ private void releasePools(String poolType, Map resou private void releasePool(String poolType, String poolName, ResourcePageSource resourcePageSource) { ServerSideConfiguration.Pool pool = resourcePageSource.getPool(); - OffHeapResource source = services.getService(OffHeapResourceIdentifier.identifier(pool.getServerResource())); + OffHeapResource source = offHeapResources.getOffHeapResource(identifier(pool.getServerResource())); if (source != null) { + unRegisterPoolStatistics(resourcePageSource); source.release(pool.getSize()); LOGGER.info("Released {} bytes from resource '{}' for {} pool '{}'", pool.getSize(), pool.getServerResource(), poolType, poolName); } @@ -292,14 +379,25 @@ public ServerStoreImpl createStore(String name, ServerStoreConfiguration serverS throw new InvalidStoreException("Clustered tier '" + name + "' already exists"); } + ServerStoreImpl serverStore; PageSource resourcePageSource = getPageSource(name, serverStoreConfiguration.getPoolAllocation()); - ServerStoreImpl serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource); + try { + serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource, mapper); + } catch (RuntimeException rte) { + releaseDedicatedPool(name, resourcePageSource); + throw new 
InvalidServerStoreConfigurationException("Failed to create ServerStore.", rte); + } + stores.put(name, serverStore); + + registerStoreStatistics(serverStore, name); + return serverStore; } public void destroyServerStore(String name) throws ClusterException { final ServerStoreImpl store = stores.remove(name); + unRegisterStoreStatistics(store); if (store == null) { throw new InvalidStoreException("Clustered tier '" + name + "' does not exist"); } else { @@ -349,57 +447,42 @@ private PageSource getPageSource(String name, PoolAllocation allocation) throws } - public boolean isConfigured() { - return (sharedResourcePools != null); + @Override + public InvalidationTracker getInvalidationTracker(String cacheId) { + return this.invalidationMap.get(cacheId); } @Override - public StateRepositoryManager getStateRepositoryManager() throws ClusterException { - return this.stateRepositoryManager; + public void addInvalidationtracker(String cacheId) { + this.invalidationMap.put(cacheId, new InvalidationTracker()); } - private static boolean nullSafeEquals(Object s1, Object s2) { - return (s1 == null ? s2 == null : s1.equals(s2)); + @Override + public InvalidationTracker removeInvalidationtracker(String cacheId) { + return this.invalidationMap.remove(cacheId); } - /** - * Pairs a {@link ServerSideConfiguration.Pool} and an {@link UpfrontAllocatingPageSource} instance providing storage - * for the pool. - */ - private static class ResourcePageSource implements PageSource{ - /** - * A description of the resource allocation underlying this {@code PageSource}. 
- */ - private final ServerSideConfiguration.Pool pool; - private final UpfrontAllocatingPageSource delegatePageSource; - - private ResourcePageSource(ServerSideConfiguration.Pool pool) { - this.pool = pool; - this.delegatePageSource = new UpfrontAllocatingPageSource(new OffHeapBufferSource(), pool.getSize(), GIGABYTES.toBytes(1), MEGABYTES.toBytes(128)); - } + @Override + public void loadExisting() { + //nothing to do + } - public ServerSideConfiguration.Pool getPool() { - return pool; - } + public boolean isConfigured() { + return configured; + } - @Override - public Page allocate(int size, boolean thief, boolean victim, OffHeapStorageArea owner) { - return delegatePageSource.allocate(size, thief, victim, owner); - } + @Override + public StateRepositoryManager getStateRepositoryManager() throws ClusterException { + return this.stateRepositoryManager; + } - @Override - public void free(Page page) { - delegatePageSource.free(page); - } + @Override + public ClientMessageTracker getClientMessageTracker() { + return this.messageTracker; + } - @Override - public String toString() { - final StringBuilder sb = new StringBuilder("ResourcePageSource{"); - sb.append("pool=").append(pool); - sb.append(", delegatePageSource=").append(delegatePageSource); - sb.append('}'); - return sb.toString(); - } + private static boolean nullSafeEquals(Object s1, Object s2) { + return (s1 == null ? s2 == null : s1.equals(s2)); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java b/clustered/server/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java new file mode 100644 index 0000000000..138360f263 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server; + +import com.tc.classloader.CommonComponent; + +@CommonComponent +public class KeySegmentMapper { + + private final int segments; + + public KeySegmentMapper(final int segments) { + this.segments = segments; + } + + public int getSegmentForKey(long key) { + return Math.abs((int) (key % segments)); + } + + public int getSegments() { + return segments; + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java new file mode 100644 index 0000000000..cdafcff88e --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ehcache.clustered.server;

import org.ehcache.clustered.common.internal.ServerStoreConfiguration;
import org.ehcache.clustered.common.internal.store.Chain;
import org.ehcache.clustered.common.internal.store.ServerStore;
import org.ehcache.clustered.server.offheap.OffHeapChainMap;
import org.terracotta.offheapstore.MapInternals;

import com.tc.classloader.CommonComponent;

import java.util.List;

/**
 * Server-side view of a {@link ServerStore}: adds eviction listening, access
 * to the store's configuration and backing segments, and a direct put used
 * during passive synchronisation.
 */
@CommonComponent
public interface ServerSideServerStore extends ServerStore, MapInternals {

  /** Registers the listener notified when this store evicts a mapping. */
  void setEvictionListener(ServerStoreEvictionListener listener);

  /** @return the configuration this store was created with */
  ServerStoreConfiguration getStoreConfiguration();

  /**
   * @return the underlying off-heap chain-map segments backing this store
   * (type parameters restored — the extracted source showed a raw {@code List>};
   * reconstructed from {@code OffHeapChainMap}'s use with {@code long} keys)
   */
  List<OffHeapChainMap<Long>> getSegments();

  /** Directly installs {@code chain} at {@code key} (used when syncing state). */
  void put(long key, Chain chain);
}
ServerStoreImpl(ServerStoreConfiguration storeConfiguration, PageSource pageSource) { + public ServerStoreImpl(ServerStoreConfiguration storeConfiguration, PageSource pageSource, KeySegmentMapper mapper) { this.storeConfiguration = storeConfiguration; this.pageSource = pageSource; - this.store = new OffHeapServerStore(pageSource, OFFHEAP_CHAIN_SEGMENTS); + this.store = new OffHeapServerStore(pageSource, mapper); } public void setEvictionListener(ServerStoreEvictionListener listener) { @@ -78,6 +79,10 @@ public void replaceAtHead(long key, Chain expect, Chain update) { store.replaceAtHead(key, expect, update); } + public void put(long key, Chain chain) { + store.put(key, chain); + } + @Override public void clear() { store.clear(); @@ -86,4 +91,77 @@ public void clear() { public void close() { store.close(); } + + public List> getSegments() { + return store.getSegments(); + } + + // stats + + + @Override + public long getSize() { + return store.getSize(); + } + + @Override + public long getTableCapacity() { + return store.getTableCapacity(); + } + + @Override + public long getUsedSlotCount() { + return store.getUsedSlotCount(); + } + + @Override + public long getRemovedSlotCount() { + return store.getRemovedSlotCount(); + } + + @Override + public long getAllocatedMemory() { + return store.getAllocatedMemory(); + } + + @Override + public long getOccupiedMemory() { + return store.getOccupiedMemory(); + } + + @Override + public long getVitalMemory() { + return store.getVitalMemory(); + } + + @Override + public long getDataAllocatedMemory() { + return store.getDataAllocatedMemory(); + } + + @Override + public long getDataOccupiedMemory() { + return store.getDataOccupiedMemory(); + } + + @Override + public long getDataVitalMemory() { + return store.getDataVitalMemory(); + } + + @Override + public long getDataSize() { + return store.getDataSize(); + } + + @Override + public int getReprobeLength() { + //TODO + //MapInternals Interface may need to change to implement this 
function correctly. + //Currently MapInternals Interface contains function: int getReprobeLength(); + //however OffHeapServerStore.reprobeLength() returns a long + //Thus there could be data loss + + throw new UnsupportedOperationException("Not supported yet."); + } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java new file mode 100644 index 0000000000..c5b10b826c --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java @@ -0,0 +1,50 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ehcache.clustered.server.internal.messages;

import org.ehcache.clustered.common.internal.store.Chain;

import com.tc.classloader.CommonComponent;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Sync message carrying a batch of key-to-chain mappings for one cache,
 * sent from the active server to a synchronising passive.
 */
@CommonComponent
public class EhcacheDataSyncMessage extends EhcacheSyncMessage {

  private final String cacheId;
  private final Map<Long, Chain> chainMap;

  /**
   * @param cacheId  identifier of the cache the entries belong to
   * @param chainMap key-to-chain entries to transfer; copied defensively
   */
  public EhcacheDataSyncMessage(final String cacheId, final Map<Long, Chain> chainMap) {
    this.cacheId = cacheId;
    // Snapshot defensively: an unmodifiable *view* of the caller's map would
    // still reflect any mutation the caller makes after constructing this message.
    this.chainMap = Collections.unmodifiableMap(new HashMap<>(chainMap));
  }

  @Override
  public SyncMessageType getMessageType() {
    return SyncMessageType.DATA;
  }

  /** @return identifier of the cache these entries belong to */
  public String getCacheId() {
    return cacheId;
  }

  /** @return an unmodifiable snapshot of the key-to-chain entries */
  public Map<Long, Chain> getChainMap() {
    return chainMap;
  }
}
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ehcache.clustered.server.internal.messages;

import org.ehcache.clustered.common.internal.messages.EhcacheCodec;
import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage;
import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse;
import org.ehcache.clustered.common.internal.messages.EhcacheMessageType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terracotta.entity.MessageCodec;
import org.terracotta.entity.MessageCodecException;
import org.terracotta.runnel.decoding.Enm;

import java.nio.ByteBuffer;

import static java.nio.ByteBuffer.wrap;
import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveReplicationMessage;

/**
 * Server-side message codec. Client-originated messages are delegated to the
 * shared {@link EhcacheCodec}; active-to-passive replication messages use
 * their own {@link PassiveReplicationMessageCodec}.
 */
public class EhcacheServerCodec implements MessageCodec<EhcacheEntityMessage, EhcacheEntityResponse> {

  private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheServerCodec.class);

  private final EhcacheCodec clientCodec;
  private final PassiveReplicationMessageCodec replicationCodec;

  public EhcacheServerCodec(EhcacheCodec clientCodec, PassiveReplicationMessageCodec replicationCodec) {
    this.clientCodec = clientCodec;
    this.replicationCodec = replicationCodec;
  }

  @Override
  public byte[] encodeMessage(EhcacheEntityMessage message) throws MessageCodecException {
    // Replication messages have their own wire format; everything else is a client message.
    return (message instanceof PassiveReplicationMessage)
        ? replicationCodec.encode((PassiveReplicationMessage) message)
        : clientCodec.encodeMessage(message);
  }

  @Override
  public EhcacheEntityMessage decodeMessage(byte[] payload) throws MessageCodecException {
    ByteBuffer buffer = wrap(payload);

    Enm<EhcacheMessageType> opCode = EhcacheCodec.OP_CODE_DECODER.decoder(buffer).enm("opCode");
    if (!opCode.isFound()) {
      throw new AssertionError("Got a message without an opCode");
    }
    if (!opCode.isValid()) {
      // Unknown enum value: likely a newer peer — drop the message rather than fail.
      LOGGER.warn("Received message with unknown operation code - more recent version at the other end?");
      return null;
    }

    buffer.rewind(); // each delegate codec re-reads the opCode itself

    EhcacheMessageType messageType = opCode.get();
    return isPassiveReplicationMessage(messageType)
        ? replicationCodec.decode(messageType, buffer)
        : clientCodec.decodeMessage(buffer, messageType);
  }

  @Override
  public byte[] encodeResponse(EhcacheEntityResponse response) throws MessageCodecException {
    return clientCodec.encodeResponse(response);
  }

  @Override
  public EhcacheEntityResponse decodeResponse(byte[] payload) throws MessageCodecException {
    return clientCodec.decodeResponse(payload);
  }
}
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ehcache.clustered.server.internal.messages;

import org.ehcache.clustered.common.ServerSideConfiguration;
import org.ehcache.clustered.common.internal.ServerStoreConfiguration;

import com.tc.classloader.CommonComponent;

import java.io.Serializable;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

/**
 * Sync message carrying the server-side configuration plus every store's
 * configuration, sent first when a passive server synchronises.
 *
 * NOTE(review): declared {@code Serializable}, which requires the held
 * configuration objects to be serializable too — confirm against their
 * declarations, which are outside this file.
 */
@CommonComponent
public class EhcacheStateSyncMessage extends EhcacheSyncMessage implements Serializable {

  private final ServerSideConfiguration configuration;
  private final Map<String, ServerStoreConfiguration> storeConfigs;

  /**
   * @param configuration the tier-manager level configuration
   * @param storeConfigs  per-store configurations keyed by store name
   */
  public EhcacheStateSyncMessage(final ServerSideConfiguration configuration,
                                 final Map<String, ServerStoreConfiguration> storeConfigs) {
    this.configuration = configuration;
    this.storeConfigs = storeConfigs;
  }

  /** @return the tier-manager level configuration */
  public ServerSideConfiguration getConfiguration() {
    return configuration;
  }

  /** @return per-store configurations keyed by store name (not copied) */
  public Map<String, ServerStoreConfiguration> getStoreConfigs() {
    return storeConfigs;
  }

  @Override
  public SyncMessageType getMessageType() {
    return SyncMessageType.STATE;
  }
}
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ehcache.clustered.server.internal.messages;

import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage;

import com.tc.classloader.CommonComponent;

import java.util.UUID;

/**
 * Base type for active-to-passive synchronisation messages. Sync messages
 * carry neither a client identity nor a tracked message id, so the inherited
 * accessors are deliberately unsupported.
 */
@CommonComponent
public abstract class EhcacheSyncMessage extends EhcacheEntityMessage {

  /** Sync messages are not id-tracked. */
  @Override
  public void setId(final long id) {
    throw new UnsupportedOperationException();
  }

  /** Sync messages are not id-tracked. */
  @Override
  public long getId() {
    throw new UnsupportedOperationException();
  }

  /** Sync messages do not originate from a client. */
  @Override
  public UUID getClientId() {
    throw new UnsupportedOperationException();
  }

  /** @return the discriminator identifying the concrete sync message */
  public abstract SyncMessageType getMessageType();
}
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ehcache.clustered.server.internal.messages;

import org.ehcache.clustered.common.ServerSideConfiguration;
import org.ehcache.clustered.common.internal.ServerStoreConfiguration;
import org.ehcache.clustered.common.internal.messages.ChainCodec;
import org.ehcache.clustered.common.internal.messages.ConfigCodec;
import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage;
import org.ehcache.clustered.common.internal.store.Chain;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terracotta.entity.MessageCodecException;
import org.terracotta.entity.SyncMessageCodec;
import org.terracotta.runnel.Struct;
import org.terracotta.runnel.StructBuilder;
import org.terracotta.runnel.decoding.Enm;
import org.terracotta.runnel.decoding.StructArrayDecoder;
import org.terracotta.runnel.decoding.StructDecoder;
import org.terracotta.runnel.encoding.StructEncoder;

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT;
import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD;
import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD;
import static org.ehcache.clustered.server.internal.messages.SyncMessageType.DATA;
import static org.ehcache.clustered.server.internal.messages.SyncMessageType.STATE;
import static org.ehcache.clustered.server.internal.messages.SyncMessageType.SYNC_MESSAGE_TYPE_FIELD_INDEX;
import static org.ehcache.clustered.server.internal.messages.SyncMessageType.SYNC_MESSAGE_TYPE_FIELD_NAME;
import static org.ehcache.clustered.server.internal.messages.SyncMessageType.SYNC_MESSAGE_TYPE_MAPPING;
import static org.terracotta.runnel.StructBuilder.newStructBuilder;

/**
 * Runnel-based codec for {@link EhcacheSyncMessage}s: STATE messages carry
 * the server-side configuration plus all store configurations; DATA messages
 * carry a batch of key-to-chain entries for a single store.
 *
 * NOTE(review): generic type parameters were reconstructed from the runnel
 * decoder call chain — verify against the runnel library's declarations.
 */
public class EhcacheSyncMessageCodec implements SyncMessageCodec<EhcacheEntityMessage> {

  private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheSyncMessageCodec.class);

  private static final String STORES_SUB_STRUCT = "stores";
  private static final String CHAIN_FIELD = "chain";
  private static final String CHAIN_MAP_ENTRIES_SUB_STRUCT = "entries";

  private static final Struct CHAIN_MAP_ENTRY_STRUCT = newStructBuilder()
    .int64(KEY_FIELD, 10)
    .struct(CHAIN_FIELD, 20, CHAIN_STRUCT)
    .build();

  private static final Struct DATA_SYNC_STRUCT = newStructBuilder()
    .enm(SYNC_MESSAGE_TYPE_FIELD_NAME, SYNC_MESSAGE_TYPE_FIELD_INDEX, SYNC_MESSAGE_TYPE_MAPPING)
    .string(SERVER_STORE_NAME_FIELD, 20)
    .structs(CHAIN_MAP_ENTRIES_SUB_STRUCT, 40, CHAIN_MAP_ENTRY_STRUCT)
    .build();

  // Built in the constructor because the ConfigCodec injects a variable number of fields.
  private final Struct stateSyncStruct;

  private final ChainCodec chainCodec;
  private final ConfigCodec configCodec;

  public EhcacheSyncMessageCodec(ConfigCodec configCodec) {
    this.configCodec = configCodec;
    this.chainCodec = new ChainCodec();

    StructBuilder stateBuilder = newStructBuilder()
      .enm(SYNC_MESSAGE_TYPE_FIELD_NAME, SYNC_MESSAGE_TYPE_FIELD_INDEX, SYNC_MESSAGE_TYPE_MAPPING);
    ConfigCodec.InjectTuple injected =
      configCodec.injectServerSideConfiguration(stateBuilder, SYNC_MESSAGE_TYPE_FIELD_INDEX + 10);

    StructBuilder storeConfigBuilder = newStructBuilder().string(SERVER_STORE_NAME_FIELD, 10);
    Struct storeConfigStruct =
      configCodec.injectServerStoreConfiguration(storeConfigBuilder, 20).getUpdatedBuilder().build();

    this.stateSyncStruct = injected.getUpdatedBuilder()
      .structs(STORES_SUB_STRUCT, injected.getLastIndex() + 10, storeConfigStruct)
      .build();
  }

  @Override
  public byte[] encode(final int concurrencyKey, final EhcacheEntityMessage message) throws MessageCodecException {
    if (!(message instanceof EhcacheSyncMessage)) {
      throw new IllegalArgumentException(this.getClass().getName() + " can not encode " + message + " which is not a " + EhcacheStateSyncMessage.class);
    }
    EhcacheSyncMessage syncMessage = (EhcacheSyncMessage) message;
    switch (syncMessage.getMessageType()) {
      case STATE:
        return encodeStateSync((EhcacheStateSyncMessage) syncMessage);
      case DATA:
        return encodeDataSync((EhcacheDataSyncMessage) syncMessage);
      default:
        throw new IllegalArgumentException("Sync message codec can not encode " + syncMessage.getMessageType());
    }
  }

  /** Encodes configuration + all store configurations. */
  private byte[] encodeStateSync(EhcacheStateSyncMessage stateSyncMessage) {
    StructEncoder<Void> encoder = stateSyncStruct.encoder();
    encoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME, STATE);
    configCodec.encodeServerSideConfiguration(encoder, stateSyncMessage.getConfiguration());
    encoder.structs(STORES_SUB_STRUCT, stateSyncMessage.getStoreConfigs().entrySet(), (storeEncoder, storeEntry) -> {
      storeEncoder.string(SERVER_STORE_NAME_FIELD, storeEntry.getKey());
      configCodec.encodeServerStoreConfiguration(storeEncoder, storeEntry.getValue());
    });
    return encoder.encode().array();
  }

  /** Encodes one store's key-to-chain entries. */
  private byte[] encodeDataSync(EhcacheDataSyncMessage dataSyncMessage) {
    StructEncoder<Void> encoder = DATA_SYNC_STRUCT.encoder();
    encoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME, DATA);
    encoder.string(SERVER_STORE_NAME_FIELD, dataSyncMessage.getCacheId());
    encoder.structs(CHAIN_MAP_ENTRIES_SUB_STRUCT, dataSyncMessage.getChainMap().entrySet(), (entryEncoder, entry) -> {
      entryEncoder.int64(KEY_FIELD, entry.getKey());
      chainCodec.encode(entryEncoder.struct(CHAIN_FIELD), entry.getValue());
    });
    return encoder.encode().array();
  }

  @Override
  public EhcacheSyncMessage decode(final int concurrencyKey, final byte[] payload) throws MessageCodecException {
    ByteBuffer message = ByteBuffer.wrap(payload);
    // Both structs lead with the same enum field, so decode the type via the state struct first.
    StructDecoder<Void> decoder = stateSyncStruct.decoder(message);
    Enm<SyncMessageType> typeEnm = decoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME);
    if (!typeEnm.isFound()) {
      throw new AssertionError("Invalid message format - misses the message type field");
    }
    if (!typeEnm.isValid()) {
      LOGGER.warn("Unknown sync message received - ignoring {}", typeEnm.raw());
      return null;
    }

    switch (typeEnm.get()) {
      case STATE:
        ServerSideConfiguration configuration = configCodec.decodeServerSideConfiguration(decoder);
        return new EhcacheStateSyncMessage(configuration, decodeStoreConfigurations(decoder));
      case DATA:
        // Re-decode from the start with the DATA struct.
        message.rewind();
        StructDecoder<Void> dataDecoder = DATA_SYNC_STRUCT.decoder(message);
        String storeName = dataDecoder.string(SERVER_STORE_NAME_FIELD);
        return new EhcacheDataSyncMessage(storeName, decodeChainMapEntries(dataDecoder));
      default:
        throw new AssertionError("Cannot happen given earlier checks");
    }
  }

  /** Reads the repeated key/chain entries of a DATA message. */
  private Map<Long, Chain> decodeChainMapEntries(StructDecoder<Void> decoder) {
    Map<Long, Chain> chainMap = new HashMap<>();
    StructArrayDecoder<StructDecoder<Void>> entriesDecoder = decoder.structs(CHAIN_MAP_ENTRIES_SUB_STRUCT);
    if (entriesDecoder != null) {
      for (int i = 0; i < entriesDecoder.length(); i++) {
        Long key = entriesDecoder.int64(KEY_FIELD);
        StructDecoder<StructArrayDecoder<StructDecoder<Void>>> chainDecoder = entriesDecoder.struct(CHAIN_FIELD);
        chainMap.put(key, chainCodec.decode(chainDecoder));
        entriesDecoder.next();
      }
    }
    return chainMap;
  }

  /** Reads the repeated store-configuration entries of a STATE message. */
  private Map<String, ServerStoreConfiguration> decodeStoreConfigurations(StructDecoder<Void> decoder) {
    Map<String, ServerStoreConfiguration> result = new HashMap<>();
    StructArrayDecoder<StructDecoder<Void>> storesDecoder = decoder.structs(STORES_SUB_STRUCT);
    if (storesDecoder != null) {
      for (int i = 0; i < storesDecoder.length(); i++) {
        String storeName = storesDecoder.string(SERVER_STORE_NAME_FIELD);
        result.put(storeName, configCodec.decodeServerStoreConfiguration(storesDecoder));
        storesDecoder.next();
      }
    }
    return result;
  }
}
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ehcache.clustered.server.internal.messages;

import org.ehcache.clustered.common.internal.ServerStoreConfiguration;
import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage;
import org.ehcache.clustered.common.internal.messages.EhcacheMessageType;
import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage;
import org.ehcache.clustered.common.internal.messages.LifecycleMessage;
import org.ehcache.clustered.common.internal.store.Chain;

import java.util.UUID;

/**
 * Messages sent by the active entity to the passive entity to replicate
 * client identity, chain mutations, store lifecycle events and
 * invalidation completions.
 */
public abstract class PassiveReplicationMessage extends EhcacheOperationMessage {

  /** Replication messages never have their id reassigned. */
  @Override
  public void setId(long id) {
    throw new UnsupportedOperationException("This method is not supported on replication message");
  }

  /** Replicates the identity of a connected client. */
  public static class ClientIDTrackerMessage extends PassiveReplicationMessage {

    private final UUID clientId;

    public ClientIDTrackerMessage(UUID clientId) {
      this.clientId = clientId;
    }

    @Override
    public UUID getClientId() {
      return clientId;
    }

    @Override
    public long getId() {
      throw new UnsupportedOperationException("Not supported for ClientIDTrackerMessage");
    }

    @Override
    public EhcacheMessageType getMessageType() {
      return EhcacheMessageType.CLIENT_ID_TRACK_OP;
    }
  }

  /** Replicates one chain mutation; routed concurrently by key. */
  public static class ChainReplicationMessage extends ClientIDTrackerMessage implements ConcurrentEntityMessage {

    private final String cacheId;
    private final long key;
    private final Chain chain;
    private final long msgId;

    public ChainReplicationMessage(String cacheId, long key, Chain chain, long msgId, UUID clientId) {
      super(clientId);
      this.cacheId = cacheId;
      this.key = key;
      this.chain = chain;
      this.msgId = msgId;
    }

    public String getCacheId() {
      return cacheId;
    }

    public long getKey() {
      return key;
    }

    public Chain getChain() {
      return chain;
    }

    @Override
    public long getId() {
      return msgId;
    }

    @Override
    public EhcacheMessageType getMessageType() {
      return EhcacheMessageType.CHAIN_REPLICATION_OP;
    }

    @Override
    public long concurrencyKey() {
      // Serialise with other operations on the same key.
      return key;
    }
  }

  /** Signals that a clear-triggered invalidation has completed for a cache. */
  public static class ClearInvalidationCompleteMessage extends PassiveReplicationMessage implements ConcurrentEntityMessage {

    private final String cacheId;

    public ClearInvalidationCompleteMessage(String cacheId) {
      this.cacheId = cacheId;
    }

    @Override
    public long concurrencyKey() {
      return cacheId.hashCode();
    }

    @Override
    public long getId() {
      throw new UnsupportedOperationException("Not supported for ClearInvalidationCompleteMessage");
    }

    @Override
    public UUID getClientId() {
      throw new UnsupportedOperationException("Not supported for ClearInvalidationCompleteMessage");
    }

    @Override
    public EhcacheMessageType getMessageType() {
      return EhcacheMessageType.CLEAR_INVALIDATION_COMPLETE;
    }

    public String getCacheId() {
      return cacheId;
    }
  }

  /** Signals that the invalidation of a single key has completed. */
  public static class InvalidationCompleteMessage extends ClearInvalidationCompleteMessage {

    private final long key;

    public InvalidationCompleteMessage(String cacheId, long key) {
      super(cacheId);
      this.key = key;
    }

    @Override
    public long concurrencyKey() {
      return getCacheId().hashCode() + key;
    }

    @Override
    public EhcacheMessageType getMessageType() {
      return EhcacheMessageType.INVALIDATION_COMPLETE;
    }

    public long getKey() {
      return key;
    }
  }

  /** Replicates the creation of a server store. */
  public static class CreateServerStoreReplicationMessage extends ClientIDTrackerMessage {

    private final String storeName;
    private final ServerStoreConfiguration storeConfiguration;
    private final long msgId;

    public CreateServerStoreReplicationMessage(LifecycleMessage.CreateServerStore createMessage) {
      this(createMessage.getId(), createMessage.getClientId(), createMessage.getName(), createMessage.getStoreConfiguration());
    }

    public CreateServerStoreReplicationMessage(long msgId, UUID clientId, String storeName, ServerStoreConfiguration configuration) {
      super(clientId);
      this.msgId = msgId;
      this.storeName = storeName;
      this.storeConfiguration = configuration;
    }

    public String getStoreName() {
      return storeName;
    }

    public ServerStoreConfiguration getStoreConfiguration() {
      return storeConfiguration;
    }

    @Override
    public long getId() {
      return msgId;
    }

    @Override
    public EhcacheMessageType getMessageType() {
      return EhcacheMessageType.CREATE_SERVER_STORE_REPLICATION;
    }
  }

  /** Replicates the destruction of a server store. */
  public static class DestroyServerStoreReplicationMessage extends ClientIDTrackerMessage {

    private final String storeName;
    private final long msgId;

    public DestroyServerStoreReplicationMessage(LifecycleMessage.DestroyServerStore destroyMessage) {
      this(destroyMessage.getId(), destroyMessage.getClientId(), destroyMessage.getName());
    }

    public DestroyServerStoreReplicationMessage(long msgId, UUID clientId, String storeName) {
      super(clientId);
      this.msgId = msgId;
      this.storeName = storeName;
    }

    public String getStoreName() {
      return storeName;
    }

    @Override
    public long getId() {
      return msgId;
    }

    @Override
    public EhcacheMessageType getMessageType() {
      return EhcacheMessageType.DESTROY_SERVER_STORE_REPLICATION;
    }
  }
}
+ */ + +package org.ehcache.clustered.server.internal.messages; + +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.ChainCodec; +import org.ehcache.clustered.common.internal.messages.ConfigCodec; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.messages.MessageCodecUtils; +import org.ehcache.clustered.common.internal.store.Chain; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.nio.ByteBuffer; +import java.util.UUID; + +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +public class PassiveReplicationMessageCodec { + + private static final String CHAIN_FIELD = "chain"; + + private static final Struct CLIENT_ID_TRACK_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSB_UUID_FIELD, 20) + 
.int64(LSB_UUID_FIELD, 21) + .build(); + + private static final Struct CHAIN_REPLICATION_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .int64(KEY_FIELD, 40) + .struct(CHAIN_FIELD, 45, ChainCodec.CHAIN_STRUCT) + .build(); + + private static final Struct CLEAR_INVALIDATION_COMPLETE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .build(); + + private static final Struct INVALIDATION_COMPLETE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int64(KEY_FIELD, 30) + .build(); + + private static final Struct DESTROY_SERVER_STORE_REPLICATION_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .build(); + + private final StructBuilder CREATE_STORE_MESSAGE_STRUCT_BUILDER_PREFIX = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30); + private static final int CREATE_STORE_NEXT_INDEX = 40; + + private final Struct createStoreReplicationMessageStruct; + + private final ChainCodec chainCodec ; + private final MessageCodecUtils messageCodecUtils; + private final ConfigCodec configCodec; + + public PassiveReplicationMessageCodec(final ConfigCodec configCodec) { + this.chainCodec = new ChainCodec(); + this.messageCodecUtils = new MessageCodecUtils(); + this.configCodec = configCodec; 
+ createStoreReplicationMessageStruct = this.configCodec.injectServerStoreConfiguration( + CREATE_STORE_MESSAGE_STRUCT_BUILDER_PREFIX, CREATE_STORE_NEXT_INDEX).getUpdatedBuilder().build(); + } + + public byte[] encode(PassiveReplicationMessage message) { + + switch (message.getMessageType()) { + case CLIENT_ID_TRACK_OP: + return encodeClientIdTrackMessage((PassiveReplicationMessage.ClientIDTrackerMessage) message); + case CHAIN_REPLICATION_OP: + return encodeChainReplicationMessage((PassiveReplicationMessage.ChainReplicationMessage) message); + case CLEAR_INVALIDATION_COMPLETE: + return encodeClearInvalidationCompleteMessage((PassiveReplicationMessage.ClearInvalidationCompleteMessage) message); + case INVALIDATION_COMPLETE: + return encodeInvalidationCompleteMessage((PassiveReplicationMessage.InvalidationCompleteMessage) message); + case CREATE_SERVER_STORE_REPLICATION: + return encodeCreateServerStoreReplicationMessage((PassiveReplicationMessage.CreateServerStoreReplicationMessage) message); + case DESTROY_SERVER_STORE_REPLICATION: + return encodeDestroyServerStoreReplicationMessage((PassiveReplicationMessage.DestroyServerStoreReplicationMessage) message); + default: + throw new UnsupportedOperationException("This operation is not supported : " + message.getMessageType()); + } + } + + private byte[] encodeDestroyServerStoreReplicationMessage(PassiveReplicationMessage.DestroyServerStoreReplicationMessage message) { + StructEncoder encoder = DESTROY_SERVER_STORE_REPLICATION_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getStoreName()); + + return encoder.encode().array(); + } + + private byte[] encodeCreateServerStoreReplicationMessage(PassiveReplicationMessage.CreateServerStoreReplicationMessage message) { + StructEncoder encoder = createStoreReplicationMessageStruct.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, 
message.getStoreName()); + configCodec.encodeServerStoreConfiguration(encoder, message.getStoreConfiguration()); + + return encoder.encode().array(); + } + + private byte[] encodeInvalidationCompleteMessage(PassiveReplicationMessage.InvalidationCompleteMessage message) { + StructEncoder encoder = INVALIDATION_COMPLETE_STRUCT.encoder(); + + encoder.enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .string(SERVER_STORE_NAME_FIELD, message.getCacheId()) + .int64(KEY_FIELD, message.getKey()); + + return encoder.encode().array(); + } + + private byte[] encodeClearInvalidationCompleteMessage(PassiveReplicationMessage.ClearInvalidationCompleteMessage message) { + StructEncoder encoder = CLEAR_INVALIDATION_COMPLETE_STRUCT.encoder(); + + encoder.enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .string(SERVER_STORE_NAME_FIELD, message.getCacheId()); + + return encoder.encode().array(); + } + + private byte[] encodeChainReplicationMessage(PassiveReplicationMessage.ChainReplicationMessage message) { + StructEncoder encoder = CHAIN_REPLICATION_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + + encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); + encoder.int64(KEY_FIELD, message.getKey()); + chainCodec.encode(encoder.struct(CHAIN_FIELD), message.getChain()); + + return encoder.encode().array(); + } + + private byte[] encodeClientIdTrackMessage(PassiveReplicationMessage.ClientIDTrackerMessage message) { + StructEncoder encoder = CLIENT_ID_TRACK_STRUCT.encoder(); + + encoder.enm(EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .int64(MSB_UUID_FIELD, message.getClientId().getMostSignificantBits()) + .int64(LSB_UUID_FIELD, message.getClientId().getLeastSignificantBits()); + + return encoder.encode().array(); + } + + public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { + + switch (messageType) { + case CLIENT_ID_TRACK_OP: + return 
decodeClientIdTrackMessage(messageBuffer); + case CHAIN_REPLICATION_OP: + return decodeChainReplicationMessage(messageBuffer); + case CLEAR_INVALIDATION_COMPLETE: + return decodeClearInvalidationCompleteMessage(messageBuffer); + case INVALIDATION_COMPLETE: + return decodeInvalidationCompleteMessage(messageBuffer); + case CREATE_SERVER_STORE_REPLICATION: + return decodeCreateServerStoreReplicationMessage(messageBuffer); + case DESTROY_SERVER_STORE_REPLICATION: + return decodeDestroyServerStoreReplicationMessage(messageBuffer); + default: + throw new UnsupportedOperationException("Unknown message type: " + messageType); + } + } + + private PassiveReplicationMessage.DestroyServerStoreReplicationMessage decodeDestroyServerStoreReplicationMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = DESTROY_SERVER_STORE_REPLICATION_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + + return new PassiveReplicationMessage.DestroyServerStoreReplicationMessage(msgId, clientId, storeName); + } + + private PassiveReplicationMessage.CreateServerStoreReplicationMessage decodeCreateServerStoreReplicationMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = createStoreReplicationMessageStruct.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + ServerStoreConfiguration configuration = configCodec.decodeServerStoreConfiguration(decoder); + + return new PassiveReplicationMessage.CreateServerStoreReplicationMessage(msgId, clientId, storeName, configuration); + } + + private PassiveReplicationMessage.InvalidationCompleteMessage decodeInvalidationCompleteMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = INVALIDATION_COMPLETE_STRUCT.decoder(messageBuffer); + + String storeId = 
decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + + return new PassiveReplicationMessage.InvalidationCompleteMessage(storeId, key); + } + + private PassiveReplicationMessage.ClearInvalidationCompleteMessage decodeClearInvalidationCompleteMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = CLEAR_INVALIDATION_COMPLETE_STRUCT.decoder(messageBuffer); + return new PassiveReplicationMessage.ClearInvalidationCompleteMessage(decoder.string(SERVER_STORE_NAME_FIELD)); + } + + private PassiveReplicationMessage.ChainReplicationMessage decodeChainReplicationMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = CHAIN_REPLICATION_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + + Chain chain = chainCodec.decode(decoder.struct(CHAIN_FIELD)); + + return new PassiveReplicationMessage.ChainReplicationMessage(cacheId, key, chain, msgId, clientId); + } + + private PassiveReplicationMessage.ClientIDTrackerMessage decodeClientIdTrackMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = CLIENT_ID_TRACK_STRUCT.decoder(messageBuffer); + + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + return new PassiveReplicationMessage.ClientIDTrackerMessage(clientId); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java new file mode 100644 index 0000000000..73ac2f2dbd --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.internal.messages; + +import org.terracotta.runnel.EnumMapping; + +import com.tc.classloader.CommonComponent; + +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; + +/** + * SyncMessageType + */ +@CommonComponent +public enum SyncMessageType { + STATE, + DATA; + + public static final String SYNC_MESSAGE_TYPE_FIELD_NAME = "msgType"; + public static final int SYNC_MESSAGE_TYPE_FIELD_INDEX = 10; + public static final EnumMapping SYNC_MESSAGE_TYPE_MAPPING = newEnumMappingBuilder(SyncMessageType.class) + .mapping(STATE, 1) + .mapping(DATA, 10) + .build(); +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java new file mode 100644 index 0000000000..bbb9f6a0a3 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java @@ -0,0 +1,33 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.server.ClientState; +import org.terracotta.entity.ClientDescriptor; +import org.terracotta.management.service.monitoring.registry.provider.ClientBinding; + +final class ClientStateBinding extends ClientBinding { + + ClientStateBinding(ClientDescriptor clientDescriptor, ClientState clientState) { + super(clientDescriptor, clientState); + } + + @Override + public ClientState getValue() { + return (ClientState) super.getValue(); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java new file mode 100644 index 0000000000..ba2d9e33e3 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java @@ -0,0 +1,66 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.server.ClientState; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.Settings; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.service.monitoring.registry.provider.ClientBindingManagementProvider; + +import java.util.Collection; +import java.util.Collections; +import java.util.Set; +import java.util.TreeSet; + +@Named("ClientStateSettings") +@RequiredContext({@Named("consumerId"), @Named("clientId"), @Named("type")}) +class ClientStateSettingsManagementProvider extends ClientBindingManagementProvider { + + ClientStateSettingsManagementProvider() { + super(ClientStateBinding.class); + } + + @Override + protected ExposedClientStateBinding internalWrap(Context context, ClientStateBinding managedObject) { + return new ExposedClientStateBinding(context, managedObject); + } + + private static class ExposedClientStateBinding extends ExposedClientBinding { + + ExposedClientStateBinding(Context context, ClientStateBinding clientBinding) { + super(context, clientBinding); + } + + @Override + public Context getContext() { + return super.getContext().with("type", "ClientState"); + } + + @Override + public Collection getDescriptors() { + ClientState clientState = getClientBinding().getValue(); + Set attachedStores = clientState.getAttachedStores(); + return Collections.singleton(new Settings(getContext()) + .set("attached", clientState.isAttached()) + .set("attachedStores", new TreeSet<>(attachedStores).toArray(new String[attachedStores.size()])) + ); + } + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java 
b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java new file mode 100644 index 0000000000..27c5cdf5c5 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -0,0 +1,189 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.server.ClientState; +import org.ehcache.clustered.server.ServerSideServerStore; +import org.ehcache.clustered.server.ServerStoreImpl; +import org.ehcache.clustered.server.state.EhcacheStateService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.entity.BasicServiceConfiguration; +import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.ServiceRegistry; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.collect.StatisticConfiguration; +import org.terracotta.management.service.monitoring.ActiveEntityMonitoringServiceConfiguration; +import org.terracotta.management.service.monitoring.ConsumerManagementRegistry; +import org.terracotta.management.service.monitoring.ConsumerManagementRegistryConfiguration; +import org.terracotta.management.service.monitoring.EntityMonitoringService; +import 
org.terracotta.management.service.monitoring.PassiveEntityMonitoringServiceConfiguration; +import org.terracotta.management.service.monitoring.registry.provider.ClientBinding; +import org.terracotta.monitoring.IMonitoringProducer; + +import static java.util.concurrent.TimeUnit.SECONDS; + +public class Management { + + private static final Logger LOGGER = LoggerFactory.getLogger(Management.class); + + private final ConsumerManagementRegistry managementRegistry; + private final EhcacheStateService ehcacheStateService; + + public Management(ServiceRegistry services, EhcacheStateService ehcacheStateService, boolean active) { + this.ehcacheStateService = ehcacheStateService; + + // create an entity monitoring service that allows this entity to push some management information into voltron monitoring service + EntityMonitoringService entityMonitoringService; + if (active) { + entityMonitoringService = services.getService(new ActiveEntityMonitoringServiceConfiguration()); + } else { + IMonitoringProducer monitoringProducer = services.getService(new BasicServiceConfiguration<>(IMonitoringProducer.class)); + entityMonitoringService = monitoringProducer == null ? null : services.getService(new PassiveEntityMonitoringServiceConfiguration(monitoringProducer)); + } + + // create a management registry for this entity to handle exposed objects and stats + // if management-server distribution is on the classpath + managementRegistry = entityMonitoringService == null ? 
null : services.getService(new ConsumerManagementRegistryConfiguration(entityMonitoringService) + .setStatisticConfiguration(new StatisticConfiguration( + 60, SECONDS, + 100, 1, SECONDS, + 30, SECONDS + ))); + + if (managementRegistry != null) { + + if (active) { + // expose settings about attached stores + managementRegistry.addManagementProvider(new ClientStateSettingsManagementProvider()); + } + + // expose settings about server stores + managementRegistry.addManagementProvider(new ServerStoreSettingsManagementProvider()); + // expose settings about pools + managementRegistry.addManagementProvider(new PoolSettingsManagementProvider(ehcacheStateService)); + + // expose stats about server stores + managementRegistry.addManagementProvider(new ServerStoreStatisticsManagementProvider()); + // expose stats about pools + managementRegistry.addManagementProvider(new PoolStatisticsManagementProvider(ehcacheStateService)); + } + } + + // the goal of the following code is to send the management metadata from the entity into the monitoring tree AFTER the entity creation + public void init() { + if (managementRegistry != null) { + LOGGER.trace("init()"); + + // PoolBinding.ALL_SHARED is a marker so that we can send events not specifically related to 1 pool + // this object is ignored from the stats and descriptors + managementRegistry.register(PoolBinding.ALL_SHARED); + + // expose the management registry inside voltron + managementRegistry.refresh(); + } + } + + public void clientConnected(ClientDescriptor clientDescriptor, ClientState clientState) { + if (managementRegistry != null) { + LOGGER.trace("clientConnected({})", clientDescriptor); + managementRegistry.registerAndRefresh(new ClientStateBinding(clientDescriptor, clientState)); + } + } + + + public void clientDisconnected(ClientDescriptor clientDescriptor, ClientState clientState) { + if (managementRegistry != null) { + LOGGER.trace("clientDisconnected({})", clientDescriptor); + 
managementRegistry.unregisterAndRefresh(new ClientStateBinding(clientDescriptor, clientState)); + } + } + + public void clientReconnected(ClientDescriptor clientDescriptor, ClientState clientState) { + if (managementRegistry != null) { + LOGGER.trace("clientReconnected({})", clientDescriptor); + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(new ClientStateBinding(clientDescriptor, clientState), "EHCACHE_CLIENT_RECONNECTED"); + } + } + + public void sharedPoolsConfigured() { + if (managementRegistry != null) { + LOGGER.trace("sharedPoolsConfigured()"); + ehcacheStateService.getSharedResourcePools() + .entrySet() + .forEach(e -> managementRegistry.register(new PoolBinding(e.getKey(), e.getValue(), PoolBinding.AllocationType.SHARED))); + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(PoolBinding.ALL_SHARED, "EHCACHE_RESOURCE_POOLS_CONFIGURED"); + } + } + + public void clientValidated(ClientDescriptor clientDescriptor, ClientState clientState) { + if (managementRegistry != null) { + LOGGER.trace("clientValidated({})", clientDescriptor); + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(new ClientStateBinding(clientDescriptor, clientState), "EHCACHE_CLIENT_VALIDATED"); + } + } + + public void serverStoreCreated(String name) { + if (managementRegistry != null) { + LOGGER.trace("serverStoreCreated({})", name); + ServerSideServerStore serverStore = ehcacheStateService.getStore(name); + ServerStoreBinding serverStoreBinding = new ServerStoreBinding(name, serverStore); + managementRegistry.register(serverStoreBinding); + ServerSideConfiguration.Pool pool = ehcacheStateService.getDedicatedResourcePool(name); + if (pool != null) { + managementRegistry.register(new PoolBinding(name, pool, PoolBinding.AllocationType.DEDICATED)); + } + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(serverStoreBinding, "EHCACHE_SERVER_STORE_CREATED"); + } + } + + 
public void storeAttached(ClientDescriptor clientDescriptor, ClientState clientState, String storeName) { + if (managementRegistry != null) { + LOGGER.trace("storeAttached({}, {})", clientDescriptor, storeName); + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(new ClientBinding(clientDescriptor, clientState), "EHCACHE_SERVER_STORE_ATTACHED", Context.create("storeName", storeName)); + } + } + + public void storeReleased(ClientDescriptor clientDescriptor, ClientState clientState, String storeName) { + if (managementRegistry != null) { + LOGGER.trace("storeReleased({}, {})", clientDescriptor, storeName); + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(new ClientBinding(clientDescriptor, clientState), "EHCACHE_SERVER_STORE_RELEASED", Context.create("storeName", storeName)); + } + } + + public void serverStoreDestroyed(String name) { + ServerSideServerStore serverStore = ehcacheStateService.getStore(name); + if (managementRegistry != null && serverStore != null) { + LOGGER.trace("serverStoreDestroyed({})", name); + ServerStoreBinding managedObject = new ServerStoreBinding(name, serverStore); + managementRegistry.pushServerEntityNotification(managedObject, "EHCACHE_SERVER_STORE_DESTROYED"); + managementRegistry.unregister(managedObject); + ServerSideConfiguration.Pool pool = ehcacheStateService.getDedicatedResourcePool(name); + if (pool != null) { + managementRegistry.unregister(new PoolBinding(name, pool, PoolBinding.AllocationType.DEDICATED)); + } + managementRegistry.refresh(); + } + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java new file mode 100644 index 0000000000..511bdadb16 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java @@ -0,0 +1,64 @@ +/* + * Copyright Terracotta, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.terracotta.management.model.Objects; +import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; + +class PoolBinding extends AliasBinding { + + enum AllocationType { + SHARED, + DEDICATED + } + + // this marker is used to send global notification - it is not a real pool + static final PoolBinding ALL_SHARED = new PoolBinding("PoolBinding#all-shared", new ServerSideConfiguration.Pool(1, ""), AllocationType.SHARED); + + private final AllocationType allocationType; + + PoolBinding(String identifier, ServerSideConfiguration.Pool serverStore, AllocationType allocationType) { + super(identifier, serverStore); + this.allocationType = Objects.requireNonNull(allocationType); + } + + AllocationType getAllocationType() { + return allocationType; + } + + @Override + public ServerSideConfiguration.Pool getValue() { + return (ServerSideConfiguration.Pool) super.getValue(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + PoolBinding that = (PoolBinding) o; + return allocationType == that.allocationType; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + allocationType.hashCode(); + 
return result; + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java new file mode 100644 index 0000000000..77bc499d1f --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java @@ -0,0 +1,84 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.server.state.EhcacheStateService; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.Settings; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.ExposedObject; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.stream.Collectors; + +@Named("PoolSettings") +@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) +class PoolSettingsManagementProvider extends AliasBindingManagementProvider { + + private final EhcacheStateService ehcacheStateService; + + PoolSettingsManagementProvider(EhcacheStateService ehcacheStateService) { + super(PoolBinding.class); + this.ehcacheStateService = ehcacheStateService; + } + + @Override + public Collection getDescriptors() { + Collection descriptors = new ArrayList<>(super.getDescriptors()); + descriptors.add(new Settings() + .set("type", getCapabilityName()) + .set("defaultServerResource", ehcacheStateService.getDefaultServerResource())); + return descriptors; + } + + @Override + public Collection> getExposedObjects() { + return super.getExposedObjects().stream().filter(e -> e.getTarget() != PoolBinding.ALL_SHARED).collect(Collectors.toList()); + } + + @Override + protected ExposedPoolBinding internalWrap(Context context, PoolBinding managedObject) { + return new ExposedPoolBinding(context, managedObject); + } + + private static class ExposedPoolBinding extends ExposedAliasBinding { + + ExposedPoolBinding(Context context, PoolBinding binding) { + super(context, binding); + } + + @Override + public Context 
getContext() { + return super.getContext().with("type", "Pool"); + } + + @Override + public Collection getDescriptors() { + return getBinding() == PoolBinding.ALL_SHARED ? + Collections.emptyList() : + Collections.singleton(new Settings(getContext()) + .set("serverResource", getBinding().getValue().getServerResource()) + .set("size", getBinding().getValue().getSize()) + .set("allocationType", getBinding().getAllocationType().name().toLowerCase())); + } + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java new file mode 100644 index 0000000000..cf57004aaa --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java @@ -0,0 +1,96 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.ResourcePageSource; +import org.terracotta.context.extended.StatisticsRegistry; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.ExposedObject; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.service.monitoring.registry.provider.AbstractExposedStatistics; +import org.terracotta.management.service.monitoring.registry.provider.AbstractStatisticsManagementProvider; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import static java.util.Arrays.asList; +import static org.terracotta.context.extended.ValueStatisticDescriptor.descriptor; + +@Named("PoolStatistics") +@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) +class PoolStatisticsManagementProvider extends AbstractStatisticsManagementProvider { + + private final EhcacheStateService ehcacheStateService; + + PoolStatisticsManagementProvider(EhcacheStateService ehcacheStateService) { + super(PoolBinding.class); + this.ehcacheStateService = ehcacheStateService; + } + + @Override + public Collection> getExposedObjects() { + return super.getExposedObjects().stream().filter(e -> e.getTarget() != PoolBinding.ALL_SHARED).collect(Collectors.toList()); + } + + @Override + protected StatisticsRegistry createStatisticsRegistry(PoolBinding managedObject) { + if (managedObject == PoolBinding.ALL_SHARED) { + return null; + } + + String poolName = managedObject.getAlias(); + PoolBinding.AllocationType allocationType = managedObject.getAllocationType(); + + if (allocationType == PoolBinding.AllocationType.DEDICATED) { + ResourcePageSource resourcePageSource = 
Objects.requireNonNull(ehcacheStateService.getDedicatedResourcePageSource(poolName)); + return getStatisticsService().createStatisticsRegistry(resourcePageSource); + + } else { + ResourcePageSource resourcePageSource = Objects.requireNonNull(ehcacheStateService.getSharedResourcePageSource(poolName)); + return getStatisticsService().createStatisticsRegistry(resourcePageSource); + } + } + + @Override + protected AbstractExposedStatistics internalWrap(Context context, PoolBinding managedObject, StatisticsRegistry statisticsRegistry) { + return new PoolExposedStatistics(context, managedObject, statisticsRegistry); + } + + private static class PoolExposedStatistics extends AbstractExposedStatistics { + + PoolExposedStatistics(Context context, PoolBinding binding, StatisticsRegistry statisticsRegistry) { + super(context, binding, statisticsRegistry); + + if (statisticsRegistry != null) { + statisticsRegistry.registerSize("AllocatedSize", descriptor("allocatedSize", tags("tier", "Pool"))); + } + } + + @Override + public Context getContext() { + return super.getContext().with("type", "Pool"); + } + + } + + private static Set tags(String... tags) {return new HashSet<>(asList(tags));} + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java new file mode 100644 index 0000000000..5411c609a7 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.server.ServerSideServerStore; +import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; + +class ServerStoreBinding extends AliasBinding { + + ServerStoreBinding(String identifier, ServerSideServerStore serverStore) { + super(identifier, serverStore); + } + + @Override + public ServerSideServerStore getValue() { + return (ServerSideServerStore) super.getValue(); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java new file mode 100644 index 0000000000..e77d2543dd --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java @@ -0,0 +1,91 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.common.PoolAllocation; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.Settings; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; + +@Named("ServerStoreSettings") +@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) +class ServerStoreSettingsManagementProvider extends AliasBindingManagementProvider { + + ServerStoreSettingsManagementProvider() { + super(ServerStoreBinding.class); + } + + @Override + public Collection getDescriptors() { + Collection descriptors = new ArrayList<>(super.getDescriptors()); + descriptors.add(new Settings() + .set("type", getCapabilityName()) + .set("time", System.currentTimeMillis())); + return descriptors; + } + + @Override + protected ExposedServerStoreBinding internalWrap(Context context, ServerStoreBinding managedObject) { + return new ExposedServerStoreBinding(context, managedObject); + } + + private static class ExposedServerStoreBinding extends ExposedAliasBinding { + + ExposedServerStoreBinding(Context context, ServerStoreBinding binding) { + super(context, binding); + } + + @Override + public Context getContext() { + return super.getContext().with("type", "ServerStore"); + } + + @Override + public Collection getDescriptors() { + return Collections.singleton(getSettings()); + } + + Settings getSettings() { + // names taken from ServerStoreConfiguration.isCompatible() + PoolAllocation poolAllocation = getBinding().getValue().getStoreConfiguration().getPoolAllocation(); + Settings settings = new 
Settings(getContext()) + .set("resourcePoolType", poolAllocation.getClass().getSimpleName().toLowerCase()) + .set("allocatedMemoryAtTime", getBinding().getValue().getAllocatedMemory()) + .set("tableCapacityAtTime", getBinding().getValue().getTableCapacity()) + .set("vitalMemoryAtTime", getBinding().getValue().getVitalMemory()) + .set("longSizeAtTime", getBinding().getValue().getSize()) + .set("dataAllocatedMemoryAtTime", getBinding().getValue().getDataAllocatedMemory()) + .set("dataOccupiedMemoryAtTime", getBinding().getValue().getDataOccupiedMemory()) + .set("dataSizeAtTime", getBinding().getValue().getDataSize()) + .set("dataVitalMemoryAtTime", getBinding().getValue().getDataVitalMemory()); + if (poolAllocation instanceof PoolAllocation.Dedicated) { + settings.set("resourcePoolDedicatedResourceName", ((PoolAllocation.Dedicated) poolAllocation).getResourceName()); + settings.set("resourcePoolDedicatedSize", ((PoolAllocation.Dedicated) poolAllocation).getSize()); + } else if (poolAllocation instanceof PoolAllocation.Shared) { + settings.set("resourcePoolSharedPoolName", ((PoolAllocation.Shared) poolAllocation).getResourcePoolName()); + } + return settings; + } + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java new file mode 100644 index 0000000000..3d45382b95 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java @@ -0,0 +1,72 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.terracotta.context.extended.StatisticsRegistry; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.service.monitoring.registry.provider.AbstractExposedStatistics; +import org.terracotta.management.service.monitoring.registry.provider.AbstractStatisticsManagementProvider; + +import java.util.HashSet; +import java.util.Set; + +import static java.util.Arrays.asList; +import static org.terracotta.context.extended.ValueStatisticDescriptor.descriptor; + +@Named("ServerStoreStatistics") +@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) +class ServerStoreStatisticsManagementProvider extends AbstractStatisticsManagementProvider { + + ServerStoreStatisticsManagementProvider() { + super(ServerStoreBinding.class); + } + + @Override + protected AbstractExposedStatistics internalWrap(Context context, ServerStoreBinding managedObject, StatisticsRegistry statisticsRegistry) { + return new ServerStoreExposedStatistics(context, managedObject, statisticsRegistry); + } + + private static class ServerStoreExposedStatistics extends AbstractExposedStatistics { + + ServerStoreExposedStatistics(Context context, ServerStoreBinding binding, StatisticsRegistry statisticsRegistry) { + super(context, binding, statisticsRegistry); + + statisticsRegistry.registerSize("AllocatedMemory", descriptor("allocatedMemory", 
tags("tier", "Store"))); + statisticsRegistry.registerSize("DataAllocatedMemory", descriptor("dataAllocatedMemory", tags("tier", "Store"))); + statisticsRegistry.registerSize("OccupiedMemory", descriptor("occupiedMemory", tags("tier", "Store"))); + statisticsRegistry.registerSize("DataOccupiedMemory", descriptor("dataOccupiedMemory", tags("tier", "Store"))); + statisticsRegistry.registerCounter("Entries", descriptor("entries", tags("tier", "Store"))); + statisticsRegistry.registerCounter("UsedSlotCount", descriptor("usedSlotCount", tags("tier", "Store"))); + statisticsRegistry.registerSize("DataVitalMemory", descriptor("dataVitalMemory", tags("tier", "Store"))); + statisticsRegistry.registerSize("VitalMemory", descriptor("vitalMemory", tags("tier", "Store"))); + statisticsRegistry.registerSize("ReprobeLength", descriptor("reprobeLength", tags("tier", "Store"))); + statisticsRegistry.registerCounter("RemovedSlotCount", descriptor("removedSlotCount", tags("tier", "Store"))); + statisticsRegistry.registerSize("DataSize", descriptor("dataSize", tags("tier", "Store"))); + statisticsRegistry.registerSize("TableCapacity", descriptor("tableCapacity", tags("tier", "Store"))); + } + + @Override + public Context getContext() { + return super.getContext().with("type", "ServerStore"); + } + + } + + private static Set tags(String... 
tags) {return new HashSet<>(asList(tags));} + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java index f81b8a4f8c..c03f06909d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java @@ -21,6 +21,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.locks.Lock; @@ -36,7 +37,10 @@ import org.terracotta.offheapstore.paging.PageSource; import org.terracotta.offheapstore.storage.portability.Portability; -class OffHeapChainMap implements MapInternals { +import com.tc.classloader.CommonComponent; + +@CommonComponent +public class OffHeapChainMap implements MapInternals { interface ChainMapEvictionListener { void onEviction(K key); @@ -44,10 +48,10 @@ interface ChainMapEvictionListener { private final ReadWriteLockedOffHeapClockCache heads; private final OffHeapChainStorageEngine chainStorage; - private volatile ChainMapEvictionListener evictionListener;; + private volatile ChainMapEvictionListener evictionListener; public OffHeapChainMap(PageSource source, Portability keyPortability, int minPageSize, int maxPageSize, boolean shareByThieving) { - this.chainStorage = new OffHeapChainStorageEngine(source, keyPortability, minPageSize, maxPageSize, shareByThieving, shareByThieving); + this.chainStorage = new OffHeapChainStorageEngine<>(source, keyPortability, minPageSize, maxPageSize, shareByThieving, shareByThieving); EvictionListener listener = new EvictionListener() { @Override public void evicting(Callable> callable) { @@ -68,7 +72,13 @@ public void evicting(Callable> callable) { //TODO: EvictionListeningReadWriteLockedOffHeapClockCache lacks ctor that takes shareByThieving // 
this.heads = new ReadWriteLockedOffHeapClockCache(source, shareByThieving, chainStorage); - this.heads = new EvictionListeningReadWriteLockedOffHeapClockCache(listener, source, chainStorage); + this.heads = new EvictionListeningReadWriteLockedOffHeapClockCache<>(listener, source, chainStorage); + } + + //For tests + OffHeapChainMap(ReadWriteLockedOffHeapClockCache heads, OffHeapChainStorageEngine chainStorage) { + this.chainStorage = chainStorage; + this.heads = heads; } void setEvictionListener(ChainMapEvictionListener listener) { @@ -177,6 +187,27 @@ public void replaceAtHead(K key, Chain expected, Chain replacement) { } } + public void put(K key, Chain chain) { + final Lock lock = heads.writeLock(); + lock.lock(); + try { + InternalChain current = heads.get(key); + if (current != null) { + try { + replaceAtHead(key, current.detach(), chain); + } finally { + current.close(); + } + } else { + for (Element x : chain) { + append(key, x.getPayload()); + } + } + } finally { + lock.unlock(); + } + } + public void clear() { heads.writeLock().lock(); try { @@ -186,6 +217,15 @@ public void clear() { } } + public Set keySet() { + heads.writeLock().lock(); + try { + return heads.keySet(); + } finally { + heads.writeLock().unlock(); + } + } + private void evict() { int evictionIndex = heads.getEvictionIndex(); if (evictionIndex < 0) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java index 0e79350ca1..25ec0c40aa 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java @@ -17,10 +17,12 @@ import java.nio.ByteBuffer; import java.util.ArrayList; +import java.util.Collections; import java.util.Iterator; import java.util.HashSet; import java.util.List; import 
java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.locks.Lock; import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.Element; @@ -48,7 +50,7 @@ class OffHeapChainStorageEngine implements StorageEngine { private final OffHeapStorageArea storage; private final Portability keyPortability; - private final Set activeChains = new HashSet(); + private final Set activeChains = Collections.newSetFromMap(new ConcurrentHashMap()); private StorageEngine.Owner owner; private long nextSequenceNumber = 0; @@ -58,6 +60,11 @@ public OffHeapChainStorageEngine(PageSource source, Portability keyPo this.keyPortability = keyPortability; } + //For tests + Set getActiveChains() { + return this.activeChains; + } + InternalChain newChain(ByteBuffer element) { return new PrimordialChain(element); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java index 7c46959403..c44e429382 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java @@ -21,24 +21,32 @@ import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.ServerStore; +import org.ehcache.clustered.server.KeySegmentMapper; import org.ehcache.clustered.server.ServerStoreEvictionListener; +import org.terracotta.offheapstore.MapInternals; import org.terracotta.offheapstore.exceptions.OversizeMappingException; import org.terracotta.offheapstore.paging.PageSource; import static org.terracotta.offheapstore.util.MemoryUnit.KILOBYTES; import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; -public class OffHeapServerStore implements ServerStore { +public class OffHeapServerStore implements ServerStore, 
MapInternals { private final List> segments; + private final KeySegmentMapper mapper; - public OffHeapServerStore(PageSource source, int concurrency) { - segments = new ArrayList>(concurrency); - for (int i = 0; i < concurrency; i++) { + public OffHeapServerStore(PageSource source, KeySegmentMapper mapper) { + this.mapper = mapper; + segments = new ArrayList>(mapper.getSegments()); + for (int i = 0; i < mapper.getSegments(); i++) { segments.add(new OffHeapChainMap(source, LongPortability.INSTANCE, KILOBYTES.toBytes(4), MEGABYTES.toBytes(8), false)); } } + public List> getSegments() { + return segments; + } + public void setEvictionListener(final ServerStoreEvictionListener listener) { OffHeapChainMap.ChainMapEvictionListener chainMapEvictionListener = new OffHeapChainMap.ChainMapEvictionListener() { @Override @@ -147,6 +155,35 @@ public void replaceAtHead(long key, Chain expect, Chain update) { } } + public void put(long key, Chain chain) { + try { + segmentFor(key).put(key, chain); + } catch (OversizeMappingException e) { + if (handleOversizeMappingException(key)) { + try { + segmentFor(key).put(key, chain); + } catch (OversizeMappingException ex) { + //ignore + } + } + + writeLockAll(); + try { + do { + try { + segmentFor(key).put(key, chain); + } catch (OversizeMappingException ex) { + e = ex; + } + } while (handleOversizeMappingException(key)); + throw e; + } finally { + writeUnlockAll(); + } + } + } + + @Override public void clear() { for (OffHeapChainMap segment : segments) { @@ -155,7 +192,7 @@ public void clear() { } OffHeapChainMap segmentFor(long key) { - return segments.get(Math.abs((int) (key % segments.size()))); + return segments.get(mapper.getSegmentForKey(key)); } private void writeLockAll() { @@ -193,4 +230,114 @@ public void close() { } } + // stats + + @Override + public long getAllocatedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getAllocatedMemory(); + } + return total; + } + + @Override + public 
long getOccupiedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getOccupiedMemory(); + } + return total; + } + + @Override + public long getDataAllocatedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataAllocatedMemory(); + } + return total; + } + + @Override + public long getDataOccupiedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataOccupiedMemory(); + } + return total; + } + + @Override + public long getDataSize() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataSize(); + } + return total; + } + + @Override + public long getSize() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getSize(); + } + return total; + } + + @Override + public long getTableCapacity() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getTableCapacity(); + } + return total; + } + + @Override + public long getUsedSlotCount() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getUsedSlotCount(); + } + return total; + } + + @Override + public long getRemovedSlotCount() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getRemovedSlotCount(); + } + return total; + } + + @Override + public int getReprobeLength() { + int total = 0; + for (MapInternals segment : segments) { + total += segment.getReprobeLength(); + } + return total; + } + + @Override + public long getVitalMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getVitalMemory(); + } + return total; + } + + @Override + public long getDataVitalMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataVitalMemory(); + } + return total; + } + } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java 
b/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java index 0b4aca94fc..f4f878d92f 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java @@ -18,35 +18,32 @@ import org.ehcache.clustered.common.internal.exceptions.ClusterException; import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; -import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import java.util.AbstractMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.stream.Collectors; class ServerStateRepository { - private final ConcurrentMap concurrentMapRepo = new ConcurrentHashMap(); + private final ConcurrentMap> concurrentMapRepo = new ConcurrentHashMap<>(); EhcacheEntityResponse invoke(StateRepositoryOpMessage message) throws ClusterException { String mapId = message.getMapId(); ConcurrentMap map = concurrentMapRepo.get(mapId); if (map == null) { - ConcurrentHashMap newMap = new ConcurrentHashMap(); + ConcurrentHashMap newMap = new ConcurrentHashMap<>(); map = concurrentMapRepo.putIfAbsent(mapId, newMap); if (map == null) { map = newMap; } } - Object result = null; - switch (message.operation()) { - case GET: + Object result; + switch (message.getMessageType()) { + case GET_STATE_REPO: StateRepositoryOpMessage.GetMessage getMessage = (StateRepositoryOpMessage.GetMessage) message; result = map.get(getMessage.getKey()); break; @@ -55,14 +52,13 @@ EhcacheEntityResponse invoke(StateRepositoryOpMessage message) throws ClusterExc result = map.putIfAbsent(putIfAbsentMessage.getKey(), 
putIfAbsentMessage.getValue()); break; case ENTRY_SET: - Set entrySet = new HashSet(); - for (Map.Entry entry : map.entrySet()) { - entrySet.add(new AbstractMap.SimpleEntry(entry.getKey(), entry.getValue())); - } - result = entrySet; + result = map.entrySet() + .stream() + .map(entry -> new AbstractMap.SimpleEntry<>(entry.getKey(), entry.getValue())) + .collect(Collectors.toSet()); break; default: - throw new IllegalMessageException("Invalid operation: " + message.operation()); + throw new AssertionError("Unsupported operation: " + message.getMessageType()); } return EhcacheEntityResponse.mapValue(result); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java new file mode 100644 index 0000000000..afa1513bfe --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java @@ -0,0 +1,98 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server.state; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.tc.classloader.CommonComponent; + +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +@CommonComponent +public class ClientMessageTracker { + + private static final Logger LOGGER = LoggerFactory.getLogger(ClientMessageTracker.class); + + private final ConcurrentMap messageTrackers = new ConcurrentHashMap<>(); + private volatile UUID entityConfiguredStamp = null; + private volatile long configuredTimestamp; + + //TODO : This method will be removed once we move to model where + //caches are entities. Then passive just needs to keep track of + //applied messages. Thus only 'applied' method will be keeping + // track of watermarking for de-duplication. This method is only + // allowed to be used by cache lifecycle message for now. + @Deprecated + public void track(long msgId, UUID clientId) { + messageTrackers.compute(clientId, (mappedUuid, messageTracker) -> { + if (messageTracker == null) { + messageTracker = new MessageTracker(); + LOGGER.info("Tracking client {}.", clientId); + } + messageTracker.track(msgId); + return messageTracker; + }); + } + + public void applied(long msgId, UUID clientId){ + messageTrackers.compute(clientId, (mappedUuid, messageTracker) -> { + if (messageTracker == null) { + messageTracker = new MessageTracker(); + LOGGER.info("Tracking client {}.", clientId); + } + messageTracker.track(msgId); + messageTracker.applied(msgId); + return messageTracker; + }); + } + + public boolean isDuplicate(long msgId, UUID clientId) { + if (messageTrackers.get(clientId) == null) { + return false; + } + return !messageTrackers.get(clientId).shouldApply(msgId); + } + + public void remove(UUID clientId) { + messageTrackers.remove(clientId); + LOGGER.info("Stop tracking client {}.", clientId); + } + + public void setEntityConfiguredStamp(UUID 
clientId, long timestamp) { + this.entityConfiguredStamp = clientId; + this.configuredTimestamp = timestamp; + } + + public boolean isConfigureApplicable(UUID clientId, long timestamp) { + if (entityConfiguredStamp == null) { + return true; + } + if (clientId.equals(entityConfiguredStamp) && configuredTimestamp == timestamp) { + return false; + } + return true; + } + + public void reconcileTrackedClients(Set trackedClients) { + messageTrackers.keySet().retainAll(trackedClients); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index 56eb427d02..bcbba9d402 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -16,31 +16,41 @@ package org.ehcache.clustered.server.state; +import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.ClusterException; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; -import org.ehcache.clustered.server.ServerStoreImpl; +import org.ehcache.clustered.server.ServerSideServerStore; import org.ehcache.clustered.server.repo.StateRepositoryManager; import com.tc.classloader.CommonComponent; +import java.util.Map; import java.util.Set; @CommonComponent public interface EhcacheStateService { - ServerStoreImpl getStore(String name); + String getDefaultServerResource(); + + Map getSharedResourcePools(); + + ResourcePageSource getSharedResourcePageSource(String name); + + ServerSideConfiguration.Pool getDedicatedResourcePool(String name); + + ResourcePageSource 
getDedicatedResourcePageSource(String name); + + ServerSideServerStore getStore(String name); Set getStores(); void destroy(); - void validate(ValidateStoreManager message) throws ClusterException; + void validate(ServerSideConfiguration configuration) throws ClusterException; - void configure(ConfigureStoreManager message) throws ClusterException; + void configure(ServerSideConfiguration configuration) throws ClusterException; - ServerStoreImpl createStore(String name, ServerStoreConfiguration serverStoreConfiguration) throws ClusterException; + ServerSideServerStore createStore(String name, ServerStoreConfiguration serverStoreConfiguration) throws ClusterException; void destroyServerStore(String name) throws ClusterException; @@ -48,4 +58,14 @@ public interface EhcacheStateService { StateRepositoryManager getStateRepositoryManager() throws ClusterException; + ClientMessageTracker getClientMessageTracker(); + + InvalidationTracker getInvalidationTracker(String cacheId); + + void addInvalidationtracker(String cacheId); + + InvalidationTracker removeInvalidationtracker(String cacheId); + + void loadExisting(); + } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java index c61c27b0f1..5ff4f636e9 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java @@ -18,15 +18,20 @@ import org.ehcache.clustered.server.EhcacheStateServiceImpl; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.entity.PlatformConfiguration; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceProvider; import 
org.terracotta.entity.ServiceProviderCleanupException; import org.terracotta.entity.ServiceProviderConfiguration; +import org.terracotta.offheapresource.OffHeapResources; import com.tc.classloader.BuiltinService; import java.util.ArrayList; import java.util.Collection; +import java.util.Iterator; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -37,36 +42,60 @@ @BuiltinService public class EhcacheStateServiceProvider implements ServiceProvider { - private ConcurrentMap serviceMap = new ConcurrentHashMap(); + private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheStateServiceProvider.class); + + private ConcurrentMap serviceMap = new ConcurrentHashMap<>(); + private OffHeapResources offHeapResourcesProvider; @Override - public boolean initialize(ServiceProviderConfiguration configuration) { + public boolean initialize(ServiceProviderConfiguration configuration, PlatformConfiguration platformConfiguration) { + Collection extendedConfiguration = platformConfiguration.getExtendedConfiguration(OffHeapResources.class); + if (extendedConfiguration.size() > 1) { + throw new UnsupportedOperationException("There are " + extendedConfiguration.size() + " OffHeapResourcesProvider, this is not supported. " + + "There must be only one!"); + } + Iterator iterator = extendedConfiguration.iterator(); + if (iterator.hasNext()) { + offHeapResourcesProvider = iterator.next(); + if (offHeapResourcesProvider.getAllIdentifiers().isEmpty()) { + throw new UnsupportedOperationException("There are no offheap-resource defined, this is not supported. 
There must be at least one!"); + } + } else { + LOGGER.warn("No offheap-resource defined - this will prevent provider from offering any EhcacheStateService."); + } return true; } @Override public T getService(long consumerID, ServiceConfiguration configuration) { if (configuration != null && configuration.getServiceType().equals(EhcacheStateService.class)) { + if (offHeapResourcesProvider == null) { + LOGGER.warn("EhcacheStateService requested but no offheap-resource was defined - returning null"); + return null; + } EhcacheStateServiceConfig stateServiceConfig = (EhcacheStateServiceConfig) configuration; - EhcacheStateService storeManagerService = new EhcacheStateServiceImpl(stateServiceConfig.getServiceRegistry(), stateServiceConfig.getOffHeapResourceIdentifiers()); + EhcacheStateService storeManagerService = new EhcacheStateServiceImpl( + offHeapResourcesProvider, stateServiceConfig.getMapper()); EhcacheStateService result = serviceMap.putIfAbsent(consumerID, storeManagerService); if (result == null) { result = storeManagerService; } - return (T) result; + @SuppressWarnings("unchecked") + T typedResult = (T) result; + return typedResult; } throw new IllegalArgumentException("Unexpected configuration type."); } @Override public Collection> getProvidedServiceTypes() { - List> classes = new ArrayList>(); + List> classes = new ArrayList<>(); classes.add(EhcacheStateService.class); return classes; } @Override - public void clear() throws ServiceProviderCleanupException { + public void prepareForSynchronization() throws ServiceProviderCleanupException { serviceMap.clear(); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java new file mode 100644 index 0000000000..49177d2fe3 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java @@ -0,0 +1,43 @@ +/* + * Copyright 
Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.state; + +import com.tc.classloader.CommonComponent; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicBoolean; + +@CommonComponent +public class InvalidationTracker { + + private final ConcurrentMap invalidationMap = new ConcurrentHashMap<>(); + private final AtomicBoolean isClearInProgress = new AtomicBoolean(false); + + public boolean isClearInProgress() { + return isClearInProgress.get(); + } + + public void setClearInProgress(boolean clearInProgress) { + isClearInProgress.getAndSet(clearInProgress); + } + + public ConcurrentMap getInvalidationMap() { + return invalidationMap; + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java new file mode 100644 index 0000000000..bb3da1f22c --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java @@ -0,0 +1,131 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
/**
 * Tracks message ids seen by a passive entity so that, after promotion to active,
 * resent messages can be deduplicated.
 *
 * Invariants maintained: every id at or below {@code lowerWaterMark} is known to be
 * applied (and has been compacted out of the map); {@code higherWaterMark} is the
 * largest id ever tracked.
 */
public class MessageTracker {

  /** msgId -> applied flag: false = tracked but not yet applied, true = applied. */
  private final ConcurrentHashMap<Long, Boolean> inProgressMessages = new ConcurrentHashMap<>();

  /** Guarded by lwmLock; ids <= this value are all applied and removed from the map. */
  private long lowerWaterMark = -1L;
  private final AtomicLong higherWaterMark = new AtomicLong(-1L);
  private final ReadWriteLock lwmLock = new ReentrantReadWriteLock();

  /**
   * This method is only meant to be called by the Active Entity.
   * This needs to be thread safe.
   * This tells whether the message should be applied or not.
   * As and when messages are checked for deduplication, which is only
   * done on a newly promoted active, the non-duplicate ones are marked
   * applied in inProgressMessages.
   *
   * @param msgId message id to check
   * @return whether the entity should apply the message or not
   */
  boolean shouldApply(long msgId) {
    Lock lock = lwmLock.readLock();
    lock.lock(); // acquire outside try: if lock() itself failed we must not unlock
    try {
      if (msgId < lowerWaterMark) {
        return false; // already applied and compacted away -> duplicate
      }
    } finally {
      lock.unlock();
    }
    if (msgId > higherWaterMark.get()) {
      return true; // never seen by the passive -> genuinely new message
    }
    final AtomicBoolean shouldApply = new AtomicBoolean(false);
    inProgressMessages.computeIfPresent(msgId, (id, applied) -> {
      if (!applied) {
        shouldApply.set(true); // first sighting on the new active: apply it
      }
      return true; // mark as applied either way
    });
    updateLowerWaterMark();
    return shouldApply.get();
  }

  /**
   * Only to be invoked on Passive Entity.
   *
   * @param msgId message id to start tracking
   */
  @Deprecated
  void track(long msgId) {
    //TODO: remove this once we move to CACHE as ENTITY model.
    inProgressMessages.put(msgId, false);
    updateHigherWaterMark(msgId);
  }

  /**
   * Only to be invoked on Passive Entity.
   * Assumes there is no message loss and
   * message ids are ever increasing.
   *
   * @param msgId message id that has been applied
   */
  void applied(long msgId) {
    // mark applied (no-op if the id was never tracked)
    inProgressMessages.computeIfPresent(msgId, (id, applied) -> true);
    updateLowerWaterMark();
  }

  /** @return {@code true} when no tracked message remains un-compacted */
  boolean isEmpty() {
    return inProgressMessages.isEmpty();
  }

  /** CAS loop: raise higherWaterMark to msgId unless it is already higher. */
  private void updateHigherWaterMark(long msgId) {
    for (;;) {
      long current = higherWaterMark.get();
      if (msgId < current) {
        return;
      }
      if (higherWaterMark.compareAndSet(current, msgId)) {
        return;
      }
    }
  }

  /**
   * Advances lowerWaterMark over the contiguous run of applied messages, removing them.
   * Best-effort: tryLock means a concurrent caller already doing this work is not waited on.
   */
  private void updateLowerWaterMark() {
    Lock lock = lwmLock.writeLock();
    if (lock.tryLock()) {
      try {
        for (long i = lowerWaterMark + 1; i <= higherWaterMark.get(); i++) {
          final AtomicBoolean removed = new AtomicBoolean(false);
          inProgressMessages.computeIfPresent(i, (id, applied) -> {
            if (applied) {
              removed.set(true);
              return null; // returning null removes the entry
            }
            return applied;
          });
          if (removed.get()) {
            lowerWaterMark++;
          } else {
            break; // gap or unapplied message: the contiguous run ends here
          }
        }
      } finally {
        lock.unlock();
      }
    }
  }
}
b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java new file mode 100644 index 0000000000..9827e45474 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java @@ -0,0 +1,72 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.state; + +import com.tc.classloader.CommonComponent; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; +import org.terracotta.offheapstore.paging.OffHeapStorageArea; +import org.terracotta.offheapstore.paging.Page; +import org.terracotta.offheapstore.paging.PageSource; +import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; + +import static org.terracotta.offheapstore.util.MemoryUnit.GIGABYTES; +import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; + +/** + * Pairs a {@link ServerSideConfiguration.Pool} and an {@link UpfrontAllocatingPageSource} instance providing storage + * for the pool. + */ +@CommonComponent +public class ResourcePageSource implements PageSource { + /** + * A description of the resource allocation underlying this {@code PageSource}. 
+ */ + private final ServerSideConfiguration.Pool pool; + private final UpfrontAllocatingPageSource delegatePageSource; + + public ResourcePageSource(ServerSideConfiguration.Pool pool) { + this.pool = pool; + this.delegatePageSource = new UpfrontAllocatingPageSource(new OffHeapBufferSource(), pool.getSize(), GIGABYTES.toBytes(1), MEGABYTES.toBytes(128)); + } + + public ServerSideConfiguration.Pool getPool() { + return pool; + } + + public long getAllocatedSize() { + return delegatePageSource.getAllocatedSizeUnSync(); + } + + @Override + public Page allocate(int size, boolean thief, boolean victim, OffHeapStorageArea owner) { + return delegatePageSource.allocate(size, thief, victim, owner); + } + + @Override + public void free(Page page) { + delegatePageSource.free(page); + } + + @Override + public String toString() { + final StringBuilder sb = new StringBuilder("ResourcePageSource{"); + sb.append("pool=").append(pool); + sb.append(", delegatePageSource=").append(delegatePageSource); + sb.append('}'); + return sb.toString(); + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java index 8eb057fc8f..c3a36acd30 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.server.state.config; +import org.ehcache.clustered.server.KeySegmentMapper; import org.ehcache.clustered.server.state.EhcacheStateService; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; @@ -28,11 +29,12 @@ public class EhcacheStateServiceConfig implements ServiceConfiguration { private final ServiceRegistry serviceRegistry; - private final Set offHeapResourceIdentifiers; + 
private final KeySegmentMapper mapper; - public EhcacheStateServiceConfig(ServiceRegistry serviceRegistry, Set offHeapResourceIdentifiers) { + + public EhcacheStateServiceConfig(ServiceRegistry serviceRegistry, final KeySegmentMapper mapper) { this.serviceRegistry = serviceRegistry; - this.offHeapResourceIdentifiers = offHeapResourceIdentifiers; + this.mapper = mapper; } @Override @@ -44,8 +46,8 @@ public ServiceRegistry getServiceRegistry() { return this.serviceRegistry; } - public Set getOffHeapResourceIdentifiers() { - return this.offHeapResourceIdentifiers; + public KeySegmentMapper getMapper() { + return mapper; } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java index 2664204755..ae51a54962 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.server; import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.hamcrest.Matcher; import org.junit.Test; import org.terracotta.entity.ConcurrencyStrategy; @@ -24,44 +25,56 @@ import java.util.HashSet; import java.util.Set; +import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.DATA_CONCURRENCY_KEY_OFFSET; import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.DEFAULT_KEY; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; +import static 
org.terracotta.entity.ConcurrencyStrategy.UNIVERSAL_KEY; /** * @author Ludovic Orban */ public class DefaultConcurrencyStrategyTest { + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + @Test public void testConcurrencyKey() throws Exception { final int concurrency = 107; - ConcurrencyStrategy strategy = ConcurrencyStrategies.defaultConcurrency(concurrency); + ConcurrencyStrategy strategy = ConcurrencyStrategies.defaultConcurrency(DEFAULT_MAPPER); assertThat(strategy.concurrencyKey(new NonConcurrentTestEntityMessage()), is(DEFAULT_KEY)); for (int i = -1024; i < 1024; i++) { - assertThat(strategy.concurrencyKey(new ConcurrentTestEntityMessage(i)), withinRange(DEFAULT_KEY, DEFAULT_KEY + concurrency)); + assertThat(strategy.concurrencyKey(new ConcurrentTestEntityMessage(i)), withinRange(DATA_CONCURRENCY_KEY_OFFSET, DATA_CONCURRENCY_KEY_OFFSET + concurrency)); } } + @Test + public void testConcurrencyKeyForServerStoreGetOperation() throws Exception { + ConcurrencyStrategy strategy = ConcurrencyStrategies.defaultConcurrency(DEFAULT_MAPPER); + ServerStoreOpMessage.GetMessage getMessage = mock(ServerStoreOpMessage.GetMessage.class); + assertThat(strategy.concurrencyKey(getMessage), is(UNIVERSAL_KEY)); + } + @Test public void testKeysForSynchronization() throws Exception { final int concurrency = 111; - ConcurrencyStrategy strategy = ConcurrencyStrategies.defaultConcurrency(concurrency); - - assertThat(strategy.concurrencyKey(new NonConcurrentTestEntityMessage()), is(DEFAULT_KEY)); + ConcurrencyStrategy strategy = ConcurrencyStrategies.defaultConcurrency(DEFAULT_MAPPER); - Set visitedConcurrencyKeys = new HashSet(); + Set visitedConcurrencyKeys = new HashSet<>(); for (int i = -1024; i < 1024; i++) { int concurrencyKey = strategy.concurrencyKey(new ConcurrentTestEntityMessage(i)); - assertThat(concurrencyKey, withinRange(DEFAULT_KEY, DEFAULT_KEY + concurrency)); + assertThat(concurrencyKey, withinRange(DATA_CONCURRENCY_KEY_OFFSET, 
DATA_CONCURRENCY_KEY_OFFSET + concurrency)); visitedConcurrencyKeys.add(concurrencyKey); } - assertThat(strategy.getKeysForSynchronization().containsAll(visitedConcurrencyKeys), is(true)); + Set keysForSynchronization = strategy.getKeysForSynchronization(); + assertThat(keysForSynchronization.contains(DEFAULT_KEY), is(true)); + assertThat(keysForSynchronization.containsAll(visitedConcurrencyKeys), is(true)); } private static Matcher withinRange(int greaterThanOrEqualTo, int lessThan) { @@ -80,7 +93,7 @@ public ConcurrentTestEntityMessage(int key) { } @Override - public int concurrencyKey() { + public long concurrencyKey() { return key; } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 822d13256b..689fac54cc 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -29,37 +29,55 @@ import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.exceptions.ResourceBusyException; import org.ehcache.clustered.common.internal.exceptions.ResourceConfigurationException; -import org.ehcache.clustered.common.internal.exceptions.ServerMisconfigurationException; +import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Failure; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponseFactory; +import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import 
org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; +import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; +import org.ehcache.clustered.server.internal.messages.EhcacheStateSyncMessage; +import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.InvalidationTracker; import org.hamcrest.Matchers; +import org.junit.Before; import org.junit.Test; +import org.mockito.ArgumentCaptor; import org.terracotta.entity.ClientCommunicator; import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.IEntityMessenger; +import org.terracotta.entity.MessageCodecException; +import org.terracotta.entity.PassiveSynchronizationChannel; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; +import org.terracotta.management.service.monitoring.ActiveEntityMonitoringServiceConfiguration; +import org.terracotta.management.service.monitoring.ConsumerManagementRegistryConfiguration; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; import org.terracotta.offheapstore.util.MemoryUnit; +import java.nio.ByteBuffer; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; +import java.util.Random; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; + import org.ehcache.clustered.common.PoolAllocation.Dedicated; -import 
org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Type; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Type.FAILURE; import static org.ehcache.clustered.common.internal.store.Util.createPayload; +import static org.ehcache.clustered.server.EhcacheActiveEntity.SYNC_DATA_SIZE_PROP; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -67,23 +85,31 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import org.junit.Assert; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; -/** - * @author cdennis - */ public class EhcacheActiveEntityTest { private static final byte[] ENTITY_ID = ClusteredEhcacheIdentity.serialize(UUID.randomUUID()); private static final LifeCycleMessageFactory MESSAGE_FACTORY = new LifeCycleMessageFactory(); + private static final UUID CLIENT_ID = UUID.randomUUID(); + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + + @Before + public void setClientId() { + MESSAGE_FACTORY.setClientId(CLIENT_ID); + } @Test public void testConfigTooShort() { try { - new EhcacheActiveEntity(null, new byte[ENTITY_ID.length - 1]); + new EhcacheActiveEntity(null, new byte[ENTITY_ID.length - 1], DEFAULT_MAPPER); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { //expected @@ -93,7 +119,7 @@ public void testConfigTooShort() { @Test public void testConfigTooLong() { try { - new EhcacheActiveEntity(null, new byte[ENTITY_ID.length + 1]); + new EhcacheActiveEntity(null, new byte[ENTITY_ID.length 
+ 1], DEFAULT_MAPPER); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { //expected @@ -103,7 +129,7 @@ public void testConfigTooLong() { @Test public void testConfigNull() { try { - new EhcacheActiveEntity(null, null); + new EhcacheActiveEntity(null, null, DEFAULT_MAPPER); fail("Expected NullPointerException"); } catch (NullPointerException e) { //expected @@ -115,7 +141,7 @@ public void testConfigNull() { */ @Test public void testConnected() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -129,7 +155,7 @@ public void testConnected() throws Exception { @Test public void testConnectedAgain() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -146,7 +172,7 @@ public void testConnectedAgain() throws Exception { @Test public void testConnectedSecond() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -168,7 +194,7 @@ public void testConnectedSecond() throws Exception { */ @Test public void testDisconnectedNotConnected() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final 
EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.disconnected(client); @@ -180,7 +206,7 @@ public void testDisconnectedNotConnected() throws Exception { */ @Test public void testDisconnected() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -197,7 +223,7 @@ public void testDisconnected() throws Exception { */ @Test public void testDisconnectedSecond() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -213,20 +239,6 @@ public void testDisconnectedSecond() throws Exception { assertThat(activeEntity.getInUseStores().isEmpty(), is(true)); } - @Test - public void testInteractionWithServerWithoutResources() throws Exception { - OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); - ClientDescriptor client = new TestClientDescriptor(); - activeEntity.connected(client); - - String expectedErrorMessage = "Server started without any offheap resources defined."; - assertFailure( - activeEntity.invoke(client, mock(EhcacheEntityMessage.class)), - ServerMisconfigurationException.class, expectedErrorMessage - ); - } - /** * Ensures basic shared resource pool configuration. 
*/ @@ -237,7 +249,7 @@ public void testConfigure() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -267,7 +279,7 @@ public void testNoAttachementFailsToInvokeServerStoreOperation() throws Exceptio registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -293,7 +305,7 @@ public void testNoAttachementFailsToInvokeServerStoreOperation() throws Exceptio activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testNoAttachementFailsToInvokeServerStoreOperation"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testNoAttachementFailsToInvokeServerStoreOperation", CLIENT_ID); assertFailure( activeEntity.invoke(client, messageFactory.appendOperation(1L, createPayload(1L))), @@ -308,7 +320,7 @@ public void testAppendInvalidationAcksTakenIntoAccount() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new 
EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -316,6 +328,9 @@ public void testAppendInvalidationAcksTakenIntoAccount() throws Exception { activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -335,26 +350,31 @@ public void testAppendInvalidationAcksTakenIntoAccount() throws Exception { MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( 
activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -399,7 +419,7 @@ public void testClearInvalidationAcksTakenIntoAccount() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -407,6 +427,9 @@ public void testClearInvalidationAcksTakenIntoAccount() throws Exception { activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -426,26 +449,31 @@ public void testClearInvalidationAcksTakenIntoAccount() throws Exception { MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + 
MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -490,7 +518,7 @@ public void testAppendInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcc registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -498,6 +526,9 @@ public void testAppendInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcc activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -517,26 +548,31 @@ public void testAppendInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcc MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( activeEntity.invoke(client1, 
MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -577,7 +613,7 @@ public void testClearInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcco registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -585,6 +621,9 @@ public void testClearInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcco activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -604,26 +643,31 @@ public void 
testClearInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcco MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -664,7 +708,7 @@ public void testAppendInvalidationDisconnectionOfBlockingClientTakenIntoAccount( registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -672,6 +716,9 @@ public void 
testAppendInvalidationDisconnectionOfBlockingClientTakenIntoAccount( activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -691,26 +738,31 @@ public void testAppendInvalidationDisconnectionOfBlockingClientTakenIntoAccount( MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -741,7 +793,7 @@ public void testClearInvalidationDisconnectionOfBlockingClientTakenIntoAccount() registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, 
MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -749,6 +801,9 @@ public void testClearInvalidationDisconnectionOfBlockingClientTakenIntoAccount() activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -768,26 +823,31 @@ public void testClearInvalidationDisconnectionOfBlockingClientTakenIntoAccount() MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) 
); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -818,7 +878,7 @@ public void testAttachedClientButNotStoreFailsInvokingServerStoreOperation() thr registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -847,7 +907,7 @@ public void testAttachedClientButNotStoreFailsInvokingServerStoreOperation() thr activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testAttachedClientButNotStoreFailsInvokingServerStoreOperation"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testAttachedClientButNotStoreFailsInvokingServerStoreOperation", CLIENT_ID); // attach the client assertSuccess( @@ -867,7 +927,7 @@ public void testWithAttachmentSucceedsInvokingServerStoreOperation() throws Exce registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -896,7 +956,7 @@ public void testWithAttachmentSucceedsInvokingServerStoreOperation() throws Exce activeEntity.connected(client); 
assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testWithAttachmentSucceedsInvokingServerStoreOperation"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testWithAttachmentSucceedsInvokingServerStoreOperation", CLIENT_ID); // attach the client assertSuccess( @@ -920,7 +980,7 @@ public void testConfigureBeforeConnect() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); assertFailure(activeEntity.invoke(client, @@ -939,7 +999,7 @@ public void testConfigureAfterConfigure() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -986,7 +1046,7 @@ public void testConfigureMissingPoolResource() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("defaultServerResource", 64, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); 
assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1019,7 +1079,7 @@ public void testConfigureMissingDefaultResource() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1050,7 +1110,7 @@ public void testConfigureLargeSharedPool() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1088,7 +1148,7 @@ public void testValidate2Clients() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1119,7 +1179,7 @@ public void testValidate1Client() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - 
final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1151,7 +1211,7 @@ public void testValidateAfterConfigure() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1171,7 +1231,7 @@ public void testValidateExtraResource() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1199,7 +1259,7 @@ public void testValidateNoDefaultResource() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); 
assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1224,7 +1284,7 @@ public void testCreateDedicatedServerStoreBeforeConfigure() throws Exception { final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(32, MemoryUnit.MEGABYTES); registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1243,7 +1303,7 @@ public void testCreateDedicatedServerStoreBeforeValidate() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1275,7 +1335,7 @@ public void testCreateDedicatedServerStore() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1325,7 +1385,7 @@ public void testCreateDedicatedServerStoreAfterValidate() throws Exception { .sharedPool("secondary", 
"serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1364,7 +1424,7 @@ public void testCreateDedicatedServerStoreExisting() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1405,7 +1465,7 @@ public void testCreateReleaseDedicatedServerStoreMultiple() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1475,7 +1535,7 @@ public void testValidateDedicatedServerStore() throws Exception { .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); 
activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1516,7 +1576,7 @@ public void testValidateDedicatedServerStoreBad() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1565,7 +1625,7 @@ public void testValidateDedicatedServerStoreBeforeCreate() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1603,7 +1663,7 @@ public void testCreateSharedServerStore() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1650,7 +1710,7 @@ public void testCreateSharedServerStoreExisting() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + 
final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1698,7 +1758,7 @@ public void testValidateSharedServerStore() throws Exception { .shared("primary") .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1748,7 +1808,7 @@ public void testValidateServerStore_DedicatedStoresDifferentSizes() throws Excep .dedicated("serverResource1", 2, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); activeEntity.connected(client1); assertThat(activeEntity.getConnectedClients().keySet(), contains(client1)); @@ -1793,7 +1853,7 @@ public void testValidateServerStore_DedicatedStoresSameSizes() throws Exception .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); activeEntity.connected(client1); assertThat(activeEntity.getConnectedClients().keySet(), contains(client1)); @@ -1831,7 +1891,7 @@ public void testValidateServerStore_DedicatedStoreResourceNamesDifferent() throw .dedicated("serverResource2", 4, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new 
EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); activeEntity.connected(client1); assertThat(activeEntity.getConnectedClients().keySet(), contains(client1)); @@ -1875,7 +1935,7 @@ public void testValidateServerStore_DedicatedCacheNameDifferent() throws Excepti .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); activeEntity.connected(client1); assertThat(activeEntity.getConnectedClients().keySet(), contains(client1)); @@ -1908,7 +1968,7 @@ public void testServerStoreSameNameInDifferentSharedPools() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1953,7 +2013,7 @@ public void testValidateSharedServerStoreBad() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1997,7 +2057,7 @@ public void testReleaseServerStoreBeforeAttach() throws Exception { 
registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2019,7 +2079,7 @@ public void testReleaseServerStoreAfterRelease() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2056,7 +2116,7 @@ public void testDestroyServerStore() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2119,7 +2179,7 @@ public void testDestroyServerStore() throws Exception { /** * Tests the destroy server store operation before the use of either a - * {@link LifecycleMessage.CreateServerStore CreateServerStore} + * {@link CreateServerStore CreateServerStore} * {@link LifecycleMessage.ValidateServerStore ValidateServerStore} * operation. 
*/ @@ -2129,7 +2189,7 @@ public void testDestroyServerStoreBeforeAttach() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2156,7 +2216,7 @@ public void testDestroyServerStoreInUse() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2216,7 +2276,7 @@ public void testSharedPoolCacheNameCollision() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2279,7 +2339,7 @@ public void testSharedPoolCacheNameCollision() throws Exception { @Test public void testDestroyEmpty() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry() , ENTITY_ID); + final EhcacheActiveEntity activeEntity = new 
EhcacheActiveEntity(new OffHeapIdentifierRegistry() , ENTITY_ID, DEFAULT_MAPPER); activeEntity.destroy(); } @@ -2289,7 +2349,7 @@ public void testDestroyWithStores() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2363,7 +2423,7 @@ public void testValidateIdenticalConfiguration() { registry.addResource("primary-server-resource", 16, MemoryUnit.MEGABYTES); registry.addResource("secondary-server-resource", 16, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); activeEntity.invoke(configurer, MESSAGE_FACTORY.configureStoreManager(configureConfig)); @@ -2371,7 +2431,7 @@ public void testValidateIdenticalConfiguration() { ClientDescriptor validator = new TestClientDescriptor(); activeEntity.connected(validator); - assertThat(activeEntity.invoke(validator, MESSAGE_FACTORY.validateStoreManager(validateConfig)).getType(), is(Type.SUCCESS)); + assertThat(activeEntity.invoke(validator, MESSAGE_FACTORY.validateStoreManager(validateConfig)).getResponseType(), is(EhcacheResponseType.SUCCESS)); } @Test @@ -2381,7 +2441,7 @@ public void testValidateSharedPoolNamesDifferent() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new 
EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); @@ -2409,7 +2469,7 @@ public void testValidateDefaultResourceNameDifferent() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); @@ -2437,7 +2497,7 @@ public void testValidateClientSharedPoolSizeTooBig() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); @@ -2465,7 +2525,7 @@ public void testValidateSecondClientInheritsFirstClientConfig() throws Exception registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); @@ -2486,7 +2546,7 @@ public void testValidateNonExistentSharedPool() throws Exception { OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(32, MemoryUnit.MEGABYTES); registry.addResource("defaultServerResource", 8, 
MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); @@ -2516,7 +2576,7 @@ public void testCreateServerStoreWithUnknownPool() throws Exception { .unknown() .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2533,6 +2593,445 @@ public void testCreateServerStoreWithUnknownPool() throws Exception { } } + @Test + public void testSyncToPassive() throws Exception { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("myCache", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + @SuppressWarnings("unchecked") + 
PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, 1); + + ArgumentCaptor captor = ArgumentCaptor.forClass(EhcacheStateSyncMessage.class); + verify(syncChannel).synchronizeToPassive(captor.capture()); + + EhcacheStateSyncMessage capturedSyncMessage = captor.getValue(); + ServerSideConfiguration configuration = capturedSyncMessage.getConfiguration(); + assertThat(configuration.getDefaultServerResource(), is("serverResource1")); + assertThat(configuration.getResourcePools().keySet(), containsInAnyOrder("primary", "secondary")); + + Map storeConfigs = capturedSyncMessage.getStoreConfigs(); + assertThat(storeConfigs.keySet(), containsInAnyOrder("myCache")); + assertThat(storeConfigs.get("myCache").getPoolAllocation(), instanceOf(PoolAllocation.Shared.class)); + + } + + @Test + public void testDataSyncToPassiveBatchedByDefault() throws Exception { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("myCache", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + 
ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("myCache", UUID.randomUUID()); + + ByteBuffer payload = ByteBuffer.allocate(512); + // Put keys that maps to the same concurrency key + activeEntity.invoke(client, messageFactory.appendOperation(1L, payload)); + activeEntity.invoke(client, messageFactory.appendOperation(-2L, payload)); + activeEntity.invoke(client, messageFactory.appendOperation(17L, payload)); + + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, 3); + + verify(syncChannel).synchronizeToPassive(any(EhcacheDataSyncMessage.class)); + } + + @Test + public void testDataSyncToPassiveCustomBatchSize() throws Exception { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("myCache", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("myCache", UUID.randomUUID()); + + ByteBuffer payload = ByteBuffer.allocate(512); + // Put keys that maps 
to the same concurrency key + activeEntity.invoke(client, messageFactory.appendOperation(1L, payload)); + activeEntity.invoke(client, messageFactory.appendOperation(-2L, payload)); + activeEntity.invoke(client, messageFactory.appendOperation(17L, payload)); + activeEntity.invoke(client, messageFactory.appendOperation(33L, payload)); + + System.setProperty(SYNC_DATA_SIZE_PROP, "512"); + try { + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, 3); + + verify(syncChannel, atLeast(2)).synchronizeToPassive(any(EhcacheDataSyncMessage.class)); + } finally { + System.clearProperty(SYNC_DATA_SIZE_PROP); + } + } + + @Test + public void testSyncToPassiveWithoutDefaultServerResource() throws Exception { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder().build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, 1); + } + + @Test + public void testLoadExistingRecoversInflightInvalidationsForEventualCache() { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + ClientDescriptor client = new 
TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("test", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + EhcacheStateServiceImpl ehcacheStateService = registry.getStoreManagerService(); + ehcacheStateService.addInvalidationtracker("test"); + + InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker("test"); + + Random random = new Random(); + random.ints(0, 100).limit(10).forEach(x -> invalidationTracker.getInvalidationMap().put((long)x, x)); + + activeEntity.loadExisting(); + + assertThat(ehcacheStateService.getInvalidationTracker("test"), nullValue()); + + } + + @Test + public void testCreateServerStoreSendsPassiveReplicationMessageIfSuccessful() throws MessageCodecException { + + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + IEntityMessenger entityMessenger = registry.getEntityMessenger(); + + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + 
MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + try { + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("test", + new ServerStoreConfigBuilder() + .shared("primary1") + .build())); + } catch (Exception e) { + //nothing to do + } + + verify(entityMessenger, times(0)).messageSelf(any()); + + reset(entityMessenger); + + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("test", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + verify(entityMessenger, times(0)).messageSelf(any()); + verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(CreateServerStore.class), any(PassiveReplicationMessage.class)); + + } + + @Test + public void testDestroyServerStoreSendsPassiveReplicationMessageIfSuccessful() throws MessageCodecException { + + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + IEntityMessenger entityMessenger = registry.getEntityMessenger(); + + ClientDescriptor client1 = new TestClientDescriptor(); + ClientDescriptor client2 = new TestClientDescriptor(); + activeEntity.connected(client1); + activeEntity.connected(client2); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client1, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client1, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client1, + MESSAGE_FACTORY.createServerStore("test", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + UUID client2Id = UUID.randomUUID(); + MESSAGE_FACTORY.setClientId(client2Id); + + activeEntity.invoke(client2, + 
MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client2, + MESSAGE_FACTORY.validateServerStore("test", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + MESSAGE_FACTORY.setClientId(CLIENT_ID); + try { + activeEntity.invoke(client1, + MESSAGE_FACTORY.destroyServerStore("test")); + } catch (Exception e) { + //nothing to do + } + + verify(entityMessenger, times(0)).messageSelf(any()); + verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(), any()); + + reset(entityMessenger); + + MESSAGE_FACTORY.setClientId(client2Id); + + activeEntity.invoke(client2, + MESSAGE_FACTORY.releaseServerStore("test")); + + MESSAGE_FACTORY.setClientId(CLIENT_ID); + activeEntity.invoke(client1, + MESSAGE_FACTORY.releaseServerStore("test")); + activeEntity.invoke(client1, + MESSAGE_FACTORY.destroyServerStore("test")); + + verify(entityMessenger, times(0)).messageSelf(any()); + verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(DestroyServerStore.class), any(PassiveReplicationMessage.class)); + + } + + @Test + public void testPromotedActiveIgnoresDuplicateMessages() throws MessageCodecException, ClusterException { + + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + IEntityMessenger entityMessenger = registry.getEntityMessenger(); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .build(); + + EhcacheStateService ehcacheStateService = registry.getStoreManagerService(); + ehcacheStateService.configure(serverSideConfiguration); + + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfigBuilder() + .shared("primary") + 
.setActualKeyType(Long.class) + .setActualValueType(Long.class) + .build(); + + ehcacheStateService.createStore("test", serverStoreConfiguration); + + ClientMessageTracker clientMessageTracker = ehcacheStateService.getClientMessageTracker(); + + Random random = new Random(); + Set msgIds = new HashSet<>(); + random.longs(100).distinct().forEach(x -> { + msgIds.add(x); + clientMessageTracker.track(x, CLIENT_ID); + }); + + Set applied = new HashSet<>(); + msgIds.stream().limit(80).forEach(x -> { + applied.add(x); + clientMessageTracker.applied(x, CLIENT_ID); + }); + + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, MESSAGE_FACTORY.validateServerStore("test", serverStoreConfiguration)); + + reset(entityMessenger); + ServerStoreMessageFactory serverStoreMessageFactory = new ServerStoreMessageFactory("test", CLIENT_ID); + EhcacheEntityResponseFactory entityResponseFactory = new EhcacheEntityResponseFactory(); + applied.forEach(y -> { + EhcacheEntityMessage message = serverStoreMessageFactory.appendOperation(y, createPayload(y)); + message.setId(y); + assertThat(activeEntity.invoke(client, message), is(entityResponseFactory.success())); + }); + + verify(entityMessenger, times(0)).messageSelfAndDeferRetirement(any(), any()); + } + + @Test + public void testReplicationMessageAndOriginalServerStoreOpMessageHasSameConcurrency() throws MessageCodecException { + + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + IEntityMessenger entityMessenger = registry.getEntityMessenger(); + + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration 
serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .build(); + activeEntity.invoke(client, MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + activeEntity.invoke(client, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfigBuilder() + .shared("primary") + .build(); + activeEntity.invoke(client, MESSAGE_FACTORY.createServerStore("testCache", serverStoreConfiguration)); + + reset(entityMessenger); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testCache", CLIENT_ID); + EhcacheEntityMessage getAndAppend = messageFactory.getAndAppendOperation(1L, createPayload(1L)); + activeEntity.invoke(client, getAndAppend); + + ArgumentCaptor captor = ArgumentCaptor.forClass(PassiveReplicationMessage.ChainReplicationMessage.class); + verify(entityMessenger).messageSelfAndDeferRetirement(any(), captor.capture()); + PassiveReplicationMessage.ChainReplicationMessage replicatedMessage = captor.getValue(); + + assertThat(replicatedMessage.concurrencyKey(), is(((ConcurrentEntityMessage) getAndAppend).concurrencyKey())); + } + + @Test + public void testInvalidMessageThrowsError() throws Exception { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + try { + activeEntity.invoke(client, new InvalidMessage()); + fail("Invalid message should result in AssertionError"); + } catch (AssertionError e) { + assertThat(e.getMessage(), containsString("Unsupported")); + } + } + + + private void assertSuccess(EhcacheEntityResponse response) throws Exception { if 
(!response.equals(EhcacheEntityResponse.Success.INSTANCE)) { throw ((Failure) response).getCause(); @@ -2540,12 +3039,12 @@ private void assertSuccess(EhcacheEntityResponse response) throws Exception { } private void assertFailure(EhcacheEntityResponse response, Class expectedException) { - assertThat(response.getType(), is(FAILURE)); + assertThat(response.getResponseType(), is(EhcacheResponseType.FAILURE)); assertThat(((Failure) response).getCause(), is(instanceOf(expectedException))); } private void assertFailure(EhcacheEntityResponse response, Class expectedException, String expectedMessageContent) { - assertThat(response.getType(), is(FAILURE)); + assertThat(response.getResponseType(), is(EhcacheResponseType.FAILURE)); Exception cause = ((Failure) response).getCause(); assertThat(cause, is(instanceOf(expectedException))); assertThat(cause.getMessage(), containsString(expectedMessageContent)); @@ -2668,6 +3167,10 @@ private static final class OffHeapIdentifierRegistry implements ServiceRegistry private EhcacheStateServiceImpl storeManagerService; + private IEntityMessenger entityMessenger; + + private ClientCommunicator clientCommunicator; + private final Map pools = new HashMap(); @@ -2710,6 +3213,15 @@ private EhcacheStateServiceImpl getStoreManagerService() { return this.storeManagerService; } + + private IEntityMessenger getEntityMessenger() { + return entityMessenger; + } + + private ClientCommunicator getClientCommunicator() { + return clientCommunicator; + } + private static Set getIdentifiers(Set pools) { Set names = new HashSet(); for (OffHeapResourceIdentifier identifier: pools) { @@ -2722,23 +3234,35 @@ private static Set getIdentifiers(Set pools) @SuppressWarnings("unchecked") @Override public T getService(ServiceConfiguration serviceConfiguration) { - if (serviceConfiguration instanceof OffHeapResourceIdentifier) { - final OffHeapResourceIdentifier resourceIdentifier = (OffHeapResourceIdentifier) serviceConfiguration; - return (T) 
this.pools.get(resourceIdentifier); - } else if (serviceConfiguration.getServiceType().equals(ClientCommunicator.class)) { - return (T) mock(ClientCommunicator.class); - } else if(serviceConfiguration.getServiceType().equals(OffHeapResources.class)) { - return (T) new OffHeapResources() { - @Override - public Set getAllIdentifiers() { - return getIdentifiers(pools.keySet()); - } - }; + if (serviceConfiguration.getServiceType().equals(ClientCommunicator.class)) { + if (this.clientCommunicator == null) { + this.clientCommunicator = mock(ClientCommunicator.class); + } + return (T) this.clientCommunicator; } else if (serviceConfiguration.getServiceType().equals(EhcacheStateService.class)) { if (storeManagerService == null) { - this.storeManagerService = new EhcacheStateServiceImpl(this, getIdentifiers(pools.keySet())); + this.storeManagerService = new EhcacheStateServiceImpl(new OffHeapResources() { + @Override + public Set getAllIdentifiers() { + return pools.keySet(); + } + + @Override + public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) { + return pools.get(identifier); + } + }, DEFAULT_MAPPER); } return (T) (this.storeManagerService); + } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { + if (this.entityMessenger == null) { + this.entityMessenger = mock(IEntityMessenger.class); + } + return (T) this.entityMessenger; + } else if(serviceConfiguration instanceof ConsumerManagementRegistryConfiguration) { + return null; + } else if(serviceConfiguration instanceof ActiveEntityMonitoringServiceConfiguration) { + return null; } throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); @@ -2783,8 +3307,30 @@ public long available() { return this.capacity - this.used; } + @Override + public long capacity() { + return capacity; + } + private long getUsed() { return used; } } + + private static class InvalidMessage extends EhcacheEntityMessage { + 
@Override + public void setId(long id) { + throw new UnsupportedOperationException("TODO Implement me!"); + } + + @Override + public long getId() { + throw new UnsupportedOperationException("TODO Implement me!"); + } + + @Override + public UUID getClientId() { + throw new UnsupportedOperationException("TODO Implement me!"); + } + } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index ee64e46700..acdfbc67cc 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -21,12 +21,21 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; +import org.junit.Before; import org.junit.Test; +import org.terracotta.entity.BasicServiceConfiguration; +import org.terracotta.entity.IEntityMessenger; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; +import org.terracotta.management.service.monitoring.ConsumerManagementRegistryConfiguration; +import org.terracotta.management.service.monitoring.PassiveEntityMonitoringServiceConfiguration; +import org.terracotta.monitoring.IMonitoringProducer; import org.terracotta.offheapresource.OffHeapResource; import 
org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; @@ -41,21 +50,30 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; public class EhcachePassiveEntityTest { private static final byte[] ENTITY_ID = ClusteredEhcacheIdentity.serialize(UUID.randomUUID()); private static final LifeCycleMessageFactory MESSAGE_FACTORY = new LifeCycleMessageFactory(); + private static final UUID CLIENT_ID = UUID.randomUUID(); + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + + @Before + public void setClientId() { + MESSAGE_FACTORY.setClientId(CLIENT_ID); + } @Test public void testConfigTooShort() { try { - new EhcachePassiveEntity(null, new byte[ENTITY_ID.length - 1]); + new EhcachePassiveEntity(null, new byte[ENTITY_ID.length - 1], DEFAULT_MAPPER); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { //expected @@ -65,7 +83,7 @@ public void testConfigTooShort() { @Test public void testConfigTooLong() { try { - new EhcachePassiveEntity(null, new byte[ENTITY_ID.length + 1]); + new EhcachePassiveEntity(null, new byte[ENTITY_ID.length + 1], DEFAULT_MAPPER); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { //expected @@ -75,7 +93,7 @@ public void testConfigTooLong() { @Test public void testConfigNull() { try { - new EhcachePassiveEntity(null, null); + new EhcachePassiveEntity(null, null, DEFAULT_MAPPER); fail("Expected NullPointerException"); } catch (NullPointerException e) { //expected @@ -92,7 +110,7 @@ public void testConfigure() throws Exception { 
registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() .defaultResource("defaultServerResource") @@ -117,7 +135,7 @@ public void testConfigureAfterConfigure() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() .defaultResource("defaultServerResource") @@ -125,11 +143,16 @@ public void testConfigureAfterConfigure() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); - passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() - .defaultResource("defaultServerResource") - .sharedPool("primary-new", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary-new", "serverResource2", 8, MemoryUnit.MEGABYTES) - .build())); + try { + passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() + .defaultResource("defaultServerResource") + .sharedPool("primary-new", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary-new", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build())); + fail("invocation should have triggered an exception"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString("operation failed")); + } assertThat(registry.getStoreManagerService() 
.getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); @@ -153,13 +176,18 @@ public void testConfigureMissingPoolResource() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("defaultServerResource", 64, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); - passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() - .defaultResource("defaultServerResource") - .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) // missing on 'server' - .build())); + try { + passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() + .defaultResource("defaultServerResource") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) // missing on 'server' + .build())); + fail("invocation should have triggered an exception"); + } catch (Exception e) { + assertThat(e.getMessage(), containsString("operation failed")); + } assertThat(registry.getStoreManagerService().getSharedResourcePoolIds(), is(Matchers.empty())); @@ -179,13 +207,18 @@ public void testConfigureMissingDefaultResource() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); - passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() - .defaultResource("defaultServerResource") - .sharedPool("primary", "serverResource1", 4, 
MemoryUnit.MEGABYTES) - .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) - .build())); + try { + passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() + .defaultResource("defaultServerResource") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build())); + fail("invocation should have triggered an exception"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString("operation failed")); + } assertThat(registry.getStoreManagerService().getSharedResourcePoolIds(), is(Matchers.empty())); @@ -203,14 +236,19 @@ public void testConfigureLargeSharedPool() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); - passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() - .defaultResource("defaultServerResource") - .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) - .sharedPool("tooBig", "serverResource2", 64, MemoryUnit.MEGABYTES) - .build())); + try { + passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() + .defaultResource("defaultServerResource") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .sharedPool("tooBig", "serverResource2", 64, MemoryUnit.MEGABYTES) + .build())); + fail("invocation should have triggered an exception"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString("operation failed")); + } final Set poolIds = 
registry.getStoreManagerService().getSharedResourcePoolIds(); assertThat(poolIds, is(Matchers.empty())); @@ -228,17 +266,18 @@ public void testCreateDedicatedServerStore() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); - - passiveEntity.invoke(MESSAGE_FACTORY.createServerStore("cacheAlias", + EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("cacheAlias", new ServerStoreConfigBuilder() .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) - .build())); + .build()); + passiveEntity.invoke(createServerStore); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore)createServerStore)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); @@ -264,7 +303,7 @@ public void testCreateSharedServerStore() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke( MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() @@ -272,11 +311,12 @@ public void testCreateSharedServerStore() throws Exception { .sharedPool("secondary", "serverResource2", 8, 
MemoryUnit.MEGABYTES) .build())); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("cacheAlias", - new ServerStoreConfigBuilder() - .shared("primary") - .build())); + EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("cacheAlias", + new ServerStoreConfigBuilder() + .shared("primary") + .build()); + passiveEntity.invoke(createServerStore); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore)createServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("cacheAlias")); assertThat(registry.getStoreManagerService() @@ -298,7 +338,7 @@ public void testDestroyServerStore() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke( MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() @@ -306,11 +346,12 @@ public void testDestroyServerStore() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("dedicatedCache", - new ServerStoreConfigBuilder() - .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) - .build())); + EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("dedicatedCache", + new ServerStoreConfigBuilder() + .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) + .build()); + passiveEntity.invoke(createServerStore); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) createServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); 
assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); @@ -320,24 +361,29 @@ public void testDestroyServerStore() throws Exception { assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("sharedCache", - new ServerStoreConfigBuilder() - .shared("secondary") - .build())); + EhcacheEntityMessage sharedServerStore = MESSAGE_FACTORY.createServerStore("sharedCache", + new ServerStoreConfigBuilder() + .shared("secondary") + .build()); + passiveEntity.invoke(sharedServerStore); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) sharedServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); - passiveEntity.invoke(MESSAGE_FACTORY.destroyServerStore("sharedCache")); + EhcacheEntityMessage destroySharedCache = MESSAGE_FACTORY.destroyServerStore("sharedCache"); + passiveEntity.invoke(destroySharedCache); + passiveEntity.invoke(new PassiveReplicationMessage.DestroyServerStoreReplicationMessage((LifecycleMessage.DestroyServerStore) destroySharedCache)); assertThat(registry.getResource("serverResource1").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(4L + 4L))); assertThat(registry.getResource("serverResource2").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(8L))); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); - passiveEntity.invoke(MESSAGE_FACTORY.destroyServerStore("dedicatedCache")); + 
EhcacheEntityMessage destroyDedicatedCache = MESSAGE_FACTORY.destroyServerStore("dedicatedCache"); + passiveEntity.invoke(destroyDedicatedCache); + passiveEntity.invoke(new PassiveReplicationMessage.DestroyServerStoreReplicationMessage((LifecycleMessage.DestroyServerStore) destroyDedicatedCache)); assertThat(registry.getStoreManagerService().getStores(), is(Matchers.empty())); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), is(Matchers.empty())); @@ -358,7 +404,7 @@ public void testSharedPoolCacheNameCollision() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke( MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() @@ -367,31 +413,34 @@ public void testSharedPoolCacheNameCollision() throws Exception { .build())); assertThat(registry.getStoreManagerService().getStores(), is(Matchers.empty())); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("dedicatedCache", - new ServerStoreConfigBuilder() - .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) - .build())); + EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("dedicatedCache", + new ServerStoreConfigBuilder() + .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) + .build()); + passiveEntity.invoke(createServerStore); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) createServerStore)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); 
assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("sharedCache", - new ServerStoreConfigBuilder() - .shared("primary") - .build())); + EhcacheEntityMessage sharedServerStore = MESSAGE_FACTORY.createServerStore("sharedCache", + new ServerStoreConfigBuilder() + .shared("primary") + .build()); + passiveEntity.invoke(sharedServerStore); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) sharedServerStore)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("primary", - new ServerStoreConfigBuilder() - .dedicated("serverResource2", 4, MemoryUnit.MEGABYTES) - .build())); + EhcacheEntityMessage createServerStore2 = MESSAGE_FACTORY.createServerStore("primary", + new ServerStoreConfigBuilder() + .dedicated("serverResource2", 4, MemoryUnit.MEGABYTES) + .build()); + passiveEntity.invoke(createServerStore2); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) createServerStore2)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); assertThat(registry.getStoreManagerService() @@ -427,29 +476,30 @@ public void testDestroyWithStores() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity 
passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); - passiveEntity.invoke( - MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() - .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) - .build())); + passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build())); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("dedicatedCache", - new ServerStoreConfigBuilder() - .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) - .build())); + EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("dedicatedCache", + new ServerStoreConfigBuilder() + .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) + .build()); + passiveEntity.invoke(createServerStore); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) createServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); assertThat(registry.getResource("serverResource1").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(4L + 4L))); assertThat(registry.getResource("serverResource2").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(8L))); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("sharedCache", - new ServerStoreConfigBuilder() - .shared("secondary") - .build())); + EhcacheEntityMessage sharedServerStore = MESSAGE_FACTORY.createServerStore("sharedCache", + new ServerStoreConfigBuilder() + .shared("secondary") + .build()); + passiveEntity.invoke(sharedServerStore); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) sharedServerStore)); assertThat(registry.getStoreManagerService().getStores(), 
containsInAnyOrder("dedicatedCache", "sharedCache")); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); @@ -467,6 +517,21 @@ public void testDestroyWithStores() throws Exception { assertThat(registry.getResource("serverResource2").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(0L))); } + @Test + public void testInvalidMessageThrowsError() throws Exception { + OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(4, MemoryUnit.MEGABYTES); + registry.addResource("serverResource", 4, MemoryUnit.MEGABYTES); + + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + try { + passiveEntity.invoke(new InvalidMessage()); + fail("Invalid message should result in AssertionError"); + } catch (AssertionError e) { + assertThat(e.getMessage(), containsString("Unsupported")); + } + } + private static ServerSideConfiguration.Pool pool(String resourceName, int poolSize, MemoryUnit unit) { return new ServerSideConfiguration.Pool(unit.toBytes(poolSize), resourceName); } @@ -627,21 +692,29 @@ private static Set getIdentifiers(Set pools) @SuppressWarnings("unchecked") @Override public T getService(ServiceConfiguration serviceConfiguration) { - if (serviceConfiguration instanceof OffHeapResourceIdentifier) { - final OffHeapResourceIdentifier resourceIdentifier = (OffHeapResourceIdentifier) serviceConfiguration; - return (T) this.pools.get(resourceIdentifier); - } else if(serviceConfiguration.getServiceType().equals(OffHeapResources.class)) { - return (T) new OffHeapResources() { - @Override - public Set getAllIdentifiers() { - return getIdentifiers(pools.keySet()); - } - }; - } else if (serviceConfiguration.getServiceType().equals(EhcacheStateService.class)) { + if (serviceConfiguration.getServiceType().equals(EhcacheStateService.class)) { if (storeManagerService == null) { - this.storeManagerService = new EhcacheStateServiceImpl(this, 
getIdentifiers(pools.keySet())); + this.storeManagerService = new EhcacheStateServiceImpl(new OffHeapResources() { + @Override + public Set getAllIdentifiers() { + return pools.keySet(); + } + + @Override + public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) { + return pools.get(identifier); + } + }, DEFAULT_MAPPER); } return (T) (this.storeManagerService); + } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { + return (T) mock(IEntityMessenger.class); + } else if(serviceConfiguration instanceof ConsumerManagementRegistryConfiguration) { + return null; + } else if(serviceConfiguration instanceof PassiveEntityMonitoringServiceConfiguration) { + return null; + } else if(serviceConfiguration instanceof BasicServiceConfiguration && serviceConfiguration.getServiceType() == IMonitoringProducer.class) { + return null; } throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); @@ -686,8 +759,30 @@ public long available() { return this.capacity - this.used; } + @Override + public long capacity() { + return capacity; + } + private long getUsed() { return used; } } + + private static class InvalidMessage extends EhcacheEntityMessage { + @Override + public void setId(long id) { + throw new UnsupportedOperationException("TODO Implement me!"); + } + + @Override + public long getId() { + throw new UnsupportedOperationException("TODO Implement me!"); + } + + @Override + public UUID getClientId() { + throw new UnsupportedOperationException("TODO Implement me!"); + } + } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java index 24049b0f5d..2f7e972379 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java +++ 
b/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java @@ -24,9 +24,6 @@ import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; import org.ehcache.clustered.common.PoolAllocation.Unknown; import org.junit.Test; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java new file mode 100644 index 0000000000..25f0f9798b --- /dev/null +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java @@ -0,0 +1,121 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server.internal.messages; + +import org.ehcache.clustered.common.internal.messages.EhcacheCodec; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.nio.ByteBuffer; +import java.util.UUID; + +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.MockitoAnnotations.initMocks; + +/** + * EhcacheServerCodecTest + */ +public class EhcacheServerCodecTest { + + private static final UUID CLIENT_ID = UUID.randomUUID(); + + @Mock + private EhcacheCodec clientCodec; + + @Mock + private PassiveReplicationMessageCodec replicationCodec; + + private EhcacheServerCodec serverCodec; + + @Before + public void setUp() { + initMocks(this); + serverCodec = new EhcacheServerCodec(clientCodec, replicationCodec); + } + + @Test + public void testDelegatesToEhcacheCodeForEncoding() throws Exception { + LifecycleMessage lifecycleMessage = new LifecycleMessage() { + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.APPEND; + } + }; + serverCodec.encodeMessage(lifecycleMessage); + + verify(clientCodec).encodeMessage(any(EhcacheEntityMessage.class)); + verifyZeroInteractions(replicationCodec); + } + + @Test + public void testDelegatesToPassiveReplicationCodeForEncoding() throws Exception { + ClientIDTrackerMessage message = new ClientIDTrackerMessage(CLIENT_ID); + serverCodec.encodeMessage(message); + + verify(replicationCodec).encode(message); + verifyZeroInteractions(clientCodec); + } + + @Test + 
public void decodeLifeCycleMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.LIFECYCLE_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + serverCodec.decodeMessage(encodedBuffer.array()); + } + verify(clientCodec, times(EhcacheMessageType.LIFECYCLE_MESSAGES.size())).decodeMessage(any(ByteBuffer.class), any(EhcacheMessageType.class)); + verifyZeroInteractions(replicationCodec); + } + + @Test + public void decodeServerStoreMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.STORE_OPERATION_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + serverCodec.decodeMessage(encodedBuffer.array()); + } + verify(clientCodec, times(EhcacheMessageType.STORE_OPERATION_MESSAGES.size())).decodeMessage(any(ByteBuffer.class), any(EhcacheMessageType.class)); + verifyZeroInteractions(replicationCodec); + } + + @Test + public void decodeStateRepoMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.STATE_REPO_OPERATION_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + serverCodec.decodeMessage(encodedBuffer.array()); + } + verify(clientCodec, times(EhcacheMessageType.STATE_REPO_OPERATION_MESSAGES.size())).decodeMessage(any(ByteBuffer.class), any(EhcacheMessageType.class)); + verifyZeroInteractions(replicationCodec); + } + + @Test + public void decodeClientIDTrackerMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.PASSIVE_REPLICATION_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + serverCodec.decodeMessage(encodedBuffer.array()); + } + verify(replicationCodec, times(EhcacheMessageType.PASSIVE_REPLICATION_MESSAGES.size())).decode(any(EhcacheMessageType.class), 
any(ByteBuffer.class)); + verifyZeroInteractions(clientCodec); + } +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java new file mode 100644 index 0000000000..9073fc23d8 --- /dev/null +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java @@ -0,0 +1,110 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.internal.messages; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.CommonConfigCodec; +import org.ehcache.clustered.common.internal.store.Chain; +import org.junit.Test; + +import java.util.HashMap; +import java.util.Map; + +import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; +import static org.ehcache.clustered.common.internal.store.Util.createPayload; +import static org.ehcache.clustered.common.internal.store.Util.getChain; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.*; + +public class EhcacheSyncMessageCodecTest { + + @Test + public void testStateSyncMessageEncodeDecode() throws Exception { + Map sharedPools = new HashMap<>(); + ServerSideConfiguration.Pool pool1 = new ServerSideConfiguration.Pool(1, "foo1"); + ServerSideConfiguration.Pool pool2 = new ServerSideConfiguration.Pool(2, "foo2"); + sharedPools.put("shared-pool-1", pool1); + sharedPools.put("shared-pool-2", pool2); + ServerSideConfiguration serverSideConfig = new ServerSideConfiguration("default-pool", sharedPools); + + PoolAllocation poolAllocation1 = new PoolAllocation.Dedicated("dedicated", 4); + ServerStoreConfiguration serverStoreConfiguration1 = new ServerStoreConfiguration(poolAllocation1, + "storedKeyType1", "storedValueType1", null, null, + "keySerializerType1", "valueSerializerType1", Consistency.STRONG); + + PoolAllocation poolAllocation2 = new PoolAllocation.Shared("shared"); + ServerStoreConfiguration serverStoreConfiguration2 = new ServerStoreConfiguration(poolAllocation2, + "storedKeyType2", "storedValueType2", null, null, + "keySerializerType2", 
"valueSerializerType2", Consistency.EVENTUAL); + + Map storeConfigs = new HashMap<>(); + storeConfigs.put("cache1", serverStoreConfiguration1); + storeConfigs.put("cache2", serverStoreConfiguration2); + + EhcacheStateSyncMessage message = new EhcacheStateSyncMessage(serverSideConfig, storeConfigs); + EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(new CommonConfigCodec()); + EhcacheStateSyncMessage decodedMessage = (EhcacheStateSyncMessage) codec.decode(0, codec.encode(0, message)); + + assertThat(decodedMessage.getConfiguration().getDefaultServerResource(), is("default-pool")); + assertThat(decodedMessage.getConfiguration().getResourcePools(), is(sharedPools)); + assertThat(decodedMessage.getStoreConfigs().keySet(), containsInAnyOrder("cache1", "cache2")); + + ServerStoreConfiguration serverStoreConfiguration = decodedMessage.getStoreConfigs().get("cache1"); + assertThat(serverStoreConfiguration.getPoolAllocation(), instanceOf(PoolAllocation.Dedicated.class)); + PoolAllocation.Dedicated dedicatedPool = (PoolAllocation.Dedicated) serverStoreConfiguration.getPoolAllocation(); + assertThat(dedicatedPool.getResourceName(), is("dedicated")); + assertThat(dedicatedPool.getSize(), is(4L)); + assertThat(serverStoreConfiguration.getStoredKeyType(), is("storedKeyType1")); + assertThat(serverStoreConfiguration.getStoredValueType(), is("storedValueType1")); + assertThat(serverStoreConfiguration.getKeySerializerType(), is("keySerializerType1")); + assertThat(serverStoreConfiguration.getValueSerializerType(), is("valueSerializerType1")); + assertThat(serverStoreConfiguration.getConsistency(), is(Consistency.STRONG)); + + serverStoreConfiguration = decodedMessage.getStoreConfigs().get("cache2"); + assertThat(serverStoreConfiguration.getPoolAllocation(), instanceOf(PoolAllocation.Shared.class)); + PoolAllocation.Shared sharedPool = (PoolAllocation.Shared) serverStoreConfiguration.getPoolAllocation(); + assertThat(sharedPool.getResourcePoolName(), is("shared")); + 
assertThat(serverStoreConfiguration.getStoredKeyType(), is("storedKeyType2")); + assertThat(serverStoreConfiguration.getStoredValueType(), is("storedValueType2")); + assertThat(serverStoreConfiguration.getKeySerializerType(), is("keySerializerType2")); + assertThat(serverStoreConfiguration.getValueSerializerType(), is("valueSerializerType2")); + assertThat(serverStoreConfiguration.getConsistency(), is(Consistency.EVENTUAL)); + } + + @Test + public void testDataSyncMessageEncodeDecode() throws Exception { + EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(new CommonConfigCodec()); + Map chainMap = new HashMap<>(); + Chain chain = getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L)); + chainMap.put(1L, chain); + chainMap.put(2L, chain); + chainMap.put(3L, chain); + EhcacheDataSyncMessage message = new EhcacheDataSyncMessage("foo", chainMap); + byte[] encodedMessage = codec.encode(0, message); + EhcacheDataSyncMessage decoded = (EhcacheDataSyncMessage) codec.decode(0, encodedMessage); + assertThat(decoded.getCacheId(), is(message.getCacheId())); + Map decodedChainMap = decoded.getChainMap(); + assertThat(decodedChainMap.size(), is(3)); + assertThat(chainsEqual(decodedChainMap.get(1L), chain), is(true)); + assertThat(chainsEqual(decodedChainMap.get(2L), chain), is(true)); + assertThat(chainsEqual(decodedChainMap.get(3L), chain), is(true)); + } +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java new file mode 100644 index 0000000000..d846eb8882 --- /dev/null +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java @@ -0,0 +1,170 @@ +/* + * Copyright Terracotta, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.internal.messages; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.CommonConfigCodec; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; +import org.junit.Test; + +import java.util.UUID; + +import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; +import static org.ehcache.clustered.common.internal.store.Util.createPayload; +import static org.ehcache.clustered.common.internal.store.Util.getChain; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; + 
+ +public class PassiveReplicationMessageCodecTest { + + private static final long MESSAGE_ID = 42L; + private PassiveReplicationMessageCodec codec = new PassiveReplicationMessageCodec(new CommonConfigCodec()); + + @Test + public void testClientIDTrackerMessageCodec() { + ClientIDTrackerMessage clientIDTrackerMessage = new ClientIDTrackerMessage(UUID.randomUUID()); + + byte[] encoded = codec.encode(clientIDTrackerMessage); + PassiveReplicationMessage decodedMsg = (PassiveReplicationMessage) codec.decode(EhcacheMessageType.CLIENT_ID_TRACK_OP, wrap(encoded)); + + assertThat(decodedMsg.getClientId(), is(clientIDTrackerMessage.getClientId())); + + } + + @Test + public void testChainReplicationMessageCodec() { + Chain chain = getChain(false, createPayload(2L), createPayload(20L)); + ChainReplicationMessage chainReplicationMessage = new ChainReplicationMessage("test", 2L, chain, 200L, UUID.randomUUID()); + + byte[] encoded = codec.encode(chainReplicationMessage); + ChainReplicationMessage decodedMsg = (ChainReplicationMessage) codec.decode(EhcacheMessageType.CHAIN_REPLICATION_OP, wrap(encoded)); + + assertThat(decodedMsg.getCacheId(), is(chainReplicationMessage.getCacheId())); + assertThat(decodedMsg.getClientId(), is(chainReplicationMessage.getClientId())); + assertThat(decodedMsg.getId(), is(chainReplicationMessage.getId())); + assertThat(decodedMsg.getKey(), is(chainReplicationMessage.getKey())); + assertTrue(chainsEqual(decodedMsg.getChain(), chainReplicationMessage.getChain())); + + } + + @Test + public void testClearInvalidationCompleteMessage() { + ClearInvalidationCompleteMessage clearInvalidationCompleteMessage = new ClearInvalidationCompleteMessage("test"); + + byte[] encoded = codec.encode(clearInvalidationCompleteMessage); + ClearInvalidationCompleteMessage decoded = (ClearInvalidationCompleteMessage) codec.decode(EhcacheMessageType.CLEAR_INVALIDATION_COMPLETE, wrap(encoded)); + + assertThat(decoded.getMessageType(), 
is(EhcacheMessageType.CLEAR_INVALIDATION_COMPLETE)); + assertThat(decoded.getCacheId(), is(clearInvalidationCompleteMessage.getCacheId())); + + } + + @Test + public void testInvalidationCompleteMessage() { + + InvalidationCompleteMessage invalidationCompleteMessage = new InvalidationCompleteMessage("test", 20L); + + byte[] encoded = codec.encode(invalidationCompleteMessage); + InvalidationCompleteMessage decoded = (InvalidationCompleteMessage) codec.decode(EhcacheMessageType.INVALIDATION_COMPLETE, wrap(encoded)); + + assertThat(decoded.getMessageType(), is(EhcacheMessageType.INVALIDATION_COMPLETE)); + assertThat(decoded.getCacheId(), equalTo(invalidationCompleteMessage.getCacheId())); + assertThat(decoded.getKey(), equalTo(invalidationCompleteMessage.getKey())); + } + + @Test + public void testCreateServerStoreReplicationDedicated() throws Exception { + UUID clientId = UUID.randomUUID(); + PoolAllocation.Dedicated dedicated = new PoolAllocation.Dedicated("dedicate", 420000L); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(dedicated, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + PassiveReplicationMessage.CreateServerStoreReplicationMessage message = new PassiveReplicationMessage.CreateServerStoreReplicationMessage(MESSAGE_ID, clientId, "storeId", configuration); + + byte[] encoded = codec.encode(message); + PassiveReplicationMessage.CreateServerStoreReplicationMessage decodedMessage = (PassiveReplicationMessage.CreateServerStoreReplicationMessage) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CREATE_SERVER_STORE_REPLICATION)); + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getClientId(), is(clientId)); + assertThat(decodedMessage.getStoreName(), is("storeId")); + 
assertThat(decodedMessage.getStoreConfiguration().getStoredKeyType(), is(configuration.getStoredKeyType())); + assertThat(decodedMessage.getStoreConfiguration().getStoredValueType(), is(configuration.getStoredValueType())); + assertThat(decodedMessage.getStoreConfiguration().getActualKeyType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getActualValueType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getConsistency(), is(configuration.getConsistency())); + assertThat(decodedMessage.getStoreConfiguration().getKeySerializerType(), is(configuration.getKeySerializerType())); + assertThat(decodedMessage.getStoreConfiguration().getValueSerializerType(), is(configuration.getValueSerializerType())); + PoolAllocation.Dedicated decodedPoolAllocation = (PoolAllocation.Dedicated) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourceName(), is(dedicated.getResourceName())); + assertThat(decodedPoolAllocation.getSize(), is(dedicated.getSize())); + } + + @Test + public void testCreateServerStoreReplicationShared() throws Exception { + UUID clientId = UUID.randomUUID(); + PoolAllocation.Shared shared = new PoolAllocation.Shared("shared"); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(shared, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + PassiveReplicationMessage.CreateServerStoreReplicationMessage message = new PassiveReplicationMessage.CreateServerStoreReplicationMessage(MESSAGE_ID, clientId, "storeId", configuration); + + byte[] encoded = codec.encode(message); + PassiveReplicationMessage.CreateServerStoreReplicationMessage decodedMessage = (PassiveReplicationMessage.CreateServerStoreReplicationMessage) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), 
is(EhcacheMessageType.CREATE_SERVER_STORE_REPLICATION)); + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getClientId(), is(clientId)); + assertThat(decodedMessage.getStoreName(), is("storeId")); + assertThat(decodedMessage.getStoreConfiguration().getStoredKeyType(), is(configuration.getStoredKeyType())); + assertThat(decodedMessage.getStoreConfiguration().getStoredValueType(), is(configuration.getStoredValueType())); + assertThat(decodedMessage.getStoreConfiguration().getActualKeyType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getActualValueType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getConsistency(), is(configuration.getConsistency())); + assertThat(decodedMessage.getStoreConfiguration().getKeySerializerType(), is(configuration.getKeySerializerType())); + assertThat(decodedMessage.getStoreConfiguration().getValueSerializerType(), is(configuration.getValueSerializerType())); + PoolAllocation.Shared decodedPoolAllocation = (PoolAllocation.Shared) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourcePoolName(), is(shared.getResourcePoolName())); + } + + @Test + public void testDestroyServerStoreReplication() throws Exception { + UUID clientId = UUID.randomUUID(); + PassiveReplicationMessage.DestroyServerStoreReplicationMessage message = new PassiveReplicationMessage.DestroyServerStoreReplicationMessage(MESSAGE_ID, clientId, "storeId"); + + byte[] encoded = codec.encode(message); + PassiveReplicationMessage.DestroyServerStoreReplicationMessage decodedMessage = (PassiveReplicationMessage.DestroyServerStoreReplicationMessage) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.DESTROY_SERVER_STORE_REPLICATION)); + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getClientId(), is(clientId)); + 
assertThat(decodedMessage.getStoreName(), is("storeId")); + } + +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java index cfd588b88e..82fe871e1e 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java @@ -16,6 +16,12 @@ package org.ehcache.clustered.server.offheap; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; import org.ehcache.clustered.common.internal.store.Element; @@ -29,7 +35,9 @@ import org.junit.runners.Parameterized.Parameters; import org.junit.Test; +import org.terracotta.offheapstore.ReadWriteLockedOffHeapClockCache; import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; +import org.terracotta.offheapstore.eviction.EvictionListeningReadWriteLockedOffHeapClockCache; import org.terracotta.offheapstore.paging.UnlimitedPageSource; import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; import org.terracotta.offheapstore.storage.portability.StringPortability; @@ -323,6 +331,67 @@ public void testContinualAppendCausingEvictionIsStable() { } } + @Test + public void testPutWhenKeyIsNotNull() { + OffHeapChainMap map = new OffHeapChainMap(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); + map.append("key", buffer(3)); + map.put("key", chain(buffer(1), buffer(2))); + + assertThat(map.get("key"), contains(element(1), element(2))); + } + + @Test + public void testPutWhenKeyIsNull() { + OffHeapChainMap map = new OffHeapChainMap(new UnlimitedPageSource(new OffHeapBufferSource()), 
StringPortability.INSTANCE, minPageSize, maxPageSize, steal); + map.put("key", chain(buffer(1), buffer(2))); + + assertThat(map.get("key"), contains(element(1), element(2))); + } + + @Test + public void testActiveChainsThreadSafety() throws ExecutionException, InterruptedException { + UnlimitedPageSource source = new UnlimitedPageSource(new OffHeapBufferSource()); + OffHeapChainStorageEngine chainStorage = new OffHeapChainStorageEngine<>(source, StringPortability.INSTANCE, minPageSize, maxPageSize, steal, steal); + + ReadWriteLockedOffHeapClockCache heads = new EvictionListeningReadWriteLockedOffHeapClockCache<>(callable -> {}, source, chainStorage); + + OffHeapChainMap map = new OffHeapChainMap<>(heads, chainStorage); + + map.put("key", chain(buffer(1), buffer(2))); + + int nThreads = 10; + ExecutorService executorService = Executors.newFixedThreadPool(nThreads); + + List futures = new ArrayList<>(); + + for (int i = 0; i < nThreads ; i++) { + futures.add(executorService.submit(() -> map.get("key"))); + } + + for (Future f : futures) { + f.get(); + } + + assertThat(chainStorage.getActiveChains().size(), is(0)); + + } + + @Test + public void testPutDoesNotLeakWhenMappingIsNotNull() { + UnlimitedPageSource source = new UnlimitedPageSource(new OffHeapBufferSource()); + OffHeapChainStorageEngine chainStorage = new OffHeapChainStorageEngine<>(source, StringPortability.INSTANCE, minPageSize, maxPageSize, steal, steal); + + ReadWriteLockedOffHeapClockCache heads = new EvictionListeningReadWriteLockedOffHeapClockCache<>(callable -> {}, source, chainStorage); + + OffHeapChainMap map = new OffHeapChainMap<>(heads, chainStorage); + + map.put("key", chain(buffer(1))); + map.put("key", chain(buffer(2))); + + assertThat(chainStorage.getActiveChains().size(), is(0)); + + } + private static ByteBuffer buffer(int i) { ByteBuffer buffer = ByteBuffer.allocate(i); while (buffer.hasRemaining()) { diff --git 
a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java index c23fcdab5f..13320570d9 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java @@ -20,6 +20,7 @@ import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.server.KeySegmentMapper; import org.ehcache.clustered.server.store.ChainBuilder; import org.ehcache.clustered.server.store.ElementBuilder; import org.ehcache.clustered.common.internal.store.ServerStore; @@ -34,7 +35,10 @@ import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.core.Is.is; +import org.junit.Assert; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyLong; import static org.mockito.Mockito.doThrow; @@ -45,6 +49,8 @@ public class OffHeapServerStoreTest extends ServerStoreTest { + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + @SuppressWarnings("unchecked") private OffHeapChainMap getOffHeapChainMapMock() { return mock(OffHeapChainMap.class); @@ -52,7 +58,7 @@ private OffHeapChainMap getOffHeapChainMapMock() { @Override public ServerStore newStore() { - return new OffHeapServerStore(new UnlimitedPageSource(new OffHeapBufferSource()), 16); + return new OffHeapServerStore(new UnlimitedPageSource(new OffHeapBufferSource()), DEFAULT_MAPPER); } @Override @@ -103,7 +109,6 @@ public Object answer(InvocationOnMock invocation) throws Throwable { }); 
when(store.handleOversizeMappingException(anyLong())).thenReturn(true); - ByteBuffer payload = createPayload(1L); store.append(1L, payload); @@ -174,7 +179,7 @@ public void testCrossSegmentShrinking() { long seed = System.nanoTime(); Random random = new Random(seed); try { - OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), MEGABYTES.toBytes(1L), MEGABYTES.toBytes(1)), 16); + OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), MEGABYTES.toBytes(1L), MEGABYTES.toBytes(1)), DEFAULT_MAPPER); ByteBuffer smallValue = ByteBuffer.allocate(1024); for (int i = 0; i < 10000; i++) { @@ -198,4 +203,44 @@ public void testCrossSegmentShrinking() { } } + @Test + public void testServerSideUsageStats() { + + long maxBytes = MEGABYTES.toBytes(1); + OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), maxBytes, MEGABYTES.toBytes(1)), new KeySegmentMapper(16)); + + int oneKb = 1024; + long smallLoopCount = 5; + ByteBuffer smallValue = ByteBuffer.allocate(oneKb); + for (long i = 0; i < smallLoopCount; i++) { + store.getAndAppend(i, smallValue.duplicate()); + } + + Assert.assertThat(store.getAllocatedMemory(),lessThanOrEqualTo(maxBytes)); + Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); + Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(store.getOccupiedMemory())); + + //asserts above already guarantee that occupiedMemory <= maxBytes and that occupiedMemory <= allocatedMemory + Assert.assertThat(store.getOccupiedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); + + Assert.assertThat(store.getSize(), is(smallLoopCount)); + + int multiplier = 100; + long largeLoopCount = 5 + smallLoopCount; + ByteBuffer largeValue = ByteBuffer.allocate(multiplier * oneKb); + for (long i = smallLoopCount; i < largeLoopCount; i++) { + store.getAndAppend(i, 
largeValue.duplicate()); + } + + Assert.assertThat(store.getAllocatedMemory(),lessThanOrEqualTo(maxBytes)); + Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo( (smallLoopCount * oneKb) + ( (largeLoopCount - smallLoopCount) * oneKb * multiplier) )); + Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(store.getOccupiedMemory())); + + //asserts above already guarantee that occupiedMemory <= maxBytes and that occupiedMemory <= allocatedMemory + Assert.assertThat(store.getOccupiedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); + + Assert.assertThat(store.getSize(), is(smallLoopCount + (largeLoopCount - smallLoopCount))); + + } + } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java index 82d5123166..0ee8442c5b 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java @@ -18,11 +18,13 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.hamcrest.Matcher; import org.junit.Test; import java.util.AbstractMap; import java.util.Map; import java.util.Set; +import java.util.UUID; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; @@ -31,54 +33,59 @@ public class ServerStateRepositoryTest { + private static final UUID CLIENT_ID = UUID.randomUUID(); + @Test public void testInvokeOnNonExistentRepositorySucceeds() throws Exception { ServerStateRepository repository = new ServerStateRepository(); EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1")); + new 
StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1", CLIENT_ID)); assertThat(response.getValue(), nullValue()); response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1")); - assertThat(response.getValue(), is((Object)"value1")); + new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1", CLIENT_ID)); + assertThat(response.getValue(), is("value1")); } @Test public void testInvokePutIfAbsent() throws Exception { ServerStateRepository repository = new ServerStateRepository(); EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1")); + new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1", CLIENT_ID)); assertThat(response.getValue(), nullValue()); response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value2")); - assertThat(response.getValue(), is((Object)"value1")); + new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value2", CLIENT_ID)); + assertThat(response.getValue(), is("value1")); } @Test public void testInvokeGet() throws Exception { ServerStateRepository repository = new ServerStateRepository(); - repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1")); + repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1", CLIENT_ID)); EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1")); - assertThat(response.getValue(), is((Object)"value1")); + new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1", CLIENT_ID)); + assertThat(response.getValue(), is("value1")); } @Test public void testInvokeEntrySet() throws Exception { ServerStateRepository 
repository = new ServerStateRepository(); - repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1")); - repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key2", "value2")); - repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key3", "value3")); + repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1", CLIENT_ID)); + repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key2", "value2", CLIENT_ID)); + repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key3", "value3", CLIENT_ID)); EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.EntrySetMessage("foo", "bar")); + new StateRepositoryOpMessage.EntrySetMessage("foo", "bar", CLIENT_ID)); + @SuppressWarnings("unchecked") Set> entrySet = (Set>) response.getValue(); assertThat(entrySet.size(), is(3)); Map.Entry entry1 = new AbstractMap.SimpleEntry("key1", "value1"); Map.Entry entry2 = new AbstractMap.SimpleEntry("key2", "value2"); Map.Entry entry3 = new AbstractMap.SimpleEntry("key3", "value3"); - assertThat(entrySet, containsInAnyOrder(entry1, entry2, entry3)); + @SuppressWarnings("unchecked") + Matcher>> matcher = containsInAnyOrder(entry1, entry2, entry3); + assertThat(entrySet, matcher); } -} \ No newline at end of file +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java index c6f59827e9..cf76233156 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java @@ -16,25 +16,28 @@ package org.ehcache.clustered.server.repo; -import 
org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.junit.Test; +import java.util.UUID; + import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; public class StateRepositoryManagerTest { + private static final UUID CLIENT_ID = UUID.randomUUID(); + @Test public void testInvokeOnNonExistentRepositorySucceeds() throws Exception { StateRepositoryManager manager = new StateRepositoryManager(); EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) manager.invoke( - new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1")); + new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1", CLIENT_ID)); assertThat(response.getValue(), nullValue()); response = (EhcacheEntityResponse.MapValue) manager.invoke( - new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1")); + new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1", CLIENT_ID)); assertThat(response.getValue(), is((Object)"value1")); } } \ No newline at end of file diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/ClientMessageTrackerTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/state/ClientMessageTrackerTest.java new file mode 100644 index 0000000000..2074a209c1 --- /dev/null +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/state/ClientMessageTrackerTest.java @@ -0,0 +1,65 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.state; + +import org.junit.Test; + +import java.lang.reflect.Field; +import java.util.Collections; +import java.util.Map; +import java.util.UUID; + +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +public class ClientMessageTrackerTest { + + @Test + public void testReconcilationOfClients() throws Exception { + + ClientMessageTracker clientMessageTracker = new ClientMessageTracker(); + UUID clientId = UUID.randomUUID(); + clientMessageTracker.applied(20L, clientId); + + clientMessageTracker.reconcileTrackedClients(Collections.singleton(clientId)); + + Map messageTracker = getMessageTracker(clientMessageTracker); + assertThat(messageTracker.size(), is(1)); + + clientMessageTracker.reconcileTrackedClients(Collections.singleton(UUID.randomUUID())); + + assertThat(messageTracker.size(), is(0)); + + } + + @Test + public void testClientsAreTrackedLazily() throws Exception { + + ClientMessageTracker clientMessageTracker = new ClientMessageTracker(); + Map messageTracker = getMessageTracker(clientMessageTracker); + assertThat(messageTracker.size(), is(0)); + clientMessageTracker.applied(20L, UUID.randomUUID()); + assertThat(messageTracker.size(), is(1)); + + } + + private Map getMessageTracker(ClientMessageTracker clientMessageTracker) throws Exception { + Field field = clientMessageTracker.getClass().getDeclaredField("messageTrackers"); + field.setAccessible(true); + return (Map)field.get(clientMessageTracker); + } +} diff --git 
a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java index 316993adfc..8a49f88c54 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java @@ -16,45 +16,84 @@ package org.ehcache.clustered.server.state; +import org.ehcache.clustered.server.KeySegmentMapper; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; +import org.junit.Before; import org.junit.Test; -import org.terracotta.entity.ServiceConfiguration; +import org.terracotta.entity.PlatformConfiguration; import org.terracotta.entity.ServiceProviderCleanupException; import org.terracotta.entity.ServiceProviderConfiguration; +import org.terracotta.offheapresource.OffHeapResources; +import org.terracotta.offheapresource.OffHeapResourcesProvider; +import org.terracotta.offheapresource.config.MemoryUnit; +import org.terracotta.offheapresource.config.OffheapResourcesType; +import org.terracotta.offheapresource.config.ResourceType; + +import java.math.BigInteger; +import java.util.Collection; +import java.util.Collections; -import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; public class EhcacheStateServiceProviderTest { + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + + private PlatformConfiguration platformConfiguration; + private ServiceProviderConfiguration serviceProviderConfiguration; + + @Before + public void setUp() { + ResourceType resource 
= new ResourceType(); + resource.setName("primary"); + resource.setUnit(MemoryUnit.MB); + resource.setValue(BigInteger.valueOf(4L)); + OffheapResourcesType configuration = new OffheapResourcesType(); + configuration.getResource().add(resource); + OffHeapResources offheapResources = new OffHeapResourcesProvider(configuration); + + platformConfiguration = new PlatformConfiguration() { + @Override + public String getServerName() { + return "Server1"; + } + + @Override + public Collection getExtendedConfiguration(Class type) { + if (OffHeapResources.class.isAssignableFrom(type)) { + return Collections.singletonList(type.cast(offheapResources)); + } + throw new UnsupportedOperationException("TODO Implement me!"); + } + }; + + serviceProviderConfiguration = mock(ServiceProviderConfiguration.class); + } + @Test public void testInitialize() { EhcacheStateServiceProvider serviceProvider = new EhcacheStateServiceProvider(); - - ServiceProviderConfiguration serviceProviderConfiguration = mock(ServiceProviderConfiguration.class); - - assertTrue(serviceProvider.initialize(serviceProviderConfiguration)); + assertTrue(serviceProvider.initialize(serviceProviderConfiguration, platformConfiguration)); } @Test public void testGetService() { EhcacheStateServiceProvider serviceProvider = new EhcacheStateServiceProvider(); + serviceProvider.initialize(serviceProviderConfiguration, platformConfiguration); - EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null)); + EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); assertNotNull(ehcacheStateService); - EhcacheStateService sameStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null)); + EhcacheStateService sameStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); assertSame(ehcacheStateService, sameStateService); - 
EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null)); + EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); assertNotNull(anotherStateService); assertNotSame(ehcacheStateService, anotherStateService); @@ -64,14 +103,15 @@ public void testGetService() { @Test public void testClear() throws ServiceProviderCleanupException { EhcacheStateServiceProvider serviceProvider = new EhcacheStateServiceProvider(); + serviceProvider.initialize(serviceProviderConfiguration, platformConfiguration); - EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null)); - EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null)); + EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); + EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); - serviceProvider.clear(); + serviceProvider.prepareForSynchronization(); - EhcacheStateService ehcacheStateServiceAfterClear = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null)); - EhcacheStateService anotherStateServiceAfterClear = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null)); + EhcacheStateService ehcacheStateServiceAfterClear = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); + EhcacheStateService anotherStateServiceAfterClear = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); assertNotSame(ehcacheStateService, ehcacheStateServiceAfterClear); assertNotSame(anotherStateService, anotherStateServiceAfterClear); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java 
b/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java new file mode 100644 index 0000000000..f225d2ec84 --- /dev/null +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java @@ -0,0 +1,159 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.state; + +import org.junit.Test; + +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ForkJoinTask; +import java.util.concurrent.Future; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +public class MessageTrackerTest { + + @Test + public void testMessageTrackAndApplySingleThreaded() throws Exception { + + long[] input = getInputFor(0, 20); + + MessageTracker messageTracker = new MessageTracker(); + + for (int i = 0; i < input.length; i++) { + messageTracker.track(input[i]); + messageTracker.applied(input[i]); + } + + 
assertLowerWaterMark(messageTracker, 19); + + assertThat(messageTracker.isEmpty(), is(true)); + + LongStream.of(input).forEach(msg -> assertThat(messageTracker.shouldApply(msg), is(false))); + + } + + @Test + public void testMessageTrackAndApplyMultiThreaded() throws Exception { + + long[] input = getInputFor(0, 1000); + final MessageTracker messageTracker = new MessageTracker(); + + ExecutorService executorService = Executors.newWorkStealingPool(); + + List> results = new ArrayList<>(); + + for (int i = 0; i < 50 ; i++) { + int start = 20*i; + int end = start + 20; + results.add(executorService.submit(() -> { + for (int j = start; j < end; j++) { + messageTracker.track(input[j]); + messageTracker.applied(input[j]); + } + return null; + })); + } + + for (Future f : results) { + f.get(); + } + + assertLowerWaterMark(messageTracker, 999); + + assertThat(messageTracker.isEmpty(), is(true)); + + LongStream.of(input).forEach(msg -> assertThat(messageTracker.shouldApply(msg), is(false))); + + } + + @Test + public void testDuplicateMessagesForTrackedMessages() throws Exception { + + Random random = new Random(); + long[] input = getInputFor(0, 1000); + final MessageTracker messageTracker = new MessageTracker(); + + Set nonAppliedMsgs = Collections.newSetFromMap(new ConcurrentHashMap()); + + ExecutorService executorService = Executors.newWorkStealingPool(); + + List> results = new ArrayList<>(); + + for (int i = 0; i < 50 ; i++) { + int start = 20*i; + int end = start + 20; + int randomBreakingPoint = end - 1 - random.nextInt(5); + results.add(executorService.submit(() -> { + for (int j = start; j < end; j++) { + messageTracker.track(input[j]); + if (j < randomBreakingPoint) { + messageTracker.applied(input[j]); + } else { + nonAppliedMsgs.add(input[j]); + } + } + return null; + })); + } + + for (Future f : results) { + f.get(); + } + + assertThat(messageTracker.isEmpty(), is(false)); + + nonAppliedMsgs.forEach(x -> assertThat(messageTracker.shouldApply(x), is(true))); + + 
assertThat(messageTracker.isEmpty(), is(true)); + + LongStream.of(input).filter(x -> !nonAppliedMsgs.contains(x)).forEach(x -> assertThat(messageTracker.shouldApply(x), is(false))); + + } + + /** + * + * @param start start of range + * @param end exclusive + * @return + */ + private static long[] getInputFor(int start, int end) { + Random random = new Random(); + return random.longs(start, end).unordered().distinct().limit(end - start).toArray(); + } + + private static void assertLowerWaterMark(MessageTracker messageTracker, long lwm) throws NoSuchFieldException, IllegalAccessException { + Field entity = messageTracker.getClass().getDeclaredField("lowerWaterMark"); + entity.setAccessible(true); + assertThat((Long)entity.get(messageTracker), is(lwm)); + } + +} diff --git a/core-spi-test/build.gradle b/core-spi-test/build.gradle index 3d34399c32..333879bf33 100644 --- a/core-spi-test/build.gradle +++ b/core-spi-test/build.gradle @@ -15,6 +15,12 @@ */ dependencies { - compile project(':spi-tester'), project(':core'), - 'org.hamcrest:hamcrest-library:1.3', 'org.mockito:mockito-core:1.9.5', 'junit:junit:4.11' + compile project(':spi-tester'), project(':core'), 'org.hamcrest:hamcrest-library:1.3', 'junit:junit:4.12' + compile ('org.mockito:mockito-core:1.9.5') { + exclude group:'org.hamcrest', module:'hamcrest-core' + } +} + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java index 1dab5140fe..503c78556c 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java @@ -50,12 +50,13 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = (Store) this.kvStore2; 
factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } - @SuppressWarnings({ "rawtypes", "unchecked" }) @SPITest public void testWrongReturnValueType() throws Exception { kvStore = factory.newStore(); @@ -75,10 +76,11 @@ public void testWrongReturnValueType() throws Exception { } try { - kvStore.computeIfAbsent(key, new Function() { + kvStore.computeIfAbsent(key, new Function() { @Override - public Object apply(Object key) { - return badValue; // returning wrong value type from function + @SuppressWarnings("unchecked") + public V apply(K key) { + return (V) badValue; // returning wrong value type from function } }); throw new AssertionError(); @@ -89,8 +91,8 @@ public Object apply(Object key) { } } - @SuppressWarnings({ "rawtypes", "unchecked" }) @SPITest + @SuppressWarnings("unchecked") public void testWrongKeyType() throws Exception { kvStore2 = factory.newStore(); @@ -107,7 +109,7 @@ public void testWrongKeyType() throws Exception { } try { - kvStore2.computeIfAbsent(badKey, new Function() { // wrong key type + kvStore2.computeIfAbsent(badKey, new Function() { // wrong key type @Override public Object apply(Object key) { throw new AssertionError(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java index 2094974bc0..a2db378b9c 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java @@ -50,12 +50,14 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings("unchecked") @SPITest public void testWrongReturnValueType() throws Exception { kvStore = factory.newStore(); @@ -87,7 +89,7 @@ public Object 
apply(Object key, Object oldValue) { } } - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings("unchecked") @SPITest public void testWrongKeyType() throws Exception { kvStore2 = factory.newStore(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java index 4905da3502..1b48bd924d 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java @@ -49,8 +49,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } @@ -89,7 +91,7 @@ public void nullKeyThrowsException() } @SPITest - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings("unchecked") public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore2 = factory.newStore(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java index 10ee9e0200..fedce4c85a 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java @@ -124,6 +124,7 @@ private void verifyListenerInteractions(StoreEventListener listener) { } private StoreEventListener addListener(Store kvStore) { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); kvStore.getStoreEventSource().addEventListener(listener); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java 
b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java index fa9e06c959..8e5b31edb0 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java @@ -137,6 +137,7 @@ private void verifyListenerInteractions(StoreEventListener listener) { } private StoreEventListener addListener(Store kvStore) { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); kvStore.getStoreEventSource().addEventListener(listener); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java index daf9959436..852899071d 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java @@ -162,6 +162,7 @@ private void verifyListenerInteractions(StoreEventListener listener) { } private StoreEventListener addListener(Store kvStore) { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); kvStore.getStoreEventSource().addEventListener(listener); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java index f02274d6cc..9388f7d13d 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java @@ -58,8 +58,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } diff --git 
a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java index b3cd97747e..a0eade861f 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java @@ -56,8 +56,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java index 18a7bbfa7f..2f792bb417 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java @@ -56,8 +56,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java index e7892c2554..0057ff6ff5 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java @@ -110,6 +110,7 @@ private void verifyListenerInteractions(StoreEventListener listener) { } private StoreEventListener addListener(Store kvStore) { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); kvStore.getStoreEventSource().addEventListener(listener); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java 
b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java index a5d83b2843..30c32bf08f 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java @@ -49,8 +49,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } @@ -94,10 +96,8 @@ public void nullKeyThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore = factory.newStore(); - K key = null; - try { - kvStore.remove(key); + kvStore.remove(null); throw new AssertionError("Expected NullPointerException because the key is null"); } catch (NullPointerException e) { // expected diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java index 7d16535134..0d30c1599e 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java @@ -25,6 +25,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; /** * Test the {@link Store#remove(Object, Object)} contract of the @@ -41,7 +42,7 @@ public StoreRemoveKeyValueTest(final StoreFactory factory) { } protected Store kvStore; - protected Store kvStore2; + protected Store kvStore2; @After public void tearDown() { @@ -68,8 +69,8 @@ public void removeEntryForKeyIfMappedToValue() K equalKey = factory.createKey(1L); V equalValue = factory.createValue(1L); - assertThat(key.equals(equalKey), is(true)); - assertThat(value.equals(equalValue), is(true)); + assertThat(key, is(equalKey)); + assertThat(value, 
is(equalValue)); try { kvStore.remove(equalKey, equalValue); @@ -110,7 +111,7 @@ public void doNothingForWrongValue() V notEqualValue = factory.createValue(2L); - assertThat(value.equals(notEqualValue), is(false)); + assertThat(value, not(notEqualValue)); try { assertThat(kvStore.remove(key, notEqualValue), is(RemoveStatus.KEY_PRESENT)); @@ -192,7 +193,7 @@ public void nullValueThrowsException() } @SPITest - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings("unchecked" ) public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore2 = factory.newStore(); @@ -201,9 +202,9 @@ public void wrongKeyTypeThrowsException() try { if (this.factory.getKeyType() == String.class) { - kvStore2.remove(1.0f, value); + kvStore2.remove((K) (Object) 1.0f, value); } else { - kvStore2.remove("key", value); + kvStore2.remove((K) (Object) "key", value); } throw new AssertionError("Expected ClassCastException because the key is of the wrong type"); } catch (ClassCastException e) { @@ -214,7 +215,7 @@ public void wrongKeyTypeThrowsException() } @SPITest - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings("unchecked") public void wrongValueTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore2 = factory.newStore(); @@ -223,9 +224,9 @@ public void wrongValueTypeThrowsException() try { if (this.factory.getValueType() == String.class) { - kvStore2.remove(key, 1.0f); + kvStore2.remove(key, (V) (Object) 1.0f); } else { - kvStore2.remove(key, "value"); + kvStore2.remove(key, (V) (Object) "value"); } throw new AssertionError("Expected ClassCastException because the value is of the wrong type"); } catch (ClassCastException e) { diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java index 35c3936171..a45f90d353 
100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java @@ -51,8 +51,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } @@ -138,10 +140,9 @@ public void nullValueThrowsException() kvStore = factory.newStore(); K key = factory.createKey(1); - V value = null; try { - kvStore.replace(key, value); + kvStore.replace(key, null); throw new AssertionError("Expected NullPointerException because the value is null"); } catch (NullPointerException e) { // expected @@ -151,6 +152,7 @@ public void nullValueThrowsException() } @SPITest + @SuppressWarnings("unchecked") public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore2 = factory.newStore(); @@ -172,6 +174,7 @@ public void wrongKeyTypeThrowsException() } @SPITest + @SuppressWarnings("unchecked") public void wrongValueTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore2 = factory.newStore(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java index ef4a6a06e7..2c2a4a1c9a 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java @@ -53,8 +53,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } diff --git 
a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java index 0128c344b2..c810cc24fa 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java @@ -126,6 +126,7 @@ private void verifyListenerInteractions(StoreEventListener listener) { } private StoreEventListener addListener(Store kvStore) { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); kvStore.getStoreEventSource().addEventListener(listener); diff --git a/core/build.gradle b/core/build.gradle index 8dcc48fded..84ce9d2858 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -17,6 +17,13 @@ apply plugin: EhDeploy dependencies { - compile project(':api'), "org.terracotta:statistics:$parent.statisticVersion", "org.slf4j:slf4j-api:$parent.slf4jVersion" + compile project(':api'), "org.slf4j:slf4j-api:$parent.slf4jVersion" + compile ("org.terracotta:statistics:$parent.statisticVersion") { + exclude group:'org.slf4j', module:'slf4j-api' + } testCompile project(':spi-tester') -} \ No newline at end of file +} + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git a/core/src/main/java/org/ehcache/core/Ehcache.java b/core/src/main/java/org/ehcache/core/Ehcache.java index c7537bcc46..271f09cd9b 100644 --- a/core/src/main/java/org/ehcache/core/Ehcache.java +++ b/core/src/main/java/org/ehcache/core/Ehcache.java @@ -45,7 +45,6 @@ import org.ehcache.core.spi.store.Store.ReplaceStatus; import org.ehcache.core.statistics.CacheOperationOutcomes.ConditionalRemoveOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.GetAllOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.PutAllOutcome; import 
org.ehcache.core.statistics.CacheOperationOutcomes.PutIfAbsentOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.PutOutcome; @@ -71,6 +70,8 @@ import static org.ehcache.core.exceptions.ExceptionFactory.newCacheLoadingException; import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; +import org.ehcache.core.statistics.CacheOperationOutcomes.ClearOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome; import static org.terracotta.statistics.StatisticBuilder.operation; /** @@ -102,6 +103,7 @@ public class Ehcache implements InternalCache { private final OperationObserver putIfAbsentObserver = operation(PutIfAbsentOutcome.class).named("putIfAbsent").of(this).tag("cache").build(); private final OperationObserver replaceObserver = operation(ReplaceOutcome.class).named("replace").of(this).tag("cache").build(); private final Map bulkMethodEntries = new EnumMap(BulkOps.class); + private final OperationObserver clearObserver = operation(ClearOutcome.class).named("clear").of(this).tag("cache").build(); /** * Creates a new {@code Ehcache} based on the provided parameters. 
@@ -120,6 +122,7 @@ public Ehcache(CacheConfiguration configuration, final Store store, this.store = store; runtimeConfiguration.addCacheConfigurationListener(store.getConfigurationChangeListeners()); StatisticsManager.associate(store).withParent(this); + if (store instanceof RecoveryCache) { this.resilienceStrategy = new LoggingRobustResilienceStrategy(castToRecoveryCache(store)); } else { @@ -168,10 +171,10 @@ public V get(final K key) { // Check for expiry first if (valueHolder == null) { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(GetOutcome.MISS); return null; } else { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT); return valueHolder.value(); } } catch (StoreAccessException e) { @@ -293,10 +296,13 @@ private boolean removeInternal(final K key) { */ @Override public void clear() { + this.clearObserver.begin(); statusTransitioner.checkAvailable(); try { store.clear(); + this.clearObserver.end(ClearOutcome.SUCCESS); } catch (StoreAccessException e) { + this.clearObserver.end(ClearOutcome.FAILURE); resilienceStrategy.clearFailure(e); } } @@ -735,9 +741,9 @@ public void compute(K key, final BiFunction c @Override public V apply(K mappedKey, V mappedValue) { if (mappedValue == null) { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS); } else { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); } V newValue = computeFunction.apply(mappedKey, mappedValue); @@ -786,17 +792,17 @@ public V apply(K mappedKey, V mappedValue) { } }); } catch (StoreAccessException e) { - getObserver.end(GetOutcome.FAILURE); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); removeObserver.end(RemoveOutcome.FAILURE); throw new RuntimeException(e); } V returnValue = existingValue.get(); if (returnValue != null) { - 
getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); removeObserver.end(RemoveOutcome.SUCCESS); } else { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS); } return returnValue; } @@ -821,17 +827,17 @@ public V apply(K mappedKey, V mappedValue) { } }); } catch (StoreAccessException e) { - getObserver.end(GetOutcome.FAILURE); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); putObserver.end(PutOutcome.FAILURE); throw new RuntimeException(e); } V returnValue = existingValue.get(); if (returnValue != null) { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); putObserver.end(PutOutcome.UPDATED); } else { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS); putObserver.end(PutOutcome.PUT); } return returnValue; @@ -902,12 +908,12 @@ public Entry next() { if (!quiet) getObserver.begin(); if (nextException == null) { - if (!quiet) getObserver.end(GetOutcome.HIT_NO_LOADER); + if (!quiet) getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); current = next; advance(); return new ValueHolderBasedEntry(current); } else { - if (!quiet) getObserver.end(GetOutcome.FAILURE); + if (!quiet) getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); StoreAccessException cae = nextException; nextException = null; return resilienceStrategy.iteratorFailure(cae); diff --git a/core/src/main/java/org/ehcache/core/EhcacheManager.java b/core/src/main/java/org/ehcache/core/EhcacheManager.java index f7e67db449..5062b065b8 100644 --- a/core/src/main/java/org/ehcache/core/EhcacheManager.java +++ b/core/src/main/java/org/ehcache/core/EhcacheManager.java @@ -17,6 +17,7 @@ package 
org.ehcache.core; import org.ehcache.Cache; +import org.ehcache.CachePersistenceException; import org.ehcache.PersistentCacheManager; import org.ehcache.Status; import org.ehcache.config.Builder; @@ -27,36 +28,35 @@ import org.ehcache.core.config.BaseCacheConfiguration; import org.ehcache.core.config.DefaultConfiguration; import org.ehcache.core.config.store.StoreEventSourceConfiguration; -import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.events.CacheEventDispatcherFactory; +import org.ehcache.core.events.CacheEventListenerConfiguration; +import org.ehcache.core.events.CacheEventListenerProvider; import org.ehcache.core.events.CacheManagerListener; -import org.ehcache.core.spi.LifeCycledAdapter; import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.InternalCacheManager; -import org.ehcache.core.spi.store.Store; +import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.internal.store.StoreSupport; -import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.internal.util.ClassLoading; -import org.ehcache.event.CacheEventListener; -import org.ehcache.core.events.CacheEventListenerConfiguration; -import org.ehcache.core.events.CacheEventListenerProvider; -import org.ehcache.CachePersistenceException; import org.ehcache.core.spi.LifeCycled; -import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.core.spi.LifeCycledAdapter; +import org.ehcache.core.spi.service.CacheManagerProviderService; +import org.ehcache.core.spi.store.InternalCacheManager; +import org.ehcache.core.spi.store.Store; +import org.ehcache.event.CacheEventListener; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; import org.ehcache.spi.loaderwriter.WriteBehindProvider; +import 
org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; import org.ehcache.spi.service.MaintainableService; -import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -73,6 +73,8 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; + /** * Implementation class for the {@link org.ehcache.CacheManager} and {@link PersistentCacheManager} *

@@ -82,17 +84,6 @@ */ public class EhcacheManager implements PersistentCacheManager, InternalCacheManager { - @ServiceDependencies({ Store.Provider.class, - CacheLoaderWriterProvider.class, - WriteBehindProvider.class, - CacheEventDispatcherFactory.class, - CacheEventListenerProvider.class }) - private static class ServiceDeps { - private ServiceDeps() { - throw new UnsupportedOperationException("This is an annotation placeholder, not to be instantiated"); - } - } - private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheManager.class); private final DefaultConfiguration configuration; @@ -119,10 +110,9 @@ public EhcacheManager(Configuration config, Collection services, boolea this.simpleName = (simpleName.isEmpty() ? this.getClass().getName() : simpleName); this.configuration = new DefaultConfiguration(config); this.cacheManagerClassLoader = config.getClassLoader() != null ? config.getClassLoader() : ClassLoading.getDefaultClassLoader(); - this.serviceLocator = new ServiceLocator(services.toArray(new Service[services.size()])); this.useLoaderInAtomics = useLoaderInAtomics; validateServicesConfigs(); - resolveServices(); + this.serviceLocator = resolveServices(services); } private void validateServicesConfigs() { @@ -134,17 +124,30 @@ private void validateServicesConfigs() { } } - private void resolveServices() { - if (serviceLocator.getService(CacheManagerProviderService.class) == null) { - this.serviceLocator.addService(new DefaultCacheManagerProviderService(this)); + private ServiceLocator resolveServices(Collection services) { + ServiceLocator.DependencySet builder = dependencySet() + .with(Store.Provider.class) + .with(CacheLoaderWriterProvider.class) + .with(WriteBehindProvider.class) + .with(CacheEventDispatcherFactory.class) + .with(CacheEventListenerProvider.class) + .with(services); + if (!builder.contains(CacheManagerProviderService.class)) { + builder = builder.with(new DefaultCacheManagerProviderService(this)); } for 
(ServiceCreationConfiguration serviceConfig : configuration.getServiceCreationConfigurations()) { - Service service = serviceLocator.getOrCreateServiceFor(serviceConfig); - if (service == null) { - throw new IllegalArgumentException("Couldn't resolve Service " + serviceConfig.getServiceType().getName()); - } + builder = builder.with(serviceConfig); } - serviceLocator.loadDependenciesOf(ServiceDeps.class); + return builder.build(); + } + + /** + * Exposed for testing purpose + * + * @return the status transitioner keeping the current cache manager state + */ + StatusTransitioner getStatusTransitioner() { + return statusTransitioner; } @Override @@ -660,21 +663,35 @@ public void destroyCache(final String alias) throws CachePersistenceException { StatusTransitioner.Transition maintenance = null; try { maintenance = statusTransitioner.maintenance(); - maintenance.succeeded(); } catch(IllegalStateException e) { // the cache manager is already started, no need to put it in maintenance - // however, we need to check that we are in maintenance. 
Note that right after the check, the is a window - // for someone to go in maintenance + // however, we need to check that some other thread ISN'T in maintenance + // Note that right after the check, there is a window for someone to go in maintenance statusTransitioner.checkAvailable(); } + if(maintenance != null) { + try { + startMaintainableServices(MaintainableService.MaintenanceScope.CACHE); + maintenance.succeeded(); + } catch (Throwable t) { + throw maintenance.failed(t); + } + } + try { removeAndCloseWithoutNotice(alias); destroyPersistenceSpace(alias); } finally { // if it was started, stop it if(maintenance != null) { - statusTransitioner.exitMaintenance().succeeded(); + StatusTransitioner.Transition st = statusTransitioner.exitMaintenance(); + try { + stopMaintainableServices(); + st.succeeded(); + } catch (Throwable t) { + throw st.failed(t); + } } } @@ -692,7 +709,7 @@ private void destroyPersistenceSpace(String alias) throws CachePersistenceExcept public void destroy() throws CachePersistenceException { StatusTransitioner.Transition st = statusTransitioner.maintenance(); try { - startMaintainableServices(); + startMaintainableServices(MaintainableService.MaintenanceScope.CACHE_MANAGER); st.succeeded(); } catch (Throwable t) { throw st.failed(t); @@ -708,11 +725,11 @@ public void destroy() throws CachePersistenceException { LOGGER.info("All persistent data destroyed for {}", simpleName); } - private void startMaintainableServices() { + private void startMaintainableServices(MaintainableService.MaintenanceScope maintenanceScope) { ServiceProvider provider = getMaintainableServiceProvider(); Collection services = serviceLocator.getServicesOfType(MaintainableService.class); for (MaintainableService service : services) { - service.startForMaintenance(provider); + service.startForMaintenance(provider, maintenanceScope); } } diff --git a/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java 
b/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java index 7622339cc3..3786523c61 100644 --- a/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java +++ b/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java @@ -126,13 +126,13 @@ public synchronized void deregisterCacheEventListener(CacheEventListener listener, EventOrdering ordering, EventFiring firing, Set forEventTypes) { - EventListenerWrapper listenerWrapper = new EventListenerWrapper(listener, firing, ordering, EnumSet.copyOf(forEventTypes)); + EventListenerWrapper listenerWrapper = new EventListenerWrapper(listener, firing, ordering, EnumSet.copyOf(forEventTypes)); fireCacheConfigurationChange(CacheConfigurationProperty.ADD_LISTENER, listenerWrapper, listenerWrapper); } @Override public void registerCacheEventListener(CacheEventListener listener, EventOrdering ordering, EventFiring firing, EventType eventType, EventType... eventTypes) { - EventListenerWrapper listenerWrapper = new EventListenerWrapper(listener, firing, ordering, EnumSet.of(eventType, eventTypes)); + EventListenerWrapper listenerWrapper = new EventListenerWrapper(listener, firing, ordering, EnumSet.of(eventType, eventTypes)); fireCacheConfigurationChange(CacheConfigurationProperty.ADD_LISTENER, listenerWrapper, listenerWrapper); } @@ -185,4 +185,4 @@ public String readableString() { "expiry: " + ((expiry != null) ? expiry.getClass().getSimpleName() : "") + "\n" + "resourcePools: " + "\n " + ((resourcePools instanceof HumanReadable) ? 
((HumanReadable)resourcePools).readableString() : "").replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java b/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java index e67c19be24..faf7231931 100644 --- a/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java +++ b/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java @@ -190,10 +190,10 @@ private V getNoLoader(K key) { // Check for expiry first if (valueHolder == null) { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(GetOutcome.MISS); return null; } else { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT); return valueHolder.value(); } } catch (StoreAccessException e) { @@ -235,10 +235,10 @@ public V apply(final K k) { // Check for expiry first if (valueHolder == null) { - getObserver.end(GetOutcome.MISS_WITH_LOADER); + getObserver.end(GetOutcome.MISS); return null; } else { - getObserver.end(GetOutcome.HIT_WITH_LOADER); + getObserver.end(GetOutcome.HIT); return valueHolder.value(); } } catch (StoreAccessException e) { @@ -1234,9 +1234,9 @@ public void compute(K key, final BiFunction c @Override public V apply(K mappedKey, V mappedValue) { if (mappedValue == null) { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(GetOutcome.MISS); } else { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT); } V newValue = computeFunction.apply(mappedKey, mappedValue); @@ -1309,10 +1309,10 @@ public V apply(K mappedKey, V mappedValue) { V returnValue = existingValue.get(); if (returnValue != null) { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT); removeObserver.end(RemoveOutcome.SUCCESS); } else { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(GetOutcome.MISS); } return returnValue; } @@ -1350,10 +1350,10 @@ public V apply(K mappedKey, V mappedValue) { V returnValue = 
existingValue.get(); if (returnValue != null) { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT); putObserver.end(PutOutcome.UPDATED); } else { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(GetOutcome.MISS); putObserver.end(PutOutcome.PUT); } return returnValue; @@ -1424,7 +1424,7 @@ public Entry next() { if (!quiet) getObserver.begin(); if (nextException == null) { - if (!quiet) getObserver.end(GetOutcome.HIT_NO_LOADER); + if (!quiet) getObserver.end(GetOutcome.HIT); current = next; advance(); return new ValueHolderBasedEntry(current); diff --git a/core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java b/core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java index fb4eea3ba4..f74a0c987f 100644 --- a/core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java +++ b/core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java @@ -22,8 +22,8 @@ import org.ehcache.config.CacheRuntimeConfiguration; import org.ehcache.config.ResourceType; import org.ehcache.core.events.CacheEventDispatcher; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoadingException; @@ -51,7 +51,7 @@ public class PersistentUserManagedEhcache implements PersistentUserManaged private final StatusTransitioner statusTransitioner; private final Logger logger; private final InternalCache cache; - private final LocalPersistenceService localPersistenceService; + private final DiskResourceService diskPersistenceService; private final String id; /** @@ -59,12 +59,12 @@ public class PersistentUserManagedEhcache implements PersistentUserManaged * * @param configuration the cache configuration * @param store the underlying store - * @param 
localPersistenceService the persistence service + * @param diskPersistenceService the persistence service * @param cacheLoaderWriter the optional loader writer * @param eventDispatcher the event dispatcher * @param id an id for this cache */ - public PersistentUserManagedEhcache(CacheConfiguration configuration, Store store, LocalPersistenceService localPersistenceService, CacheLoaderWriter cacheLoaderWriter, CacheEventDispatcher eventDispatcher, String id) { + public PersistentUserManagedEhcache(CacheConfiguration configuration, Store store, DiskResourceService diskPersistenceService, CacheLoaderWriter cacheLoaderWriter, CacheEventDispatcher eventDispatcher, String id) { this.logger = LoggerFactory.getLogger(PersistentUserManagedEhcache.class.getName() + "-" + id); this.statusTransitioner = new StatusTransitioner(logger); if (cacheLoaderWriter == null) { @@ -72,7 +72,7 @@ public PersistentUserManagedEhcache(CacheConfiguration configuration, Stor } else { this.cache = new EhcacheWithLoaderWriter(new EhcacheRuntimeConfiguration(configuration), store, cacheLoaderWriter, eventDispatcher, true, logger, statusTransitioner); } - this.localPersistenceService = localPersistenceService; + this.diskPersistenceService = diskPersistenceService; this.id = id; } @@ -98,7 +98,7 @@ void create() { if (!getRuntimeConfiguration().getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent()) { destroy(); } - localPersistenceService.getPersistenceSpaceIdentifier(id, cache.getRuntimeConfiguration()); + diskPersistenceService.getPersistenceSpaceIdentifier(id, cache.getRuntimeConfiguration()); } catch (CachePersistenceException e) { throw new RuntimeException("Unable to create persistence space for user managed cache " + id, e); } @@ -106,7 +106,7 @@ void create() { void destroyInternal() throws CachePersistenceException { statusTransitioner.checkMaintenance(); - localPersistenceService.destroy(id); + diskPersistenceService.destroy(id); } /** @@ -125,7 +125,7 @@ public 
void close() { cache.close(); if (!getRuntimeConfiguration().getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent()) { try { - localPersistenceService.destroy(id); + diskPersistenceService.destroy(id); } catch (CachePersistenceException e) { logger.debug("Unable to clear persistence space for user managed cache {}", id, e); } diff --git a/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java b/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java index 7cb77a5f33..63c66cc299 100644 --- a/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java +++ b/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java @@ -32,8 +32,8 @@ */ public class BaseCacheConfiguration implements CacheConfiguration { - private final Class keyType; - private final Class valueType; + private final Class keyType; + private final Class valueType; private final EvictionAdvisor evictionAdvisor; private final Collection> serviceConfigurations; private final ClassLoader classLoader; @@ -51,7 +51,7 @@ public class BaseCacheConfiguration implements CacheConfiguration { * @param resourcePools the resource pools * @param serviceConfigurations the service configurations */ - public BaseCacheConfiguration(Class keyType, Class valueType, + public BaseCacheConfiguration(Class keyType, Class valueType, EvictionAdvisor evictionAdvisor, ClassLoader classLoader, Expiry expiry, ResourcePools resourcePools, ServiceConfiguration... 
serviceConfigurations) { @@ -81,7 +81,7 @@ public Collection> getServiceConfigurations() { */ @Override public Class getKeyType() { - return (Class) keyType; + return keyType; } /** @@ -89,7 +89,7 @@ public Class getKeyType() { */ @Override public Class getValueType() { - return (Class) valueType; + return valueType; } /** diff --git a/core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java b/core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java index c91abd953c..ddaa0473a9 100644 --- a/core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java +++ b/core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java @@ -24,6 +24,7 @@ import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.EnumMap; import java.util.HashMap; @@ -100,26 +101,42 @@ public ResourcePools validateAndMerge(ResourcePools toBeUpdated) { * @param pools the resource pools to validate */ public static void validateResourcePools(Collection pools) { - EnumMap coreResources = new EnumMap(ResourceType.Core.class); - for (ResourcePool pool : pools) { - if (pool.getType() instanceof ResourceType.Core) { - coreResources.put((ResourceType.Core)pool.getType(), (SizedResourcePool)pool); + List ordered = new ArrayList(pools.size()); + for(ResourcePool pool : pools) { + if (pool instanceof SizedResourcePool) { + ordered.add((SizedResourcePool)pool); } } + Collections.sort(ordered, new Comparator() { + @Override + public int compare(final SizedResourcePool o1, final SizedResourcePool o2) { + int retVal = o2.getType().getTierHeight() - o1.getType().getTierHeight(); + if(retVal == 0) { + return o1.toString().compareTo(o2.toString()); + } else { + return retVal; + } + } + }); - List ordered = new ArrayList(coreResources.values()); for (int i = 0; i < ordered.size(); i++) { for (int j = 0; j < i; j++) { SizedResourcePool upper = ordered.get(j); SizedResourcePool lower = ordered.get(i); boolean inversion; + 
boolean ambiguity; try { + ambiguity = upper.getType().getTierHeight() == lower.getType().getTierHeight(); inversion = (upper.getUnit().compareTo(upper.getSize(), lower.getSize(), lower.getUnit()) >= 0) - || (lower.getUnit().compareTo(lower.getSize(), upper.getSize(), upper.getUnit()) <= 0); + || (lower.getUnit().compareTo(lower.getSize(), upper.getSize(), upper.getUnit()) <= 0); } catch (IllegalArgumentException e) { + ambiguity = false; inversion = false; } + if (ambiguity) { + throw new IllegalArgumentException("Tiering Ambiguity: '" + upper + "' has the same tier height as '" + lower + "'"); + } if (inversion) { throw new IllegalArgumentException("Tiering Inversion: '" + upper + "' is not smaller than '" + lower + "'"); } diff --git a/core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java b/core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java index 13ee9968f6..e8474f8e1f 100644 --- a/core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java +++ b/core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java @@ -27,20 +27,20 @@ /** * Internal wrapper for {@link CacheEventListener} and their configuration. 
*/ -public final class EventListenerWrapper implements CacheEventListener { - private final CacheEventListener listener; +public final class EventListenerWrapper implements CacheEventListener { + private final CacheEventListener listener; private final EventFiring firing; private final EventOrdering ordering; private final EnumSet forEvents; - public EventListenerWrapper(CacheEventListener listener) { + public EventListenerWrapper(CacheEventListener listener) { this.listener = listener; this.firing = null; this.ordering = null; this.forEvents = null; } - public EventListenerWrapper(CacheEventListener listener, final EventFiring firing, final EventOrdering ordering, + public EventListenerWrapper(CacheEventListener listener, final EventFiring firing, final EventOrdering ordering, final EnumSet forEvents) { if (listener == null) { throw new NullPointerException("listener cannot be null"); @@ -78,7 +78,7 @@ public boolean equals(Object other) { } @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { listener.onEvent(event); } diff --git a/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java b/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java index b388420467..03085f4bdd 100644 --- a/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java +++ b/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java @@ -16,6 +16,7 @@ package org.ehcache.core.internal.service; +import org.ehcache.config.Builder; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; @@ -27,175 +28,375 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Deque; +import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; +import java.util.Iterator; 
import java.util.LinkedHashSet; +import java.util.LinkedList; import java.util.List; +import java.util.Map; import java.util.ServiceLoader; import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import static java.util.Collections.*; + /** * Provides discovery and tracking services for {@link Service} implementations. */ public final class ServiceLocator implements ServiceProvider { private static final Logger LOGGER = LoggerFactory.getLogger(ServiceLocator.class); - private final ConcurrentMap, Set> services = - new ConcurrentHashMap, Set>(); - - @SuppressWarnings("rawtypes") - private final ServiceLoader serviceFactory = ClassLoading.libraryServiceLoaderFor(ServiceFactory.class); + private final ServiceMap services; private final ReadWriteLock runningLock = new ReentrantReadWriteLock(); private final AtomicBoolean running = new AtomicBoolean(false); - public ServiceLocator(Service... services) { - for (Service service : services) { - addService(service); + public static DependencySet dependencySet() { + return new DependencySet(); + } + + private ServiceLocator(ServiceMap services) { + this.services = services; + } + + @Override + public T getService(Class serviceType) { + if (serviceType.isAnnotationPresent(PluralService.class)) { + throw new IllegalArgumentException(serviceType.getName() + " is marked as a PluralService"); } + final Collection registeredServices = getServicesOfType(serviceType); + if (registeredServices.size() > 1) { + throw new AssertionError("The non-PluralService type" + serviceType.getName() + + " has more than one service registered"); + } + return (registeredServices.isEmpty() ? 
null : registeredServices.iterator().next()); + } + + @Override + public Collection getServicesOfType(Class serviceType) { + return services.get(serviceType); } - /** - * For the {@link Service} class specified, attempt to instantiate the service using the - * {@link ServiceFactory} infrastructure. If a configuration is provided, only the first - * discovered factory is used to instantiate one copy of the service; if no configuration - * is provided, use each discovered factory for the service type to attempt to create a - * service from that factory. - * - * @param serviceClass the {@code Service} type to create - * @param config the service configuration to use; may be null - * @param the type of the {@code Service} - * - * @return the collection of created services; may be empty - * - * @throws IllegalStateException if the configured service is already registered or the configured service - * implements a {@code Service} subtype that is not marked with the {@link PluralService} annotation - * but is already registered - */ - private Collection discoverServices(Class serviceClass, ServiceCreationConfiguration config) { - final List addedServices = new ArrayList(); - for (ServiceFactory factory : ServiceLocator. 
getServiceFactories(serviceFactory)) { - final Class factoryServiceType = factory.getServiceType(); - if (serviceClass.isAssignableFrom(factoryServiceType)) { - if (services.containsKey(factoryServiceType)) { - // Can have only one service registered under a concrete type - continue; + public boolean knowsServiceFor(ServiceConfiguration serviceConfig) { + return services.contains(serviceConfig.getServiceType()); + } + + public void startAllServices() throws Exception { + Deque started = new LinkedList(); + final Lock lock = runningLock.writeLock(); + lock.lock(); + try { + if (!running.compareAndSet(false, true)) { + throw new IllegalStateException("Already started!"); + } + + /* + * This ensures that we start services in dependency order + */ + LinkedList unstarted = new LinkedList(services.all()); + int totalServices = unstarted.size(); + long start = System.currentTimeMillis(); + LOGGER.debug("Starting {} Services...", totalServices); + while (!unstarted.isEmpty()) { + boolean startedSomething = false; + for (Iterator it = unstarted.iterator(); it.hasNext(); ) { + Service s = it.next(); + if (hasUnstartedDependencies(s, unstarted)) { + LOGGER.trace("Delaying starting {}", s); + } else { + LOGGER.trace("Starting {}", s); + s.start(this); + started.push(s); + it.remove(); + startedSomething = true; + } } - T service = factory.create(config); - addService(service); - addedServices.add(service); - if (config != null) { - // Each configuration should be manifested in exactly one service; look no further - return addedServices; + if (startedSomething) { + LOGGER.trace("Cycle complete: " + unstarted.size() + " Services remaining"); + } else { + throw new IllegalStateException("Cyclic dependency in Service set: " + unstarted); } } + LOGGER.debug("All Services successfully started, {} Services in {}ms", totalServices, System.currentTimeMillis() - start); + } catch (Exception e) { + while(!started.isEmpty()) { + Service toBeStopped = started.pop(); + try { + 
toBeStopped.stop(); + } catch (Exception e1) { + LOGGER.error("Stopping Service failed due to ", e1); + } + } + throw e; + } finally { + lock.unlock(); } - return addedServices; } - @SuppressWarnings("unchecked") - private static Iterable> getServiceFactories(@SuppressWarnings("rawtypes") ServiceLoader serviceFactory) { - List> list = new ArrayList>(); - for (ServiceFactory factory : serviceFactory) { - list.add((ServiceFactory)factory); + public void stopAllServices() throws Exception { + Exception firstException = null; + Lock lock = runningLock.writeLock(); + lock.lock(); + try { + if(!running.compareAndSet(true, false)) { + throw new IllegalStateException("Already stopped!"); + } + + /* + * This ensures that we stop services in dependency order + */ + Collection running = new LinkedList(services.all()); + int totalServices = running.size(); + long start = System.currentTimeMillis(); + LOGGER.debug("Stopping {} Services...", totalServices); + while (!running.isEmpty()) { + boolean stoppedSomething = false; + for (Iterator it = running.iterator(); it.hasNext(); ) { + Service s = it.next(); + if (hasRunningDependencies(s, running)) { + LOGGER.trace("Delaying stopping {}", s); + } else { + LOGGER.trace("Stopping {}", s); + try { + s.stop(); + } catch (Exception e) { + if (firstException == null) { + firstException = e; + } else { + LOGGER.error("Stopping Service failed due to ", e); + } + } + it.remove(); + stoppedSomething = true; + } + } + if (stoppedSomething) { + LOGGER.trace("Cycle complete: " + running.size() + " Services remaining"); + } else { + throw new AssertionError("Cyclic dependency in Service set: " + running); + } + } + LOGGER.debug("All Services successfully stopped, {} Services in {}ms", totalServices, System.currentTimeMillis() - start); + } finally { + lock.unlock(); + } + if(firstException != null) { + throw firstException; } - return list; } - /** - * Registers the {@code Service} provided with this {@code ServiceLocator}. 
If the service is - * already registered, the registration fails. The service specified is also registered under - * each {@code Service} subtype it implements. Duplicate registration of implemented {@code Service} - * subtypes causes registration failure unless that subtype is marked with the {@link PluralService} - * annotation. - * - * @param service the concrete {@code Service} to register - * - * @throws IllegalStateException if the configured service is already registered or {@code service} - * implements a {@code Service} subtype that is not marked with the {@link PluralService} annotation - * but is already registered - */ - public void addService(final Service service) { - final Lock lock = runningLock.readLock(); - lock.lock(); - try { - Set> serviceClazzes = new HashSet>(); + private boolean hasUnstartedDependencies(Service service, Iterable unstarted) { + for (Class dep : identifyTransitiveDependenciesOf(service.getClass())) { + for (Service s : unstarted) { + if (dep.isInstance(s)) { + return true; + } + } + } + return false; + } - for (Class i : getAllInterfaces(service.getClass())) { - if (Service.class != i && Service.class.isAssignableFrom(i)) { + private boolean hasRunningDependencies(Service service, Iterable running) { + for (Class dep : identifyTransitiveDependenciesOf(service.getClass())) { + for (Service s : running) { + if (dep.isInstance(s)) { + return true; + } + } + } + return false; + } - @SuppressWarnings("unchecked") - Class serviceClass = (Class) i; + public static class DependencySet implements Builder { - serviceClazzes.add(serviceClass); + @SuppressWarnings("rawtypes") + private final ServiceLoader serviceLoader = ClassLoading.libraryServiceLoaderFor(ServiceFactory.class); + + private final ServiceMap provided = new ServiceMap(); + private final Set> requested = new HashSet>(); + + public DependencySet with(Service service) { + provided.add(service); + return this; + } + + public DependencySet with(Iterable services) { + for 
(Service s : services) { + with(s); + } + return this; + } + + public DependencySet with(ServiceCreationConfiguration config) { + Class serviceType = config.getServiceType(); + + //TODO : This stanza is due to the way we use configure the JSR-107 service + if (provided.contains(serviceType) && !serviceType.isAnnotationPresent(PluralService.class)) { + return this; + } + + Iterable> serviceFactories = ServiceLocator.getServiceFactories(serviceLoader); + boolean success = false; + for (ServiceFactory factory : serviceFactories) { + final Class factoryServiceType = factory.getServiceType(); + if (serviceType.isAssignableFrom(factoryServiceType)) { + @SuppressWarnings("unchecked") + ServiceFactory serviceFactory = (ServiceFactory) factory; + with(serviceFactory.create(config)); + success = true; } } + if (success) { + return this; + } else { + throw new IllegalStateException("No factories exist for " + serviceType); + } + } - if (services.putIfAbsent(service.getClass(), Collections.singleton(service)) != null) { - throw new IllegalStateException("Registration of duplicate service " + service.getClass()); + public DependencySet with(Class clazz) { + requested.add(clazz); + return this; + } + + public boolean contains(Class serviceClass) { + return provided.contains(serviceClass); + } + + public T providerOf(Class serviceClass) { + if (serviceClass.isAnnotationPresent(PluralService.class)) { + throw new IllegalArgumentException("Cannot retrieve single provider for plural service"); + } else { + Collection providers = providersOf(serviceClass); + switch (providers.size()) { + case 0: + return null; + case 1: + return providers.iterator().next(); + default: + throw new AssertionError(); + } } + } - /* - * Register the concrete service under all Service subtypes it implements. 
If - * the Service subtype is annotated with @PluralService, permit multiple registrations; - * otherwise, fail the registration, - */ - for (Class serviceClazz : serviceClazzes) { - if (serviceClazz.isAnnotationPresent(PluralService.class)) { - // Permit multiple registrations - Set registeredServices = services.get(serviceClazz); - if (registeredServices == null) { - registeredServices = new LinkedHashSet(); - services.put(serviceClazz, registeredServices); - } - registeredServices.add(service); + public Collection providersOf(Class serviceClass) { + return provided.get(serviceClass); + } - } else { - // Only a single registration permitted - if (services.putIfAbsent(serviceClazz, Collections.singleton(service)) != null) { - final StringBuilder message = new StringBuilder("Duplicate service implementation(s) found for ") - .append(service.getClass()); - for (Class serviceClass : serviceClazzes) { - if (!serviceClass.isAnnotationPresent(PluralService.class)) { - final Service declaredService = services.get(serviceClass).iterator().next(); - if (declaredService != null) { - message - .append("\n\t\t- ") - .append(serviceClass) - .append(" already has ") - .append(declaredService.getClass()); - } + @Override + public ServiceLocator build() { + try { + ServiceMap resolvedServices = new ServiceMap(); + + for (Service service : provided.all()) { + resolvedServices = lookupDependenciesOf(resolvedServices, service.getClass()).add(service); + } + + for (Class request : requested) { + if (request.isAnnotationPresent(PluralService.class)) { + try { + resolvedServices = lookupService(resolvedServices, request); + } catch (DependencyException e) { + if (!resolvedServices.contains(request)) { + throw e; } } - throw new IllegalStateException(message.toString()); + } else if (!resolvedServices.contains(request)) { + resolvedServices = lookupService(resolvedServices, request); } } + + return new ServiceLocator(resolvedServices); + } catch (DependencyException e) { + throw new 
IllegalStateException(e); } + } - if (running.get()) { - loadDependenciesOf(service.getClass()); - service.start(this); + ServiceMap lookupDependenciesOf(ServiceMap resolved, Class requested) throws DependencyException { + for (Class dependency : identifyImmediateDependenciesOf(requested)) { + resolved = lookupService(resolved, dependency); } - } finally { - lock.unlock(); + return resolved; + } + + private ServiceMap lookupService(ServiceMap resolved, Class requested) throws DependencyException { + //Have we already resolved this dependency? + if (resolved.contains(requested) && !requested.isAnnotationPresent(PluralService.class)) { + return resolved; + } + //Attempt resolution from the provided services + resolved = new ServiceMap(resolved).addAll(provided.get(requested)); + if (resolved.contains(requested) && !requested.isAnnotationPresent(PluralService.class)) { + return resolved; + } + Collection> serviceFactories = discoverServices(resolved, requested); + if (serviceFactories.size() > 1 && !requested.isAnnotationPresent(PluralService.class)) { + throw new DependencyException("Multiple factories for non-plural service"); + } + for(ServiceFactory factory : serviceFactories) { + if (!resolved.contains(factory.getServiceType())) { + try { + resolved = lookupDependenciesOf(resolved, factory.getServiceType()); + } catch (DependencyException e) { + continue; + } + + T service = factory.create(null); + + //we copy the service map so that if upstream dependency resolution fails we don't pollute the real resolved set + resolved = new ServiceMap(resolved).add(service); + } + } + if (resolved.contains(requested)) { + return resolved; + } else { + throw new DependencyException("Failed to find provider with satisfied dependency set for " + requested + " [candidates " + serviceFactories + "]"); + } + } + + /** + * For the {@link Service} class specified, attempt to instantiate the service using the + * {@link ServiceFactory} infrastructure. 
+ * + * @param serviceClass the {@code Service} type to create + * @param the type of the {@code Service} + * + * @return the collection of created services; may be empty + * + * @throws IllegalStateException if the configured service is already registered or the configured service + * implements a {@code Service} subtype that is not marked with the {@link PluralService} annotation + * but is already registered + */ + private Collection> discoverServices(ServiceMap resolved, Class serviceClass) { + Collection> serviceFactories = new ArrayList>(); + for (ServiceFactory factory : ServiceLocator.getServiceFactories(serviceLoader)) { + final Class factoryServiceType = factory.getServiceType(); + if (serviceClass.isAssignableFrom(factoryServiceType) && !factory.getClass().isAnnotationPresent(ServiceFactory.RequiresConfiguration.class)) { + if (provided.contains(factoryServiceType) || resolved.contains(factoryServiceType)) { + // Can have only one service registered under a concrete type + continue; + } + @SuppressWarnings("unchecked") + ServiceFactory serviceFactory = (ServiceFactory) factory; + serviceFactories.add(serviceFactory); + } + } + return serviceFactories; } } - private Collection> getAllInterfaces(final Class clazz) { + private static Collection> getAllInterfaces(final Class clazz) { ArrayList> interfaces = new ArrayList>(); - for(Class c = clazz; c != null; c = c.getSuperclass()) { + for (Class c = clazz; c != null; c = c.getSuperclass()) { for (Class i : c.getInterfaces()) { interfaces.add(i); interfaces.addAll(getAllInterfaces(i)); @@ -204,60 +405,57 @@ private Collection> getAllInterfaces(final Class clazz) { return interfaces; } - /** - * Obtains the service supporting the configuration provided. If a registered service - * is not available, an attempt to create the service using the {@link ServiceFactory} - * discovery process will be made. 
- * - * @param config the configuration for the service - * @param the expected service type - * @return the service instance for {@code T} type, or {@code null} if it couldn't be located or instantiated - * - * @throws IllegalArgumentException if {@link ServiceCreationConfiguration#getServiceType() config.getServiceType()} - * is marked with the {@link org.ehcache.spi.service.PluralService PluralService} annotation - */ - public T getOrCreateServiceFor(ServiceCreationConfiguration config) { - return getServiceInternal(config.getServiceType(), config, true); - } + private static Set> identifyImmediateDependenciesOf(final Class clazz) { + if (clazz == null) { + return emptySet(); + } - /** - * Obtains the identified service. If a registered service is not available, an attempt - * to create the service using the {@link ServiceFactory} discovery process will be made. - * - * @param serviceType the {@code class} of the service being looked up - * @param the expected service type - * @return the service instance for {@code T} type, or {@code null} if a {@code Service} of type - * {@code serviceType} is not available - * - * @throws IllegalArgumentException if {@code serviceType} is marked with the - * {@link org.ehcache.spi.service.PluralService PluralService} annotation; - * use {@link #getServicesOfType(Class)} for plural services - */ - @Override - public T getService(Class serviceType) { - return getServiceInternal(serviceType, null, false); - } + Set> dependencies = new HashSet>(); + final ServiceDependencies annotation = clazz.getAnnotation(ServiceDependencies.class); + if (annotation != null) { + for (final Class dependency : annotation.value()) { + if (Service.class.isAssignableFrom(dependency)) { + @SuppressWarnings("unchecked") + Class serviceDependency = (Class) dependency; + dependencies.add(serviceDependency); + } else { + throw new IllegalStateException("Service dependency declared by " + clazz.getName() + + " is not a Service: " + dependency.getName()); 
+ } + } + } - private T getServiceInternal( - final Class serviceType, final ServiceCreationConfiguration config, final boolean shouldCreate) { - if (serviceType.isAnnotationPresent(PluralService.class)) { - throw new IllegalArgumentException(serviceType.getName() + " is marked as a PluralService"); + for (Class interfaceClazz : clazz.getInterfaces()) { + if (Service.class.isAssignableFrom(interfaceClazz)) { + dependencies.addAll(identifyImmediateDependenciesOf(Service.class.getClass().cast(interfaceClazz))); + } } - final Collection registeredServices = findServices(serviceType, config, shouldCreate); - if (registeredServices.size() > 1) { - throw new AssertionError("The non-PluralService type" + serviceType.getName() - + " has more than one service registered"); + + dependencies.addAll(identifyImmediateDependenciesOf(clazz.getSuperclass())); + + return dependencies; + } + + private static Set> identifyTransitiveDependenciesOf(final Class clazz) { + Set> transitive = new HashSet>(); + + Set> dependencies = identifyImmediateDependenciesOf(clazz); + transitive.addAll(dependencies); + + for (Class klazz : dependencies) { + transitive.addAll(identifyTransitiveDependenciesOf(klazz)); } - return (registeredServices.isEmpty() ? 
null : registeredServices.iterator().next()); + + return transitive; } - private Collection findServices( - Class serviceType, ServiceCreationConfiguration config, boolean shouldCreate) { - final Collection registeredServices = getServicesOfTypeInternal(serviceType); - if (shouldCreate && (registeredServices.isEmpty() || serviceType.isAnnotationPresent(PluralService.class))) { - registeredServices.addAll(discoverServices(serviceType, config)); + @SuppressWarnings("unchecked") + private static Iterable> getServiceFactories(@SuppressWarnings("rawtypes") ServiceLoader serviceFactory) { + List> list = new ArrayList>(); + for (ServiceFactory factory : serviceFactory) { + list.add((ServiceFactory)factory); } - return registeredServices; + return list; } public static Collection findAmongst(Class clazz, Collection instances) { @@ -289,186 +487,112 @@ public static T findSingletonAmongst(Class clazz, Object ... instances) { } } - public void startAllServices() throws Exception { - Deque started = new ArrayDeque(); - final Lock lock = runningLock.writeLock(); - lock.lock(); - try { - resolveMissingDependencies(); + private static class DependencyException extends Exception { + public DependencyException(String s) { + super(s); + } + } - if (!running.compareAndSet(false, true)) { - throw new IllegalStateException("Already started!"); - } + private static class ServiceMap { - for (Set registeredServices : services.values()) { - for (Service service : registeredServices) { - if (!started.contains(service)) { - service.start(this); - started.push(service); - } - } - } - LOGGER.debug("All Services successfully started."); - } catch (Exception e) { - while(!started.isEmpty()) { - Service toBeStopped = started.pop(); - try { - toBeStopped.stop(); - } catch (Exception e1) { - LOGGER.error("Stopping Service failed due to ", e1); - } + private final Map, Set> services; + + public ServiceMap(ServiceMap resolved) { + this.services = new HashMap, Set>(); + for (Map.Entry, Set> e : 
resolved.services.entrySet()) { + Set copy = newSetFromMap(new IdentityHashMap()); + copy.addAll(e.getValue()); + this.services.put(e.getKey(), copy); } - throw e; - } finally { - lock.unlock(); } - } - private void resolveMissingDependencies() { - for (Set registeredServices : services.values()) { - for (Service service : registeredServices) { - loadDependenciesOf(service.getClass()); - } + public ServiceMap() { + this.services = new HashMap, Set>(); } - } - public void stopAllServices() throws Exception { - Exception firstException = null; - Lock lock = runningLock.writeLock(); - lock.lock(); - try { - if(!running.compareAndSet(true, false)) { - throw new IllegalStateException("Already stopped!"); + public Set get(Class serviceType) { + @SuppressWarnings("unchecked") + Set s = (Set) services.get(serviceType); + if (s == null) { + return emptySet(); + } else { + return unmodifiableSet(s); } - Set stoppedServices = Collections.newSetFromMap(new IdentityHashMap()); - for (Set registeredServices : services.values()) { - for (Service service : registeredServices) { - if (stoppedServices.contains(service)) { - continue; - } - try { - service.stop(); - } catch (Exception e) { - if (firstException == null) { - firstException = e; - } else { - LOGGER.error("Stopping Service failed due to ", e); - } - } - stoppedServices.add(service); - } - } - } finally { - lock.unlock(); } - if(firstException != null) { - throw firstException; - } - } - /** - * Ensures the dependencies, as declared using the {@link ServiceDependencies} annotation, - * of the specified class are registered in this {@code ServiceLocator}. If a dependency - * is not registered when this method is invoked, an attempt to load it will be made using - * the {@link ServiceFactory} infrastructure. 
- * - * @param clazz the class for which dependency availability is checked - */ - public void loadDependenciesOf(Class clazz) { - final Collection> transitiveDependencies = identifyTransitiveDependenciesOf(clazz); - for (Class aClass : transitiveDependencies) { - if (findServices(aClass, null, true).isEmpty()) { - throw new IllegalStateException("Unable to resolve dependent service: " + aClass.getName()); + public ServiceMap addAll(Iterable services) { + for (Service s : services) { + add(s); } + return this; } - } - /** - * Identifies, transitively, all dependencies declared for the designated class through - * {@link ServiceDependencies} annotations. This method intentionally accepts - * {@code ServiceDependencies} annotations on non-{@code Service} implementations to - * permit classes like cache manager implementations to declare dependencies on - * services. All types referred to by the {@code ServiceDependencies} annotation - * must be subtypes of {@link Service}. - * - * @param clazz the top-level class instance for which the dependencies are to be determined - * - * @return the collection of declared dependencies - * - * @see #identifyTransitiveDependenciesOf(Class, Set) - */ - // Package-private for unit tests - Collection> identifyTransitiveDependenciesOf(final Class clazz) { - return identifyTransitiveDependenciesOf(clazz, new LinkedHashSet>()); - } + public ServiceMap add(Service service) { + Set> serviceClazzes = new HashSet>(); - /** - * Identifies the transitive dependencies of the designated class as declared through - * {@link ServiceDependencies} annotations. 
- * - * @param clazz the class to check for declared dependencies - * @param dependencies the current set of declared dependencies; this set will be added updated - * - * @return the set {@code dependencies} - * - * @see #identifyTransitiveDependenciesOf(Class) - */ - @SuppressWarnings("unchecked") - private Collection> identifyTransitiveDependenciesOf(final Class clazz, final Set> dependencies) { - if (clazz == null || clazz == Object.class) { - return dependencies; - } + serviceClazzes.add(service.getClass()); + for (Class i : getAllInterfaces(service.getClass())) { + if (Service.class != i && Service.class.isAssignableFrom(i)) { - final ServiceDependencies annotation = clazz.getAnnotation(ServiceDependencies.class); - if (annotation != null) { - for (final Class dependency : annotation.value()) { - if (!dependencies.contains(dependency)) { - if (!Service.class.isAssignableFrom(dependency)) { - throw new IllegalStateException("Service dependency declared by " + clazz.getName() + - " is not a Service: " + dependency.getName()); + @SuppressWarnings("unchecked") + Class serviceClass = (Class) i; + + serviceClazzes.add(serviceClass); + } + } + + /* + * Register the concrete service under all Service subtypes it implements. 
If + * the Service subtype is annotated with @PluralService, permit multiple registrations; + * otherwise, fail the registration, + */ + for (Class serviceClazz : serviceClazzes) { + if (serviceClazz.isAnnotationPresent(PluralService.class)) { + // Permit multiple registrations + Set registeredServices = services.get(serviceClazz); + if (registeredServices == null) { + registeredServices = new LinkedHashSet(); + services.put(serviceClazz, registeredServices); + } + registeredServices.add(service); + } else { + // Only a single registration permitted + Set registeredServices = services.get(serviceClazz); + if (registeredServices == null || registeredServices.isEmpty()) { + services.put(serviceClazz, singleton(service)); + } else if (!registeredServices.contains(service)) { + final StringBuilder message = new StringBuilder("Duplicate service implementation(s) found for ") + .append(service.getClass()); + for (Class serviceClass : serviceClazzes) { + if (!serviceClass.isAnnotationPresent(PluralService.class)) { + Set s = this.services.get(serviceClass); + final Service declaredService = s == null ? 
null : s.iterator().next(); + if (declaredService != null) { + message + .append("\n\t\t- ") + .append(serviceClass) + .append(" already has ") + .append(declaredService.getClass()); + } + } + } + throw new IllegalStateException(message.toString()); } - dependencies.add((Class) dependency); - identifyTransitiveDependenciesOf(dependency, dependencies); } } + return this; } - for (Class interfaceClazz : clazz.getInterfaces()) { - if (Service.class != interfaceClazz && Service.class.isAssignableFrom(interfaceClazz)) { - identifyTransitiveDependenciesOf(interfaceClazz, dependencies); + public Set all() { + Set all = newSetFromMap(new IdentityHashMap()); + for (Set s : services.values()) { + all.addAll(s); } + return unmodifiableSet(all); } - identifyTransitiveDependenciesOf(clazz.getSuperclass(), dependencies); - - return dependencies; - } - - public boolean knowsServiceFor(ServiceConfiguration serviceConfig) { - return !getServicesOfType(serviceConfig.getServiceType()).isEmpty(); - } - - @Override - public Collection getServicesOfType(Class serviceType) { - return getServicesOfTypeInternal(serviceType); - } - - /** - * Gets the collection of services implementing the type specified. 
- * - * @param serviceType the subtype of {@code Service} to return - * @param the {@code Service} subtype - * - * @return a collection, possibly empty, of the registered services implementing {@code serviceType} - */ - private Collection getServicesOfTypeInternal(final Class serviceType) { - HashSet result = new LinkedHashSet(); - final Set registeredServices = this.services.get(serviceType); - if (registeredServices != null) { - for (Service service : registeredServices) { - result.add(serviceType.cast(service)); - } + public boolean contains(Class request) { + return services.containsKey(request); } - return result; } } diff --git a/core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java b/core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java new file mode 100644 index 0000000000..b2afe0c857 --- /dev/null +++ b/core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.spi.service; + +import org.ehcache.CachePersistenceException; +import org.ehcache.spi.persistence.PersistableResourceService; + +/** + * Resource service handling file level operations for disk tiers. + */ +public interface DiskResourceService extends PersistableResourceService { + + /** + * Creates a new persistence context within the given space. 
+ * + * @param identifier space to create within + * @param name name of the context to create + * @return a {@link FileBasedPersistenceContext} + */ + FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException; +} \ No newline at end of file diff --git a/core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java b/core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java index a100f517ab..0745bdd17e 100644 --- a/core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java +++ b/core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java @@ -17,22 +17,63 @@ package org.ehcache.core.spi.service; import org.ehcache.CachePersistenceException; -import org.ehcache.spi.persistence.PersistableResourceService; +import org.ehcache.spi.service.MaintainableService; + +import java.io.File; /** - * Service to provide persistence context to caches requiring it. - *

- * Will be used by caches with a disk store, whether or not the data should survive a program restart. - *

+ * Service that provides isolated persistence spaces to any service that requires it + * under the local root directory. */ -public interface LocalPersistenceService extends PersistableResourceService { +public interface LocalPersistenceService extends MaintainableService { /** - * Creates a new persistence context within the given space. + * Creates a logical safe directory space for the owner and returns an identifying space Id. + * + * @param owner Service owner that owns the safe space. + * @param name Identifying name for the space. * - * @param identifier space to create within - * @param name name of the context to create - * @return a {@link FileBasedPersistenceContext} + * @return Opaque Identifier that can be used to identify the safe space. + */ + SafeSpaceIdentifier createSafeSpaceIdentifier(String owner, String name); + + /** + * Creates the safe space represented by {@code safeSpaceId}, if it does not exist in the underlying physical space. + * + * @param safeSpaceId Identifier to the created logical space on which the physical space needs to be created + * @throws CachePersistenceException If the space cannot be created or found, due to system errors + */ + void createSafeSpace(SafeSpaceIdentifier safeSpaceId) throws CachePersistenceException; + + /** + * Destroys the safe space. + * + * @param safeSpaceId Safe space identifier. + * @param verbose Log more information. + */ + void destroySafeSpace(SafeSpaceIdentifier safeSpaceId, boolean verbose); + + /** + * Destroys all safe spaces provided to this owner. + * + * @param owner owner of safe spaces. + */ + void destroyAll(String owner); + + /** + * Identifier to the logical safe space */ - FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException; -} + interface SafeSpaceIdentifier { + /** + * Represents the root directory of the given logical safe space. + *

+ * Note that the directory represented by {@code File} may or may not be created in the physical space. + * The existence of the physical space depends on whether the {@code createSafeSpace} method was invoked + * for the space at some time in the past or not. + *

+ * + * @return Root directory of the safe space. + */ + File getRoot(); + } +} \ No newline at end of file diff --git a/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java b/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java index c0ea1b2a31..df1275a7fb 100644 --- a/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java +++ b/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java @@ -20,6 +20,13 @@ import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceCreationConfiguration; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import static java.lang.annotation.RetentionPolicy.RUNTIME; + /** * A factory abstraction that can create {@link Service} instances. */ @@ -41,5 +48,12 @@ public interface ServiceFactory { * * @return the class of the produced service. */ - Class getServiceType(); + Class getServiceType(); + + + @Retention(RUNTIME) + @Target(ElementType.TYPE) + @interface RequiresConfiguration { + + } } diff --git a/core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java b/core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java index d37b7c26bd..c20ef8bc5e 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java +++ b/core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java @@ -20,6 +20,7 @@ import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.function.Function; import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; @@ -62,6 +63,7 @@ public interface HigherCachingTier extends CachingTier { /** * {@link Service} interface for providing {@link HigherCachingTier} instances. 
*/ + @PluralService interface Provider extends Service { /** diff --git a/core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java b/core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java index 666a170817..ab3d169233 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java +++ b/core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java @@ -20,6 +20,7 @@ import org.ehcache.core.spi.store.ConfigurationChangeSupport; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; @@ -100,6 +101,7 @@ public interface LowerCachingTier extends ConfigurationChangeSupport { /** * {@link Service} interface for providing {@link LowerCachingTier} instances. */ + @PluralService interface Provider extends Service { /** diff --git a/core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java b/core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java index 8c044e2143..500c1eda2e 100644 --- a/core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java +++ b/core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java @@ -31,7 +31,11 @@ enum GetAndFaultOutcome implements AuthoritativeTierOperationOutcomes { /** * miss */ - MISS + MISS, + /** + * timeout + */ + TIMEOUT } /** diff --git a/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java b/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java index e83cc30b7a..052f2df30e 100755 --- a/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java +++ b/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java @@ -21,18 +21,28 @@ */ public interface CacheOperationOutcomes { + /** + * Outcomes for 
cache Clear operations. + */ + enum ClearOutcome implements CacheOperationOutcomes { + /** + * success + */ + SUCCESS, + /** + * failure + */ + FAILURE + } + /** * Outcomes for cache Get operations. */ enum GetOutcome implements CacheOperationOutcomes { - /** hit, no loader */ - HIT_NO_LOADER, - /** miss, no loader */ - MISS_NO_LOADER, - /** hit */ - HIT_WITH_LOADER, - /** miss */ - MISS_WITH_LOADER, + /** hit, loader or not is Cache impl specific */ + HIT, + /** miss, loader or not is Cache impl specific*/ + MISS, /** failure */ FAILURE }; diff --git a/core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java b/core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java new file mode 100644 index 0000000000..bae4fbe995 --- /dev/null +++ b/core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java @@ -0,0 +1,83 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.core.statistics; + +import java.util.EnumMap; +import java.util.Map; +import java.util.Set; + +import static java.util.Collections.unmodifiableMap; +import static java.util.EnumSet.of; + +public class TierOperationOutcomes { + + public static final Map> GET_TRANSLATION; + + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(StoreOperationOutcomes.GetOutcome.HIT)); + translation.put(GetOutcome.MISS, of(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); + GET_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> GET_AND_FAULT_TRANSLATION; + + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); + translation.put(GetOutcome.MISS, of(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT)); + GET_AND_FAULT_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> GET_AND_REMOVE_TRANSLATION; + + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.HIT_REMOVED)); + translation.put(GetOutcome.MISS, of(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS)); + GET_AND_REMOVE_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> GET_OR_COMPUTEIFABSENT_TRANSLATION; + + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT)); + translation.put(GetOutcome.MISS, of(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED, + CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS, 
CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS)); + GET_OR_COMPUTEIFABSENT_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> EVICTION_TRANSLATION; + + static { + Map> translation = new EnumMap>(EvictionOutcome.class); + translation.put(EvictionOutcome.SUCCESS, of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + translation.put(EvictionOutcome.FAILURE, of(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + EVICTION_TRANSLATION = unmodifiableMap(translation); + } + + public enum GetOutcome { + HIT, + MISS, + } + + public enum EvictionOutcome { + SUCCESS, + FAILURE + } + +} diff --git a/core/src/test/java/org/ehcache/core/CacheTest.java b/core/src/test/java/org/ehcache/core/CacheTest.java index c96549585b..7a3b4c1aba 100644 --- a/core/src/test/java/org/ehcache/core/CacheTest.java +++ b/core/src/test/java/org/ehcache/core/CacheTest.java @@ -47,11 +47,11 @@ @SuppressWarnings({ "unchecked", "rawtypes" }) public abstract class CacheTest { - protected abstract InternalCache getCache(Store store); + protected abstract InternalCache getCache(Store store); @Test public void testTransistionsState() { - Store store = mock(Store.class); + Store store = mock(Store.class); InternalCache ehcache = getCache(store); assertThat(ehcache.getStatus(), CoreMatchers.is(Status.UNINITIALIZED)); @@ -63,10 +63,10 @@ public void testTransistionsState() { @Test public void testThrowsWhenNotAvailable() throws StoreAccessException { - Store store = mock(Store.class); + Store store = mock(Store.class); Store.Iterator mockIterator = mock(Store.Iterator.class); when(store.iterator()).thenReturn(mockIterator); - InternalCache ehcache = getCache(store); + InternalCache ehcache = getCache(store); try { ehcache.get("foo"); diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java b/core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java index c142ef411d..5fb8a11b82 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java 
+++ b/core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java @@ -68,7 +68,7 @@ public void testGetNoStoreEntry() throws Exception { assertThat(ehcache.get("key"), is(nullValue())); verify(this.store).get(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)); } /** @@ -110,7 +110,7 @@ public void testGetHasStoreEntry() throws Exception { verify(this.store).get(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); } /** diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java b/core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java index c04901a2e0..b0b9f509ec 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java @@ -207,13 +207,16 @@ public void testIteratorNonEmptyNextAfterLast() throws Exception { */ @Test public void testIteratorStoreAccessException() throws Exception { + @SuppressWarnings("unchecked") Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); doReturn("bar").when(valueHolder).value(); + @SuppressWarnings("unchecked") Cache.Entry> storeEntry = mock(Cache.Entry.class); doReturn(valueHolder).when(storeEntry).getValue(); doReturn("foo").when(storeEntry).getKey(); + @SuppressWarnings("unchecked") Store.Iterator>> storeIterator = mock(Store.Iterator.class); doReturn(true).when(storeIterator).hasNext(); doReturn(storeEntry).when(storeIterator).next(); diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java b/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java index 
ba4c1ce0e3..5af8761f2f 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java @@ -259,14 +259,15 @@ public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterNoWriter() thr } @Test + @SuppressWarnings("unchecked") public void removeAllStoreCallsMethodTwice() throws Exception { - this.store = mock(Store.class); - CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); + CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); final List removed = new ArrayList(); doAnswer(new Answer() { @Override public Object answer(InvocationOnMock invocation) throws Throwable { - Iterable i = (Iterable) invocation.getArguments()[0]; + @SuppressWarnings("unchecked") + Iterable i = (Iterable) invocation.getArguments()[0]; for (String key : i) { removed.add(key); } @@ -275,13 +276,13 @@ public Object answer(InvocationOnMock invocation) throws Throwable { }).when(cacheLoaderWriter).deleteAll(any(Iterable.class)); final EhcacheWithLoaderWriter ehcache = this.getEhcacheWithLoaderWriter(cacheLoaderWriter); - final ArgumentCaptor functionArgumentCaptor = ArgumentCaptor.forClass(Function.class); + final ArgumentCaptor>, Iterable>>> functionArgumentCaptor = (ArgumentCaptor) ArgumentCaptor.forClass(Function.class); when(store.bulkCompute(anySet(), functionArgumentCaptor.capture())).then(new Answer() { @Override public Object answer(InvocationOnMock invocation) throws Throwable { - Function function = functionArgumentCaptor.getValue(); - Iterable arg = new HashMap((Map) function.getClass().getDeclaredField("val$entriesToRemove").get(function)).entrySet(); + Function>, Iterable>> function = functionArgumentCaptor.getValue(); + Iterable> arg = new HashMap((Map) function.getClass().getDeclaredField("val$entriesToRemove").get(function)).entrySet(); function.apply(arg); function.apply(arg); return null; diff --git 
a/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java b/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java index c0683e74e2..4ee9ca9ef3 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java @@ -18,7 +18,9 @@ import org.ehcache.Cache; import org.ehcache.CacheManager; +import org.ehcache.CachePersistenceException; import org.ehcache.PersistentCacheManager; +import org.ehcache.StateTransitionException; import org.ehcache.Status; import org.ehcache.UserManagedCache; import org.ehcache.config.CacheConfiguration; @@ -30,28 +32,29 @@ import org.ehcache.core.config.ResourcePoolsHelper; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.events.CacheEventDispatcherFactory; +import org.ehcache.core.events.CacheEventListenerProvider; import org.ehcache.core.events.CacheManagerListener; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.core.internal.util.ClassLoading; -import org.ehcache.core.events.CacheEventListenerProvider; -import org.ehcache.StateTransitionException; -import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.core.spi.store.Store; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; import org.ehcache.spi.loaderwriter.WriteBehindProvider; +import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.spi.service.ServiceProvider; import org.hamcrest.CoreMatchers; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.ExpectedException; import org.mockito.Matchers; import org.mockito.Mockito; import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -74,6 +77,7 @@ import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyCollection; import static org.mockito.Matchers.anySet; +import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -85,6 +89,23 @@ @SuppressWarnings({ "unchecked", "rawtypes" }) public class EhcacheManagerTest { + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + private static Map> newCacheMap() { + return new HashMap>(); + } + + private List minimunCacheManagerServices() { + return new ArrayList(Arrays.asList( + mock(Store.Provider.class), + mock(CacheLoaderWriterProvider.class), + mock(WriteBehindProvider.class), + mock(CacheEventDispatcherFactory.class), + mock(CacheEventListenerProvider.class), + mock(LocalPersistenceService.class))); + } + @Test public void testCanDestroyAndClose() throws Exception { CacheConfiguration cacheConfiguration = new BaseCacheConfiguration(Long.class, String.class, null, @@ -99,7 +120,7 @@ public void testCanDestroyAndClose() throws Exception { when(store.getConfigurationChangeListeners()).thenReturn(new ArrayList()); when(cacheEventNotificationListenerServiceProvider.createCacheEventDispatcher(store)).thenReturn(mock(CacheEventDispatcher.class)); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("aCache", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); PersistentCacheManager cacheManager = new EhcacheManager(config, Arrays.asList( @@ -121,7 +142,7 @@ public void testCanDestroyAndClose() throws Exception { @Test public void testConstructionThrowsWhenNotBeingToResolveService() { - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); final DefaultConfiguration config = new DefaultConfiguration(caches, 
null, new ServiceCreationConfiguration() { @Override public Class getServiceType() { @@ -131,14 +152,15 @@ public Class getServiceType() { try { new EhcacheManager(config); fail("Should have thrown..."); - } catch (IllegalArgumentException e) { + } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString(NoSuchService.class.getName())); } } @Test public void testCreationFailsOnDuplicateServiceCreationConfiguration() { - DefaultConfiguration config = new DefaultConfiguration(Collections.>emptyMap(), null, new ServiceCreationConfiguration() { + Map> caches = newCacheMap(); + DefaultConfiguration config = new DefaultConfiguration(caches, null, new ServiceCreationConfiguration() { @Override public Class getServiceType() { return NoSuchService.class; @@ -159,18 +181,13 @@ public Class getServiceType() { @Test public void testStopAllServicesWhenCacheInitializationFails() { - Store.Provider storeProvider = mock(Store.Provider.class); - - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("myCache", mock(CacheConfiguration.class)); DefaultConfiguration config = new DefaultConfiguration(caches, null); - CacheManager cacheManager = new EhcacheManager(config, Arrays.asList( - storeProvider, - mock(CacheLoaderWriterProvider.class), - mock(WriteBehindProvider.class), - mock(CacheEventDispatcherFactory.class), - mock(CacheEventListenerProvider.class), - mock(LocalPersistenceService.class))); + List services = minimunCacheManagerServices(); + EhcacheManager cacheManager = new EhcacheManager(config, services); + + Store.Provider storeProvider = (Store.Provider) services.get(0); // because I know it's the first of the list try { cacheManager.init(); @@ -182,7 +199,7 @@ public void testStopAllServicesWhenCacheInitializationFails() { @Test public void testNoClassLoaderSpecified() { - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("foo", new BaseCacheConfiguration(Object.class, Object.class, null, null, null, 
ResourcePoolsHelper.createHeapOnlyPools())); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -216,7 +233,7 @@ public void testClassLoaderSpecified() { assertNotSame(cl1, cl2); assertNotSame(cl1.getClass(), cl2.getClass()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("foo1", new BaseCacheConfiguration(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools())); caches.put("foo2", new BaseCacheConfiguration(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools())); caches.put("foo3", new BaseCacheConfiguration(Object.class, Object.class, null, cl2, null, ResourcePoolsHelper.createHeapOnlyPools())); @@ -245,7 +262,7 @@ public void testClassLoaderSpecified() { @Test public void testReturnsNullForNonExistCache() { - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, getServices(null, null)); cacheManager.init(); @@ -267,7 +284,7 @@ public void testThrowsWhenAddingExistingCache() { when(storeProvider .createStore(Matchers.anyObject(), Matchers.anyVararg())).thenReturn(mock); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("bar", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -297,7 +314,7 @@ public void testThrowsWhenNotInitialized() { .createStore(Matchers.anyObject(), Matchers.anyVararg())).thenReturn(mock); final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration(Integer.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("bar", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, services); @@ -335,7 +352,7 @@ public void 
testThrowsWhenRetrievingCacheWithWrongTypes() { .createStore(Matchers.anyObject(), Matchers.anyVararg())).thenReturn(mock); final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration(Integer.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("bar", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, services); @@ -379,27 +396,26 @@ public void testLifeCyclesCacheLoaders() throws Exception { when(cacheLoaderWriterProvider.createCacheLoaderWriter("foo", fooConfig)).thenReturn(fooLoaderWriter); + Map> caches = newCacheMap(); + caches.put("bar", barConfig); + caches.put("foo", fooConfig); - @SuppressWarnings("serial") - final Configuration cfg = new DefaultConfiguration( - new HashMap>() {{ - put("bar", barConfig); - put("foo", fooConfig); - }}, + Configuration cfg = new DefaultConfiguration( + caches, getClass().getClassLoader() ); - final Store.Provider storeProvider = mock(Store.Provider.class); + Store.Provider storeProvider = mock(Store.Provider.class); when(storeProvider.rank(anySet(), anyCollection())).thenReturn(1); - final Store mock = mock(Store.class); - final CacheEventDispatcherFactory cenlProvider = mock(CacheEventDispatcherFactory.class); - final CacheEventDispatcher cenlServiceMock = mock(CacheEventDispatcher.class); + Store mock = mock(Store.class); + CacheEventDispatcherFactory cenlProvider = mock(CacheEventDispatcherFactory.class); + CacheEventDispatcher cenlServiceMock = mock(CacheEventDispatcher.class); when(cenlProvider.createCacheEventDispatcher(mock)).thenReturn(cenlServiceMock); - final Collection services = getServices(cacheLoaderWriterProvider, decoratorLoaderWriterProvider, storeProvider, cenlProvider); + Collection services = getServices(cacheLoaderWriterProvider, decoratorLoaderWriterProvider, storeProvider, cenlProvider); 
when(storeProvider .createStore(Matchers.anyObject(), Matchers.anyVararg())).thenReturn(mock); - final EhcacheManager manager = new EhcacheManager(cfg, services); + EhcacheManager manager = new EhcacheManager(cfg, services); manager.init(); verify(cacheLoaderWriterProvider).createCacheLoaderWriter("bar", barConfig); @@ -423,7 +439,7 @@ public void testDoesNotifyAboutCache() { final Collection services = getServices(mock, cenlProvider); when(mock.createStore(Matchers.anyObject())).thenReturn(mock(Store.class)); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, services); final CacheManagerListener listener = mock(CacheManagerListener.class); @@ -450,7 +466,7 @@ public void testDoesNotNotifyAboutCacheOnInitOrClose() { final Collection services = getServices(mock, cenlProvider); when(mock.createStore(Matchers.anyObject())).thenReturn(mock(Store.class)); final String cacheAlias = "bar"; - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put(cacheAlias, cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, services); @@ -472,7 +488,7 @@ public void testClosesStartedCachesDownWhenInitThrows() { final Collection services = getServices(storeProvider, null); final RuntimeException thrown = new RuntimeException(); when(storeProvider.createStore(Matchers.anyObject())).thenReturn(mock(Store.class)); - Map> cacheMap = new HashMap>(); + Map> cacheMap = newCacheMap(); cacheMap.put("foo", cacheConfiguration); cacheMap.put("bar", cacheConfiguration); cacheMap.put("foobar", cacheConfiguration); @@ -526,7 +542,7 @@ public void testClosesAllCachesDownWhenCloseThrows() { final Collection services = getServices(storeProvider, cenlProvider); final RuntimeException thrown = new RuntimeException(); 
when(storeProvider.createStore(Matchers.anyObject())).thenReturn(mock(Store.class)); - Map> cacheMap = new HashMap>(); + Map> cacheMap = newCacheMap(); cacheMap.put("foo", cacheConfiguration); cacheMap.put("bar", cacheConfiguration); cacheMap.put("foobar", cacheConfiguration); @@ -564,7 +580,7 @@ protected void closeEhcache(final String alias, final InternalCache ehcach @Test public void testDoesNotifyAboutLifecycle() { - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, getServices(null, null)); final CacheManagerListener listener = mock(CacheManagerListener.class); @@ -636,7 +652,7 @@ public void releaseCacheEventDispatcher(CacheEventDispatcher eventD when(mockStore.getConfigurationChangeListeners()).thenReturn(configurationChangeListenerList); when(storeProvider.createStore(Matchers.anyObject())).thenReturn(mockStore); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("foo", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, services) { @@ -665,7 +681,7 @@ public void testChangesToManagerAreReflectedInConfig() { when(cacheEventNotificationListenerServiceProvider.createCacheEventDispatcher(store)).thenReturn(mock(CacheEventDispatcher.class)); CacheConfiguration cache1Configuration = new BaseCacheConfiguration(Long.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("cache1", cache1Configuration); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -710,7 +726,7 @@ public void testCachesAddedAtRuntimeGetReInited() { when(cacheEventNotificationListenerServiceProvider.createCacheEventDispatcher(store)).thenReturn(mock(CacheEventDispatcher.class)); CacheConfiguration cache1Configuration = 
new BaseCacheConfiguration(Long.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("cache1", cache1Configuration); DefaultConfiguration config = new DefaultConfiguration(caches, null); CacheManager cacheManager = new EhcacheManager(config, Arrays.asList( @@ -744,7 +760,7 @@ public void testCloseWhenRuntimeCacheCreationFails() throws Exception { when(storeProvider.rank(anySet(), anyCollection())).thenReturn(1); doThrow(new Error("Test EhcacheManager close.")).when(storeProvider).createStore(any(Store.Configuration.class), Matchers.anyVararg()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); DefaultConfiguration config = new DefaultConfiguration(caches, null); final CacheManager cacheManager = new EhcacheManager(config, Arrays.asList( storeProvider, @@ -778,7 +794,7 @@ public void testCloseWhenCacheCreationFailsDuringInitialization() throws Excepti doThrow(new Error("Test EhcacheManager close.")).when(storeProvider).createStore(any(Store.Configuration.class), Matchers.anyVararg()); CacheConfiguration cacheConfiguration = new BaseCacheConfiguration(Long.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("cache1", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); final CacheManager cacheManager = new EhcacheManager(config, Arrays.asList( @@ -813,6 +829,47 @@ public void run() { } + @Test + public void testDestroyCacheFailsIfAlreadyInMaintenanceMode() throws CachePersistenceException, InterruptedException { + Map> caches = newCacheMap(); + DefaultConfiguration config = new DefaultConfiguration(caches, null); + final EhcacheManager manager = new EhcacheManager(config, minimunCacheManagerServices()); + + Thread thread = new Thread(new Runnable() { + @Override + public void run() { + 
manager.getStatusTransitioner().maintenance().succeeded(); + } + }); + thread.start(); + thread.join(1000); + + expectedException.expect(IllegalStateException.class); + expectedException.expectMessage("State is MAINTENANCE, yet you don't own it!"); + + manager.destroyCache("test"); + } + + @Test + public void testDestroyCacheFailsAndStopIfStartingServicesFails() throws CachePersistenceException, InterruptedException { + Map> caches = newCacheMap(); + DefaultConfiguration config = new DefaultConfiguration(caches, null); + List services = minimunCacheManagerServices(); + MaintainableService service = mock(MaintainableService.class); + doThrow(new RuntimeException("failed")).when(service) + .startForMaintenance(Mockito.>anyObject(), eq(MaintainableService.MaintenanceScope.CACHE)); + services.add(service); + + EhcacheManager manager = new EhcacheManager(config, services); + + expectedException.expect(StateTransitionException.class); + expectedException.expectMessage("failed"); + + manager.destroyCache("test"); + + assertThat(manager.getStatus(), equalTo(Status.UNINITIALIZED)); + } + private Collection getServices(Store.Provider storeProvider, CacheEventDispatcherFactory cenlProvider) { return getServices(mock(CacheLoaderWriterProvider.class), mock(WriteBehindProvider.class), storeProvider != null ? 
storeProvider : mock(Store.Provider.class), diff --git a/core/src/test/java/org/ehcache/core/EhcacheTest.java b/core/src/test/java/org/ehcache/core/EhcacheTest.java index ef4d995f57..7f59733cd2 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheTest.java @@ -31,9 +31,10 @@ public class EhcacheTest extends CacheTest { @Override - protected InternalCache getCache(Store store) { + protected InternalCache getCache(Store store) { final CacheConfiguration config = new BaseCacheConfiguration(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); + @SuppressWarnings("unchecked") CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); return new Ehcache(config, store, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheTest")); } diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java index 12f7cca041..b2a7bfd3d4 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java @@ -81,7 +81,7 @@ public void testGetNoStoreEntry() throws Exception { assertThat(ehcache.get("key"), is(nullValue())); verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); } @@ -104,7 +104,7 @@ public void testGetNoStoreEntryNoCacheLoaderWriterEntry() throws Exception { verify(this.cacheLoaderWriter).load(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().containsKey("key"), 
is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.SUCCESS)); } @@ -128,7 +128,7 @@ public void testGetNoStoreEntryHasCacheLoaderWriterEntry() throws Exception { verify(this.cacheLoaderWriter).load(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.SUCCESS)); } @@ -257,7 +257,7 @@ public void testGetHasStoreEntry() throws Exception { verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); } @@ -281,7 +281,7 @@ public void testGetHasStoreEntryNoCacheLoaderWriterEntry() throws Exception { verify(this.cacheLoaderWriter, never()).load(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); } @@ -306,7 +306,7 @@ public void testGetHasStoreEntryHasCacheLoaderWriterEntry() throws Exception { verify(this.cacheLoaderWriter, never()).load(eq("key")); 
verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); } @@ -331,7 +331,7 @@ public void testGetHasStoreEntryCacheLoadingException() throws Exception { verify(this.cacheLoaderWriter, never()).load(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); } diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java index 4d3efb77b9..c45d3321a2 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java @@ -1871,6 +1871,7 @@ public void testPutAllPartialIntersectionsImmediatelyExpiredCreatedEntries() thr final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); this.cacheLoaderWriter = spy(fakeLoaderWriter); + @SuppressWarnings("unchecked") final Expiry expiry = mock(Expiry.class); when(expiry.getExpiryForCreation(any(String.class), any(String.class))).thenReturn(Duration.ZERO); @@ -1901,6 +1902,7 @@ public void testPutAllPartialIntersectionsImmediatelyExpiredUpdatedEntries() thr final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); this.cacheLoaderWriter = spy(fakeLoaderWriter); + @SuppressWarnings("unchecked") final 
Expiry expiry = mock(Expiry.class); when(expiry.getExpiryForUpdate(any(String.class), argThat(org.ehcache.core.util.Matchers.holding(instanceOf(String.class))), any(String.class))).thenReturn(Duration.ZERO); diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java index 157db437af..4d9b220e0d 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java @@ -520,6 +520,7 @@ public void testReplaceWithImmediatelyExpiredEntry() throws Exception { final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "old-value")); + @SuppressWarnings("unchecked") final Expiry expiry = mock(Expiry.class); when(expiry.getExpiryForUpdate(eq("key"), argThat(holding("old-value")), eq("value"))).thenReturn(Duration.ZERO); diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java index 29b8cba0f9..b6f4c4bf26 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java @@ -895,6 +895,7 @@ public void testReplaceWithImmediatelyExpiredEntry() throws Exception { final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "old-value")); + @SuppressWarnings("unchecked") final Expiry expiry = mock(Expiry.class); when(expiry.getExpiryForUpdate(eq("key"), argThat(holding("old-value")), eq("value"))).thenReturn(Duration.ZERO); diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java index 10961bb601..74da460183 100644 --- 
a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java @@ -52,18 +52,21 @@ public class EhcacheWithLoaderWriterTest extends CacheTest { @Override - protected InternalCache getCache(Store store) { + protected InternalCache getCache(Store store) { final CacheConfiguration config = new BaseCacheConfiguration(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); + @SuppressWarnings("unchecked") CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); + @SuppressWarnings("unchecked") CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); - return new EhcacheWithLoaderWriter(config, store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterTest")); + return new EhcacheWithLoaderWriter(config, store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterTest")); } @Test public void testIgnoresKeysReturnedFromCacheLoaderLoadAll() { LoadAllVerifyStore store = new LoadAllVerifyStore(); KeyFumblingCacheLoaderWriter loader = new KeyFumblingCacheLoaderWriter(); + @SuppressWarnings("unchecked") CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); CacheConfiguration config = new BaseCacheConfiguration(String.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); diff --git a/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java b/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java index dc31d3fcf2..0f02d65022 100644 --- a/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java +++ b/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java @@ -25,10 +25,11 @@ import org.hamcrest.Matchers; import org.junit.Test; +import java.util.Arrays; import 
java.util.Collection; import java.util.Collections; +import java.util.List; -import static java.util.Arrays.asList; import static org.ehcache.config.ResourceType.Core.HEAP; import static org.ehcache.config.ResourceType.Core.OFFHEAP; import static org.ehcache.config.units.EntryUnit.ENTRIES; @@ -45,6 +46,39 @@ */ public class ResourcePoolsImplTest { + private static class ArbitraryType implements ResourceType { + private final int tierHeight; + + public ArbitraryType(int tierHeight) { + this.tierHeight = tierHeight; + } + + @Override + public Class getResourcePoolClass() { + return SizedResourcePool.class; + } + + @Override + public boolean isPersistable() { + return false; + } + + @Override + public boolean requiresSerialization() { + return false; + } + + @Override + public int getTierHeight() { + return tierHeight; + } + + @Override + public String toString() { + return "arbitrary"; + } + } + @Test public void testMismatchedUnits() { Collection> pools = asList( @@ -69,6 +103,61 @@ public void testMatchingUnequalUnitsWellTiered() { validateResourcePools(pools); } + @Test + public void testArbitraryPoolWellTieredHeap() { + Collection> pools = asList( + new SizedResourcePoolImpl(HEAP, 9, MB, false), + new SizedResourcePoolImpl(new ArbitraryType(HEAP.getTierHeight() - 1), 10, MB, false)); + validateResourcePools(pools); + } + + @Test + public void testArbitraryPoolWellTieredOffHeap() { + Collection> pools = asList( + new SizedResourcePoolImpl(new ArbitraryType(OFFHEAP.getTierHeight() + 1), 9, MB, false), + new SizedResourcePoolImpl(OFFHEAP, 10, MB, false)); + validateResourcePools(pools); + } + + @Test + public void testArbitraryPoolInversionHeap() { + Collection> pools = asList( + new SizedResourcePoolImpl(HEAP, 10, MB, false), + new SizedResourcePoolImpl(new ArbitraryType(HEAP.getTierHeight() - 1), 10, MB, false)); + try { + validateResourcePools(pools); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + 
assertThat(e.getMessage(), is("Tiering Inversion: 'Pool {10 MB heap}' is not smaller than 'Pool {10 MB arbitrary}'")); + } + } + + @Test + public void testArbitraryPoolInversionOffHeap() { + Collection> pools = asList( + new SizedResourcePoolImpl(new ArbitraryType(OFFHEAP.getTierHeight() + 1), 10, MB, false), + new SizedResourcePoolImpl(OFFHEAP, 10, MB, false)); + try { + validateResourcePools(pools); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), is("Tiering Inversion: 'Pool {10 MB arbitrary}' is not smaller than 'Pool {10 MB offheap}'")); + } + } + + @Test + public void testArbitraryPoolAmbiguity() { + Collection> pools = asList( + new SizedResourcePoolImpl(new ArbitraryType(OFFHEAP.getTierHeight()), 10, MB, false), + new SizedResourcePoolImpl(OFFHEAP, 10, MB, false)); + try { + validateResourcePools(pools); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), is("Tiering Ambiguity: 'Pool {10 MB arbitrary}' has the same tier height as 'Pool {10 MB offheap}'")); + } + } + @Test public void testEntryResourceMatch() { Collection> pools = asList( @@ -210,4 +299,10 @@ public void testUpdateResourceUnitFailure() { assertThat(existing.getPoolForResource(ResourceType.Core.HEAP).getUnit(), Matchers.is(MemoryUnit.MB)); } + private Collection asList(T value1, T value2) { + @SuppressWarnings("unchecked") + List list = Arrays.asList(value1, value2); + return list; + } + } diff --git a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java b/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java index 408297a008..b1541e1534 100644 --- a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java +++ b/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java @@ -27,11 +27,12 @@ import java.util.concurrent.atomic.AtomicInteger; import 
java.util.concurrent.atomic.AtomicReference; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.isOneOf; +import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @@ -46,27 +47,27 @@ public class ServiceLocatorPluralTest { */ @Test public void testMultipleInstanceRegistration() throws Exception { - final ServiceLocator serviceLocator = new ServiceLocator(); + final ServiceLocator.DependencySet serviceLocator = dependencySet(); final ConcreteService firstSingleton = new ConcreteService(); final ConcreteService secondSingleton = new ConcreteService(); - serviceLocator.addService(firstSingleton); + serviceLocator.with(firstSingleton); - assertThat(serviceLocator.getServicesOfType(ConcreteService.class), contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AdditionalService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AggregateService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(FooService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(BarService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(FoundationService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AugmentedService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(ConcreteService.class), contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AdditionalService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AggregateService.class), Matchers.contains(firstSingleton)); + 
assertThat(serviceLocator.providersOf(FooService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(BarService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(FoundationService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AugmentedService.class), Matchers.contains(firstSingleton)); try { - serviceLocator.addService(secondSingleton); + serviceLocator.with(secondSingleton); fail(); } catch (IllegalStateException e) { // expected - assertThat(e.getMessage(), containsString("duplicate service class " + ConcreteService.class.getName())); + assertThat(e.getMessage(), containsString(ConcreteService.class.getName())); } } @@ -76,30 +77,26 @@ public void testMultipleInstanceRegistration() throws Exception { */ @Test public void testMultipleImplementationRegistration() throws Exception { - final ServiceLocator serviceLocator = new ServiceLocator(); + final ServiceLocator.DependencySet serviceLocator = dependencySet(); final ConcreteService firstSingleton = new ConcreteService(); final ExtraConcreteService secondSingleton = new ExtraConcreteService(); - serviceLocator.addService(firstSingleton); + serviceLocator.with(firstSingleton); - assertThat(serviceLocator.getServicesOfType(ConcreteService.class), contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AdditionalService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AggregateService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(FooService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(BarService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(FoundationService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AugmentedService.class), Matchers.contains(firstSingleton)); + 
assertThat(serviceLocator.providersOf(ConcreteService.class), contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AdditionalService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AggregateService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(FooService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(BarService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(FoundationService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AugmentedService.class), Matchers.contains(firstSingleton)); try { - serviceLocator.addService(secondSingleton); + serviceLocator.with(secondSingleton); fail(); } catch (IllegalStateException e) { // expected - - // This assertion is here to point out a potentially unwanted side-effect -- a partial registration - assertThat(serviceLocator.getServicesOfType(ExtraConcreteService.class), contains(secondSingleton)); - final String message = e.getMessage(); assertThat(message, containsString(AdditionalService.class.getName())); assertThat(message, containsString(AggregateService.class.getName())); @@ -116,73 +113,29 @@ public void testMultipleImplementationRegistration() throws Exception { */ @Test public void testPluralRegistration() throws Exception { - final ServiceLocator serviceLocator = new ServiceLocator(); + final ServiceLocator.DependencySet dependencySet = dependencySet(); final AlphaServiceProviderImpl alphaServiceProvider = new AlphaServiceProviderImpl(); final BetaServiceProviderImpl betaServiceProvider = new BetaServiceProviderImpl(); - serviceLocator.addService(alphaServiceProvider); + dependencySet.with(alphaServiceProvider); - assertThat(serviceLocator.getServicesOfType(AlphaServiceProviderImpl.class), + assertThat(dependencySet.providersOf(AlphaServiceProviderImpl.class), everyItem(isOneOf(alphaServiceProvider))); - 
assertThat(serviceLocator.getServicesOfType(AlphaServiceProvider.class), + assertThat(dependencySet.providersOf(AlphaServiceProvider.class), everyItem(Matchers.isOneOf(alphaServiceProvider))); - assertThat(serviceLocator.getServicesOfType(PluralServiceProvider.class), + assertThat(dependencySet.providersOf(PluralServiceProvider.class), everyItem(Matchers.isOneOf(alphaServiceProvider))); - serviceLocator.addService(betaServiceProvider); + dependencySet.with(betaServiceProvider); - assertThat(serviceLocator.getServicesOfType(BetaServiceProviderImpl.class), + assertThat(dependencySet.providersOf(BetaServiceProviderImpl.class), everyItem(isOneOf(betaServiceProvider))); - assertThat(serviceLocator.getServicesOfType(BetaServiceProvider.class), + assertThat(dependencySet.providersOf(BetaServiceProvider.class), everyItem(Matchers.isOneOf(betaServiceProvider))); - assertThat(serviceLocator.getServicesOfType(PluralServiceProvider.class), + assertThat(dependencySet.providersOf(PluralServiceProvider.class), everyItem(Matchers.isOneOf(alphaServiceProvider, betaServiceProvider))); } - - /** - * Ensures dependencies declared in {@link ServiceDependencies} on a {@code Service} subtype - * can be discovered. - */ - @Test - public void testDependencyDiscoveryOverService() throws Exception { - final ServiceLocator serviceLocator = new ServiceLocator(); - - final Collection> concreteServiceDependencies = - serviceLocator.identifyTransitiveDependenciesOf(ConcreteService.class); - assertThat(concreteServiceDependencies, - everyItem(Matchers.>isOneOf( - BetaService.class, - BetaServiceProvider.class, - InitialService.class, - FooService.Provider.class, - BarService.Provider.class, - AlphaService.class, - AlphaServiceProvider.class, - FoundationService.Provider.class - ))); - } - - /** - * Ensures dependencies declared in {@link ServiceDependencies} on a non-{@code Service} type - * can be discovered. 
- */ - @Test - public void testDependencyDiscoveryOverNonService() throws Exception { - final ServiceLocator serviceLocator = new ServiceLocator(); - - final Collection> nonServiceDependencies = - serviceLocator.identifyTransitiveDependenciesOf(NotAService.class); - System.out.printf("NotAService dependencies : %s%n", nonServiceDependencies); - assertThat(nonServiceDependencies, - everyItem(Matchers.>isOneOf( - BetaService.class, - BetaServiceProvider.class, - AlphaService.class, - AlphaServiceProvider.class - ))); - } - } class StartStopCounter { diff --git a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java b/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java index 1ab7da22c4..b6247263c5 100644 --- a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java +++ b/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java @@ -37,8 +37,10 @@ import org.ehcache.core.spi.services.TestProvidedService; import org.ehcache.core.spi.services.TestService; import org.hamcrest.CoreMatchers; +import org.junit.Ignore; import org.junit.Test; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; @@ -60,14 +62,14 @@ public class ServiceLocatorTest { @Test public void testClassHierarchies() { - ServiceLocator provider = new ServiceLocator(); + ServiceLocator.DependencySet dependencySet = dependencySet(); final Service service = new ChildTestService(); - provider.addService(service); - assertThat(provider.getService(FooProvider.class), sameInstance(service)); + dependencySet.with(service); + assertThat(dependencySet.providerOf(FooProvider.class), sameInstance(service)); final Service fancyCacheProvider = new FancyCacheProvider(); - provider.addService(fancyCacheProvider); + dependencySet.with(fancyCacheProvider); - final 
Collection servicesOfType = provider.getServicesOfType(CacheProvider.class); + final Collection servicesOfType = dependencySet.providersOf(CacheProvider.class); assertThat(servicesOfType, is(not(empty()))); assertThat(servicesOfType.iterator().next(), sameInstance(fancyCacheProvider)); } @@ -81,8 +83,7 @@ public Enumeration getResources(String name) throws IOException { } }); - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.getService(TestService.class); + dependencySet().with(TestService.class).build().getService(TestService.class); } @Test @@ -90,7 +91,7 @@ public void testAttemptsToStopStartedServicesOnInitFailure() { Service s1 = new ParentTestService(); FancyCacheProvider s2 = new FancyCacheProvider(); - ServiceLocator locator = new ServiceLocator(s1, s2); + ServiceLocator locator = dependencySet().with(s1).with(s2).build(); try { locator.startAllServices(); fail(); @@ -108,7 +109,7 @@ public void testAttemptsToStopAllServicesOnCloseFailure() { Service s2 = mock(FooProvider.class); Service s3 = mock(CacheLoaderWriterProvider.class); - ServiceLocator locator = new ServiceLocator(s1, s2, s3); + ServiceLocator locator = dependencySet().with(s1).with(s2).with(s3).build(); try { locator.startAllServices(); } catch (Exception e) { @@ -132,7 +133,7 @@ public void testAttemptsToStopAllServicesOnCloseFailure() { public void testStopAllServicesOnlyStopsEachServiceOnce() throws Exception { Service s1 = mock(CacheProvider.class, withSettings().extraInterfaces(CacheLoaderWriterProvider.class)); - ServiceLocator locator = new ServiceLocator(s1); + ServiceLocator locator = dependencySet().with(s1).build(); try { locator.startAllServices(); } catch (Exception e) { @@ -145,7 +146,7 @@ public void testStopAllServicesOnlyStopsEachServiceOnce() throws Exception { @Test public void testCanOverrideDefaultServiceFromServiceLoader() { - ServiceLocator locator = new ServiceLocator(new ExtendedTestService()); + ServiceLocator locator = dependencySet().with(new 
ExtendedTestService()).build(); TestService testService = locator.getService(TestService.class); assertThat(testService, instanceOf(ExtendedTestService.class)); } @@ -153,8 +154,8 @@ public void testCanOverrideDefaultServiceFromServiceLoader() { @Test public void testCanOverrideServiceDependencyWithoutOrderingProblem() throws Exception { final AtomicBoolean started = new AtomicBoolean(false); - ServiceLocator serviceLocator = new ServiceLocator(new TestServiceConsumerService()); - serviceLocator.addService(new TestService() { + ServiceLocator serviceLocator = dependencySet().with(new TestServiceConsumerService()) + .with(new TestService() { @Override public void start(ServiceProvider serviceProvider) { started.set(true); @@ -164,7 +165,7 @@ public void start(ServiceProvider serviceProvider) { public void stop() { // no-op } - }); + }).build(); serviceLocator.startAllServices(); assertThat(started.get(), is(true)); } @@ -200,12 +201,12 @@ public void stop() { Consumer1 consumer1 = spy(new Consumer1()); Consumer2 consumer2 = new Consumer2(); - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator.DependencySet dependencySet = dependencySet(); // add some services - serviceLocator.addService(consumer1); - serviceLocator.addService(consumer2); - serviceLocator.addService(new TestService() { + dependencySet.with(consumer1); + dependencySet.with(consumer2); + dependencySet.with(new TestService() { @Override public void start(ServiceProvider serviceProvider) { } @@ -217,7 +218,8 @@ public void stop() { }); // simulate what is done in ehcachemanager - serviceLocator.loadDependenciesOf(TestServiceConsumerService.class); + dependencySet.with(TestService.class); + ServiceLocator serviceLocator = dependencySet.build(); serviceLocator.startAllServices(); serviceLocator.stopAllServices(); @@ -232,12 +234,13 @@ public void stop() { @Test public void testRedefineDefaultServiceWhileDependingOnIt() throws Exception { - ServiceLocator serviceLocator = new 
ServiceLocator(new YetAnotherCacheProvider()); + ServiceLocator serviceLocator = dependencySet().with(new YetAnotherCacheProvider()).build(); serviceLocator.startAllServices(); } @Test + @Ignore public void testCircularDeps() throws Exception { final class StartStopCounter { @@ -311,7 +314,7 @@ public void stop() { } } - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator.DependencySet dependencySet = dependencySet(); Consumer1 consumer1 = new Consumer1(); Consumer2 consumer2 = new Consumer2(); @@ -319,11 +322,12 @@ public void stop() { DependsOnMe dependsOnMe = new DependsOnMe(); // add some services - serviceLocator.addService(consumer1); - serviceLocator.addService(consumer2); - serviceLocator.addService(myTestProvidedService); - serviceLocator.addService(dependsOnMe); + dependencySet.with(consumer1); + dependencySet.with(consumer2); + dependencySet.with(myTestProvidedService); + dependencySet.with(dependsOnMe); + ServiceLocator serviceLocator = dependencySet.build(); // simulate what is done in ehcachemanager serviceLocator.startAllServices(); diff --git a/core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java b/core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java index 2ab77db6a3..ae223eac4e 100644 --- a/core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java +++ b/core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java @@ -31,6 +31,8 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; +import static java.util.Arrays.asList; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -69,14 +71,13 @@ public String toString() { public void testSelectStoreProvider() throws Exception { final TestBaseProvider expectedProvider = new PrimaryProvider1(); - final TestBaseProvider[] storeProviders = { + 
Collection storeProviders = asList( new SecondaryProvider1(), new ZeroProvider(), expectedProvider - }; - - final ServiceLocator serviceLocator = new ServiceLocator(storeProviders); + ); + final ServiceLocator serviceLocator = dependencySet().with(storeProviders).build(); final Store.Provider selectedProvider = StoreSupport.selectStoreProvider(serviceLocator, Collections.>singleton(anyResourceType), Collections.>emptyList()); @@ -92,15 +93,15 @@ public void testSelectStoreProvider() throws Exception { public void testSelectStoreProviderMultiple() throws Exception { final TestBaseProvider expectedProvider = new PrimaryProvider1(); - final TestBaseProvider[] storeProviders = { + final Collection storeProviders = asList( new SecondaryProvider1(), new ZeroProvider(), expectedProvider, new SecondaryProvider2(), new PrimaryProvider2() - }; + ); - final ServiceLocator serviceLocator = new ServiceLocator(storeProviders); + final ServiceLocator serviceLocator = dependencySet().with(storeProviders).build(); try { StoreSupport.selectStoreProvider(serviceLocator, @@ -119,10 +120,8 @@ public void testSelectStoreProviderMultiple() throws Exception { @Test public void testSelectStoreProviderNoProviders() throws Exception { - - final ServiceLocator serviceLocator = new ServiceLocator(); try { - StoreSupport.selectStoreProvider(serviceLocator, + StoreSupport.selectStoreProvider(dependencySet().build(), Collections.>singleton(anyResourceType), Collections.>emptyList()); fail(); @@ -154,13 +153,13 @@ public int getTierHeight() { } }; - final TestBaseProvider[] storeProviders = { + final Collection storeProviders = asList( new SecondaryProvider1(), new ZeroProvider(), new PrimaryProvider1() - }; + ); - final ServiceLocator serviceLocator = new ServiceLocator(storeProviders); + final ServiceLocator serviceLocator = dependencySet().with(storeProviders).build(); try { StoreSupport.selectStoreProvider(serviceLocator, Collections.>singleton(otherResourceType), @@ -252,4 +251,4 @@ public 
void stop() { throw new UnsupportedOperationException("TestBaseProvider.stop not implemented"); } } -} \ No newline at end of file +} diff --git a/demos/00-NoCache/src/main/resources/logback.xml b/demos/00-NoCache/src/main/resources/logback.xml new file mode 100644 index 0000000000..4ea574f5f5 --- /dev/null +++ b/demos/00-NoCache/src/main/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + diff --git a/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml b/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml index 627a8c4f83..f9df833ddb 100755 --- a/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml +++ b/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml @@ -14,9 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. --> - - + org.ehcache.demos.peeper.PeeperServletContextListener @@ -31,5 +32,5 @@ PeeperServlet /* - - \ No newline at end of file + + diff --git a/demos/01-CacheAside/src/main/resources/logback.xml b/demos/01-CacheAside/src/main/resources/logback.xml new file mode 100644 index 0000000000..4ea574f5f5 --- /dev/null +++ b/demos/01-CacheAside/src/main/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + diff --git a/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml b/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml index 207674fa34..2993f41a64 100755 --- a/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml +++ b/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml @@ -14,9 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
--> - - + org.ehcache.demos.peeper.PeeperServletContextListener @@ -32,4 +33,4 @@ /* - \ No newline at end of file + diff --git a/demos/build.gradle b/demos/build.gradle index 44a858b5e3..b85f66cd92 100644 --- a/demos/build.gradle +++ b/demos/build.gradle @@ -1,8 +1,26 @@ +plugins { + id 'org.akhikhl.gretty' version '1.4.0' +} + subprojects { + configurations.all { + resolutionStrategy { + // It seems jetty has some internal conflict and so those need to be forced + force 'org.ow2.asm:asm:5.0.3', 'org.ow2.asm:asm-commons:5.0.3', 'org.glassfish:javax.el:3.0.1-b08' + } + } + apply plugin: 'war' - apply plugin: 'jetty' + apply plugin: 'org.akhikhl.gretty' + + gretty { + port = 8080 + contextPath = '/' + servletContainer = 'jetty9' + } dependencies { - compile 'ch.qos.logback:logback-classic:1.0.13', 'javax.servlet:servlet-api:2.5', 'com.h2database:h2:1.4.186', project(':impl') + compile 'javax.servlet:servlet-api:2.5', project(':impl') + runtime 'ch.qos.logback:logback-classic:1.1.3', 'com.h2database:h2:1.4.192' } -} \ No newline at end of file +} diff --git a/dist/build.gradle b/dist/build.gradle index f762212b14..cca9c9d950 100644 --- a/dist/build.gradle +++ b/dist/build.gradle @@ -28,5 +28,5 @@ dependencies { apply plugin: EhDistribute dependencies { - shadow "org.slf4j:slf4j-api:$parent.slf4jVersion" -} \ No newline at end of file + shadowCompile "org.slf4j:slf4j-api:$parent.slf4jVersion" +} diff --git a/dist/gradle.properties b/dist/gradle.properties index 2b9a7a0d0d..944561ba5a 100644 --- a/dist/gradle.properties +++ b/dist/gradle.properties @@ -14,6 +14,6 @@ # limitations under the License. 
# -subPomName = Ehcache 3 Jar Distribution +subPomName = Ehcache subPomDesc = End-user ehcache3 jar artifact javadocExclude = **/core/**, **/impl/**, **/xml/**, **/jsr107/**, **/transactions/**, **/management/**, **/tck/** diff --git a/docs/build.gradle b/docs/build.gradle index 042b896d65..a980e66ef1 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -26,6 +26,10 @@ buildscript { apply plugin: 'org.asciidoctor.convert' +configurations.asciidoctor.dependencies.matching({it.group == 'org.asciidoctor' && it.name == 'asciidoctorj-groovy-dsl'}).all { + exclude group:'org.asciidoctor', module:'asciidoctorj' +} + task copyCSS(type: Copy) { from ('css') { include '**' diff --git a/docs/src/docs/asciidoc/user/107.adoc b/docs/src/docs/asciidoc/user/107.adoc index 944562009c..08ce0e657d 100644 --- a/docs/src/docs/asciidoc/user/107.adoc +++ b/docs/src/docs/asciidoc/user/107.adoc @@ -1,9 +1,9 @@ --- --- = The Ehcache 3.x JSR-107 Provider -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -11,15 +11,15 @@ endif::notBuildingForSite[] == JCache overview -JCache (aka JSR-107) specification defines the standard caching API for Java. +The JCache (aka JSR-107) specification defines the standard caching API for Java. The specification was developed under the Java Community Process v2.9 by an expert group including members from the Ehcache developer community. JCache provides a very simple API set that is easy to use and vendor neutral. Being one of the pioneers in the Java caching domain, Ehcache had to offer an implementation that is fully compliant with the JCache specification. -For years the biggest problems that application developers have faced while wanting to try cache implementations by different vendors is the stark contrast in the APIs offered by these vendors. 
-Developers were forced to rewrite a whole lot of their caching related code in an application just to try out a new caching solution. -This lead to developers sticking with what they had as the bar to investigating other products was too high. +For years, the biggest problem that application developers have faced while wanting to try cache implementations by different vendors is the stark contrast in the APIs offered by these vendors. +Developers were forced to rewrite a lot of their caching related code in an application just to try out a new caching solution. +This leads to developers sticking with what they had, as the bar to investigating other products was too high. The availability of the JCache specification gives real added value for developers as there is now a standard caching API they can use. So it is easier for an application developer to switch between products by different vendors and choose the one that suits them best without changing a single line of their application code interacting with caches. @@ -37,11 +37,11 @@ In addition to the `Cache` interface, JCache specification has defined two more Applications need to use a `CacheManager` to create/retrieve a `Cache`. Similarly a `CachingProvider` is required to get/access a `CacheManager`. -Here is a sample code that demonstrates the usage of basic JCache configuration APIs: +Here is some sample code that demonstrates the usage of the basic JCache configuration APIs: [source,java] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=basicConfigurationExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=basicConfigurationExample] ---- <1> Retrieves the default `CachingProvider` implementation from the application's classpath. This method will work if and only if there is only one JCache implementation jar in the classpath. 
@@ -50,7 +50,7 @@ include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107Configuratio <2> Retrieve the default `CacheManager` instance using the provider. <3> Create a cache configuration using `MutableConfiguration`... <4> with key type and value type as `Long` and `String` respectively... -<5> configured to store the cache entries by reference(not by value)... +<5> configured to store the cache entries by reference (not by value)... <6> and with an expiry time of one minute defined for entries from the moment they are created. <7> Using the cache manager, create a cache named `jCache` with the configuration created in step <3> <8> Put some data into the cache. @@ -58,39 +58,55 @@ include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107Configuratio == JSR-107 and Ehcache configuration integration -As mentioned already the JCache specification offers a minimal set of configuration that is ideal for an in-memory cache. -But Ehcache native APIs support topologies that are much more complex and provides more features. -At times application developers might want to configure caches that are much complex (in terms of topology or features) -than the ones that JCache `MutableConfiguration` permits and still be able to use the JCache's caching APIs. -Ehcache provides several ways to achieve that and this section covers the same. +As mentioned already, the JCache specification offers a minimal set of configurations that is ideal for an in-memory cache. +But Ehcache native APIs support topologies that are much more complex and provide more features. +At times, application developers might want to configure caches that are much complex (in terms of topology or features) +than the ones that JCache `MutableConfiguration` permits and still be able to use JCache's caching APIs. +Ehcache provides several ways to achieve this, as described in the following section. 
=== Starting from JSR-107 created caches -When you create a `Cache` on a `CacheManager` using a `MutableConfiguration` - that is you only use JSR-107 types - -you can still get to the underlying Ehcache `RuntimeCacheConfiguration`: +When you create a `Cache` on a `CacheManager` using a `MutableConfiguration` - in other words, using only JSR-107 types - +you can still get to the underlying Ehcache `CacheRuntimeConfiguration`: [source,java,indent=0] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=mutableConfigurationExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=mutableConfigurationExample] ---- <1> Create a JSR-107 cache using the `MutableConfiguration` from the specification <2> Get to the JSR-107 `CompleteConfiguration` <3> Get to the Ehcache JSR-107 configuration bridge -<4> Unwrap to the Ehcache `RuntimeCacheConfiguration` type +<4> Unwrap to the Ehcache `CacheRuntimeConfiguration` type === Building the configuration using Ehcache APIs +==== CacheManager level configuration + +If you need to configure features at the `CacheManager` level, like persistence directory, you will have to use provider specific APIs. + +The way you do this is as follows: + +[source,java,indent=0] +---- +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheCacheManagerConfigurationExample] +---- +<1> Cast the `CachingProvider` into the Ehcache specific implementation `org.ehcache.jsr107.EhcacheCachingProvider`, +<2> Create a configuration using the specific Ehcache `DefaultConfiguration` and pass it some `CacheManager` level configurations, +<3> Create the `CacheManager` using the method that takes an Ehcache configuration in parameter. + +==== Cache level configuration + You can also create a JSR-107 `Cache` using an Ehcache `CacheConfiguration`. 
When using this mechanism, no JSR-107 `CompleteConfiguration` is used and so you cannot get to one. [source,java,indent=0] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheBasedConfigurationExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheBasedConfigurationExample] ---- -<1> Create an Ehcache `CacheConfiguration` - through a builder as shown here or even through XML +<1> Create an Ehcache `CacheConfiguration` - through a builder as shown here or alternatively use an XML configuration (as described in the following section). <2> Use the configuration with JSR-107 API by wrapping it <3> Get back to the Ehcache `CacheConfiguration` ... -<4> or to the runtime configuration even. +<4> or even to the runtime configuration. <5> No JSR-107 `CompleteConfiguration` is available in this context === Getting JSR-107 caches configured through Ehcache XML @@ -102,12 +118,12 @@ Find below the XML configuration followed by the code to use it from JSR-107: [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml[] +include::{sourcedir32}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml[] ---- [source,java,indent=0] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107UsingXMLConfigExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107UsingXMLConfigExample] ---- <1> Invoking `javax.cache.spi.CachingProvider.getCacheManager(java.net.URI, java.lang.ClassLoader)` <2> and passing in a URI that resolves to an Ehcache XLM configuration file. 
@@ -119,10 +135,10 @@ NOTE: You can also use the `CachingProvider.getCacheManager()` method that takes The `URI` and `ClassLoader` used to configure the `CacheManager` will then use the vendor specific values returned by `CachingProvider.getDefaultURI` and `.getDefaultClassLoader` respectively. -==== Control JSR-107 MBeans from XML +==== Controlling JSR-107 MBeans from XML When using Ehcache XML, you may want to enable management and / or statistics MBeans for JSR-107 caches. -This is giving you control over the following: +This gives you control over the following: * `javax.cache.configuration.CompleteConfiguration.isStatisticsEnabled` * `javax.cache.configuration.CompleteConfiguration.isManagementEnabled` @@ -131,7 +147,7 @@ You can do this at two different levels: [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml[lines=17..-1] +include::{sourcedir32}/107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml[lines=17..-1] ---- <1> Using the JSR-107 service extension, you can enable MBeans by default @@ -153,26 +169,26 @@ constraint. All that's needed is adding a `jsr107` service in your XML configura [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml[] +include::{sourcedir32}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml[] ---- <1> First, declare a namespace for the 107 extension, e.g. `jsr107` -<2> Within a `service` element at the top of you configuration, add a `jsr107:defaults` element +<2> Within a `service` element at the top of your configuration, add a `jsr107:defaults` element <3> The element takes an optional attribute `default-template`, which references the `cache-template` to use for all `javax.cache.Cache` created by the application at runtime using `javax.cache.CacheManager.createCache`. 
In - this example, the default `cache-template` used will be `tinyCache`, meaning that atop of their particular config, + this example, the default `cache-template` used will be `tinyCache`, meaning that in addition to their particular configuration, programmatically created `Cache` instances will have their capacity constrained to 20 entries. -<4> Nested within the `jsr107:defaults`, add specific `cache-templates` to use for given named `Cache`, e.g. when +<4> Nested within the `jsr107:defaults`, add specific `cache-templates` to use for the given named `Cache`. So, for example, when creating the `Cache` named `foos` at runtime, Ehcache will enhance its config, giving it a capacity of 2000 entries, as well as insuring both key and value types are `String`. -NOTE: See <> for complete definition +NOTE: See <> for a complete definition Using the above configuration, you can not only supplement but also override the configuration of JSR-107 created caches without modifying the application code. [source,java,indent=0] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107SupplementWithTemplatesExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107SupplementWithTemplatesExample] ---- <1> Assume existing JSR-107 configuration code, which is store-by-value by default <2> that creates JSR-107 `Cache` @@ -180,25 +196,25 @@ include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107Configuratio <4> you could verify that the template configured capacity is applied to the cache and returns _20_ here. <5> The cache template will override the JSR-107 cache's store-by-value config to store-by-ref since the `byRefTemplate` template that is used to create the cache is configured explicitly using `IdentityCopier`. 
-<6> Templates will also override JSR-107 config, see here a configuration with TTL 1 minute -<7> used to create a cache where the template says TTL 2 minutes. -<8> And we can indeed verify that the template provided configuration has been applied, duration will be _2 minutes_ and not _1_. +<6> Templates will also override the JSR-107 configuration, in this case using a configuration with TTL 1 minute +<7> used to create a cache where the template sets the TTL to 2 minutes. +<8> And we can indeed verify that the configuration provided in the template has been applied; the duration will be _2 minutes_ and not _1_. <9> One drawback of this is that when getting at the `CompleteConfiguration`, you no longer have access to the factories from JSR-107. NOTE: As mentioned in step 5, in order to override store-by-value configuration of a JSR-107 cache using templates you can explicitly configure the template using `IdentityCopier`. But the usage of `IdentityCopier` is not mandatory to get a store-by-ref cache. You can use any custom copier implementation that does not perform any "copying" but returns -the exact same reference that gets passed into the copy methods. `IdentityCopier` is just one that we have provided to -make your life easier. +the exact same reference that gets passed into the copy methods. `IdentityCopier` is just an example that we have +provided for your convenience. == A word on defaults -Ehcache 3 and Ehcache 3 through JCache do not always agree on default behavior. +Ehcache 3 used natively and Ehcache 3 through JCache do not always agree on default behavior. While native Ehcache 3 can behave the way JCache specifies, depending on the used configuration mechanism, you may see differences in defaults. === _by-reference_ or _by-value_ -Ehcache 3 and Ehcache 3 through JCache disagree on the default mode for heap only caching. +Native Ehcache 3 and Ehcache 3 through JCache disagree on the default mode for heap only caching. 
==== Ehcache configuration with JCache `MutableConfiguration` @@ -220,7 +236,7 @@ See the documentation < Create a `CacheEventListenerConfiguration` using the builder indicating the listener and the events to receive (in this case create and update events) @@ -50,7 +50,7 @@ Eviction and expiration events can be triggered by both internal processes and b |{K, V} | remove(K) | {} | removed {K, V, null} |================================================================================================= -NOTE: Ehcache provides an abstract class `CacheEventAdapter` for convenient implementation of event listeners when you are interested only on specific events. +NOTE: Ehcache provides an abstract class `CacheEventAdapter` for convenient implementation of event listeners when you are interested only in specific events. == Registering Event Listeners during runtime @@ -59,7 +59,7 @@ Cache event listeners may also be added and removed while the cache is being use [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=registerListenerAtRuntime] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=registerListenerAtRuntime] ---- <1> Create a `CacheEventListener` implementation instance. 
@@ -75,5 +75,5 @@ Advanced users may want to tune the level of concurrency which may be used for d [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=configuringEventProcessingQueues] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=configuringEventProcessingQueues] ---- diff --git a/docs/src/docs/asciidoc/user/caching-concepts.adoc b/docs/src/docs/asciidoc/user/caching-concepts.adoc index d013ae1340..a92443926c 100644 --- a/docs/src/docs/asciidoc/user/caching-concepts.adoc +++ b/docs/src/docs/asciidoc/user/caching-concepts.adoc @@ -1,9 +1,9 @@ --- --- = Concepts Related to Caching -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -14,8 +14,8 @@ endif::notBuildingForSite[] === Data Freshness -Data _freshness_ refers to aspect of a copy of data (e.g. in a cache) being up-to-date with the source version of the -data (e.g. in the syste-of-record (SoR)). A _stale_ copy is considered to be out of sync (or likely to be out of +Data _freshness_ refers to how up-to-date a copy of data (e.g. in a cache) is compared to the source version of the +data (e.g. in the system-of-record (SoR)). A _stale_ copy is considered to be out of sync (or likely to be out of sync) with the SoR. Databases (and other SORs) weren't built with caching outside of the database in mind, and therefore don't normally @@ -27,7 +27,7 @@ components that have loaded data from the SoR have no direct way of ensuring tha Ehcache can assist you with reducing the likelihood that stale data is used by your application by _expiring_ cache entries after some amount of configured time. Once expired, the entry is automatically removed from the cache. 
-For instance the cache could be configured to expire entries five seconds after they are put in - which is a +For instance the cache could be configured to expire entries five seconds after they are put into the cache - which is a time-to-live _TTL_ setting. Or to expire entries 17 seconds after the last time the entry was retrieved from the cache - which is a time-to-idle _TTI_ setting. @@ -37,27 +37,31 @@ and technical decision based upon the requirements and assumptions of your appli [[storage-tiers]] == Storage Tiers -You can configure Ehcache to use various data storage areas. When a cache is configured to use more than one data -store, they are referred to as tiers. +You can configure Ehcache to use various data storage areas. +When a cache is configured to use more than one storage area, those areas are arranged and managed as `tiers`. +They are organized in a hierarchy, with the lowest tier being called the `authority` tier and the others being part of the `caching` tier. +The caching tier can itself be composed of more than one storage area. +The _hottest_ data is kept in the caching tier, which is typically less abundant but faster than the authority tier. +All the data is kept in the authority tier, which is slower but more abundant. Data stores supported by Ehcache include: -* On-Heap Store – Utilizes Java's on-heap RAM memory to store cache entries. This tier utilizes the same heap memory as +* On-Heap Store - Utilizes Java's on-heap RAM memory to store cache entries. This tier utilizes the same heap memory as your Java application, all of which must be scanned by the JVM's garbage collector. The more heap space your JVM -utilizes the more your application's performance will be impacted by garbage collection pauses. This store is +utilizes, the more your application's performance will be impacted by garbage collection pauses. This store is extremely fast, but is typically your most limited storage resource. 
-* Off-Heap Store – Limited in size only by available RAM (tested to as much as 6TB on a single machine!). Not subject -to Java garbage collection (GC). Is quite fast, yet slower than the On-Heap Store because data must be moved off and -on the JVM's heap as it is stored and re-accessed. -* Disk Store – Utilizes a disk (filesystem) to store cache entries. This type of storage resource is typically very -abundant but much slower than the RAM-based stores. +* Off-Heap Store - Limited in size only by available RAM. +Not subject to Java garbage collection (GC). +Is quite fast, yet slower than the On-Heap Store because data must be moved to and from the JVM's heap as it is stored and re-accessed. +* Disk Store - Utilizes a disk (file system) to store cache entries. +This type of storage resource is typically very abundant but much slower than the RAM-based stores. +* Clustered Store - This data store is a cache on a remote server. +The remote server may optionally have a failover server providing improved high availability. +Since clustered storage comes with performance penalties due to such factors as network latency as well as for establishing client/server consistency, +this tier, by nature, is slower than local off-heap storage. image::EhcacheTerminology.png[] -When a cache is configured to use more than one storage area, those areas are arranged and managed as tiers - where -the _hottest_ (most recently accessed) data is kept in the faster (and typically less abundant) tiers, and data that -is less hot remains in the slower (and more abundant) tiers. - == Topology Types === Standalone @@ -71,7 +75,7 @@ same application, then their caches are completely independent. The data is held in a remote server (or array of servers) with a subset of hot data held in each application node. This topology offers offers a selection of consistency options. A distributed topology is the recommended approach in a clustered or scaled-out application environment. 
-It provides the highest level of performance, availability, and scalability. +It provides the best combination of performance, availability, and scalability. image::ClusteredEhcacheTopology.png[] diff --git a/docs/src/docs/asciidoc/user/caching-patterns.adoc b/docs/src/docs/asciidoc/user/caching-patterns.adoc index 6f1801f06c..9f39cb1641 100644 --- a/docs/src/docs/asciidoc/user/caching-patterns.adoc +++ b/docs/src/docs/asciidoc/user/caching-patterns.adoc @@ -1,9 +1,9 @@ --- --- = Cache Usage Patterns -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] diff --git a/docs/src/docs/asciidoc/user/caching-terms.adoc b/docs/src/docs/asciidoc/user/caching-terms.adoc index 0b3ca8b16e..dae27e66a0 100644 --- a/docs/src/docs/asciidoc/user/caching-terms.adoc +++ b/docs/src/docs/asciidoc/user/caching-terms.adoc @@ -1,9 +1,9 @@ --- --- = Terms Related to Caching -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -18,7 +18,7 @@ computation. Data that is already in the cache can be repeatedly accessed with m resources. === Cache Entry -A cache entry is a key and its mapped data value within the cache. +A cache entry consists of a key and its mapped data value within the cache. === Cache Hit When a data entry is requested from cache and the entry exists for the given key, it is referred to as a cache hit @@ -34,14 +34,14 @@ system-of-record (SOR). The SOR is often a traditional database, although it mig other reliable long-term storage. It can also be a conceptual component such as an expensive computation. 
=== Eviction
-The removal of entries from the cache in order to make room for newer entries (typically when the cache has ran out of
+The removal of entries from the cache in order to make room for newer entries (typically when the cache has run out of
data storage capacity).

=== Expiration
-The removal of entries from the cache after the passing of some amount of time, typically as a strategy to avoid stale
+The removal of entries from the cache after some amount of time has passed, typically as a strategy to avoid stale
data in the cache.

=== Hot Data
Data that has recently been used by an application is very likely to be accessed again soon. Such data is considered
-_hot_. A cache may attempt to keep the _most hot_ data most quickly available, while attemping to choose the
+_hot_. A cache may attempt to keep the _hottest_ data most quickly available, while attempting to choose the
_least hot_ data for eviction.
diff --git a/docs/src/docs/asciidoc/user/clustered-cache.adoc b/docs/src/docs/asciidoc/user/clustered-cache.adoc
index dba59be53f..60ecb57d42 100644
--- a/docs/src/docs/asciidoc/user/clustered-cache.adoc
+++ b/docs/src/docs/asciidoc/user/clustered-cache.adoc
@@ -1,9 +1,9 @@
---
---
= Clustered Cache
-ifndef::sourcedir31[]
+ifndef::sourcedir32[]
include::common.adoc[]
-endif::sourcedir31[]
+endif::sourcedir32[]

ifdef::notBuildingForSite[]
include::menu.adoc[]
@@ -34,31 +34,31 @@ Caches can reserve a storage area for their clustered tiers within these server

Clustered Tier Manager::
The Ehcache _Clustered Tier Manager_ is the server-side component that gives clustering capabilities to a cache manager.
Cache managers connect to these to get access to the server's storage resources so that the clustered tiers of caches defined in them can consume those resources.
-Ehcache _Clustered Tier Manager_ at the server side are identified by a unique identifier.
+Each Ehcache _Clustered Tier Manager_ at the server side has a unique identifier.
Using it, multiple cache managers can connect to the same clustered tier manager to share cache data. The clustered tier manager is also responsible for managing the storage of the clustered tier of caches, with the following different options. Dedicated pool:: -Dedicated pools are a fixed-amount of storage pools allocated to the clustered tiers of caches. +Dedicated pools are storage pools with a fixed amount of storage allocated to the clustered tier of caches. A dedicated amount of storage is allocated directly from server off-heap resources to these pools. And this storage space is used exclusively by a given clustered tier. Shared pool:: -Shared pools are also fixed-amount storage pools, but can be shared by the clustered tiers of multiple caches. +Shared pools are also storage pools with a fixed amount of storage, but can be shared by the clustered tiers of multiple caches. As in the case of dedicated pools, shared pools are also carved out from server off-heap resources. The storage available in these shared pools is strictly shared. -That is no cluster tier can ask for a fixed-amount of storage from a shared pool. +In other words, no cluster tier can ask for a fixed amount of storage from a shared pool. + Sharing of storage via shared pools does not mean that the data is shared. -That is, if two caches are using a shared pool as their clustered tier, the data of each cache is still isolated but the underlying storage is shared. -Consequently, when resource capacity is reached and triggers eviction, the evicted mapping can come from any of the clustered tiers sharing the pool. +This means that if two caches are using a shared pool as their clustered tier, the data of each cache is still isolated but the underlying storage is shared. +Consequently, when resource capacity is reached and eviction is triggered, the evicted mapping can come from any of the clustered tiers sharing the pool. 
Here is a pictorial representation of the concepts explained above: image::StoragePools.png[] [[start-server]] -== Starting Terracotta server +== Starting the Terracotta server You can start the server with the following configuration. It contains the bare minimum configuration required for the samples in the rest of the document to work. @@ -66,7 +66,7 @@ Detailed instructions on how to configure and start a Terracotta server array ca [source,xml] ---- -include::{sourcedir31}/clustered/client/src/test/resources/configs/docs/tc-config.xml[] +include::{sourcedir32}/clustered/client/src/test/resources/configs/docs/tc-config.xml[] ---- The above configuration defines two named _server off-heap resources_: @@ -76,7 +76,7 @@ The above configuration defines two named _server off-heap resources_: The rest of the document explains in detail how you can configure cache managers and caches to consume the server's off-heap resources. -Assuming that you have the clustered EhCache kit available locally, start with extracting the *ehcache-clustered* kit. +Assuming that you have the clustered Ehcache kit available locally, start with extracting the *ehcache-clustered* kit. Change to your extracted directory and then execute the *start-tc-server* script as below to start the Terracotta server with the above configuration: On Windows: @@ -98,7 +98,7 @@ NOTE: You will need to have `JAVA_HOME` set to JDK8 while starting the Terracott Check for the below `INFO` log to confirm if the server started successfully, `Terracotta Server instance has started up as ACTIVE node on 0:0:0:0:0:0:0:0:9510 successfully, and is now ready for work.` -== Creating cache manager with clustering capabilities +== Creating a cache manager with clustering capabilities After <>, you can now proceed to create the cache manager. For creating the cache manager with clustering support you will need to provide the clustering service configuration. 
@@ -106,13 +106,13 @@ Here is a code sample that shows how to configure a cache manager with clusterin [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] ---- <1> Returns the `org.ehcache.config.builders.CacheManagerBuilder` instance; <2> Use the `ClusteringServiceConfigurationBuilder`{empty}'s static method `.cluster(URI)` for connecting the cache manager to the clustered storage at the URI specified that returns the clustering service configuration builder instance. - Sample URI provided in the example is pointing to the clustered storage instance named `my-application` on the Terracotta server (Assuming the server is running on localhost and port *9510*). + The sample URI provided in the example points to the clustered storage instance named `my-application` on the Terracotta server (Assuming the server is running on localhost and port *9510*). <3> Auto-create the clustered storage if it doesn't already exist. <4> Returns a fully initialized cache manager that can be used to create clustered caches. <5> Close the cache manager. @@ -123,7 +123,7 @@ This code sample demonstrates the usage of the concepts explained in the previou [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerWithServerSideConfigExample] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerWithServerSideConfigExample] ---- <1> `defaultServerResource(String)` on `ClusteringServiceConfigurationBuilder` instance sets the default server off-heap resource for the cache manager. 
@@ -131,21 +131,21 @@ include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/clie <2> Adds a resource pool for the cache manager with the specified name (`resource-pool-a`) and size (`28MB`) consumed out of the named server off-heap resource `secondary-server-resource`. A resource pool at the cache manager level maps directly to a shared pool at the server side. <3> Adds another resource pool for the cache manager with the specified name (`resource-pool-b`) and size (`32MB`). - Since the server resource identifier is not explicitly passed, this resource pool will be consumed out of default server resource provided in Step 3. + Since the server resource identifier is not explicitly passed, this resource pool will be consumed out of the default server resource provided in Step 3. This demonstrates that a cache manager with clustering support can have multiple resource pools created out of several server off-heap resources. <4> Provide the cache configuration to be created. -<5> `ClusteredResourcePoolBuilder.fixed(String , long , MemoryUnit)` allocates a fixed pool of storage to the cache from the specified server off-heap resource. - In this example, a fixed pool of 32MB is allocated for `clustered-cache` from `primary-server-resource`. -<6> `ClusteredResourcePoolBuilder.shared(String)`, passing the name of the resource pool specifies that `shared-cache-1` shares the storage resources with other caches using the same resource pool (`resource-pool-a`). +<5> `ClusteredResourcePoolBuilder.clusteredDedicated(String , long , MemoryUnit)` allocates a dedicated pool of storage to the cache from the specified server off-heap resource. + In this example, a dedicated pool of 32MB is allocated for `clustered-cache` from `primary-server-resource`. 
+<6> `ClusteredResourcePoolBuilder.clusteredShared(String)`, passing the name of the resource pool specifies that `shared-cache-1` shares the storage resources with other caches using the same resource pool (`resource-pool-a`). <7> Configures another cache (`shared-cache-2`) that shares the resource pool (`resource-pool-a`) with `shared-cache-1`. -<8> Creates fully initialized cache manager with the clustered caches. +<8> Creates a fully initialized cache manager with the clustered caches. == Ehcache Clustered Tier Manager Lifecycle When configuring a cache manager to connect to a clustered tier manager there are three possible connection modes: [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerLifecycle] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerLifecycle] ---- <1> In auto-create mode if no clustered tier manager exists then one is created with the supplied configuration. If it exists and its configuration matches the supplied configuration then a connection is established. @@ -162,17 +162,17 @@ If it does not exist then the cache manager will fail to initialize. [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheTieredExample] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheTieredExample] ---- -<1> Configuring heap tier for cache. -<2> Configuring clustered tier of fixed size from server off-heap resource using `ClusteredResourcePoolBuilder`. +<1> Configuring the heap tier for cache. +<2> Configuring the clustered tier of dedicated size from the server off-heap resource using `ClusteredResourcePoolBuilder`. 
The equivalent XML configuration is as follows: [source,xml,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=tieringSample] +include::{sourcedir32}/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=tieringSample] ---- <1> Specify the heap tier for cache. @@ -180,37 +180,37 @@ include::{sourcedir31}/clustered/client/src/test/resources/configs/docs/ehcache- === Specifying consistency level -Ehcache offers two level of consistency: +Ehcache offers two levels of consistency: Eventual:: This consistency level indicates that the visibility of a write operation is not guaranteed when the operation returns. Other clients may still see a stale value for the given key. However this consistency level guarantees that for a mapping `(K, V1)` updated to `(K, V2)`, once a client sees `(K, V2)` it will never see `(K, V1)` again. Strong:: -This consistency level provides strong visibility guarantees ensuring that when a write operation returns other clients will be able to observe it immediately. +This consistency level provides strong visibility guarantees, ensuring that when a write operation returns other clients will be able to observe it immediately. This comes with a latency penalty on the write operation required to give this guarantee. 
[source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheConsistency] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheConsistency] ---- -<1> Specify the consistency level through the use of additional service configuration, using _strong_ consistency here, +<1> Specify the consistency level through the use of an additional service configuration, using _strong_ consistency here, <2> With the consistency used above, this `put` operation will return only when all other clients have had the corresponding mapping invalidated. The equivalent XML configuration is as follows: [source,xml,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=consistencySample] +include::{sourcedir32}/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=consistencySample] ---- <1> Specify the consistency level through a custom service configuration from the `clustered` namespace. === Clustered Cache Expiry -Expiry in clustered caches work with an exception that `Expiry#getExpiryForAccess` is handled on a best effort basis for clustered tiers. +Expiry in clustered caches works with the exception that `Expiry#getExpiryForAccess` is handled on a best effort basis for clustered tiers. It may not be as accurate as in the case of local tiers. === Clustered Unspecified Inheritance @@ -221,7 +221,7 @@ When you create the second cache with the same alias in a different cache manage It will then inherit the clustered resource pool as it was configured when creating the clustered tier. This option provides many benefits. -The main benefit is it simplifies clustered configuration by allowing clustered resource pool configuration to be handled by one client, then all subsequent clients can inherit this configuration. 
+The main benefit is that it simplifies clustered configuration by allowing clustered resource pool configuration to be handled by one client, then all subsequent clients can inherit this configuration. In addition, it also reduces clustered pool allocation configuration errors. More importantly, sizing calculations only need to be done by one person and updated in one location. Thus any programmer can use the cache without having to worry about creating the right size resource pool allocations. @@ -230,11 +230,11 @@ Please review the example code below to see how this can be implemented. [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=unspecifiedClusteredCacheExample] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=unspecifiedClusteredCacheExample] ---- <1> Configure the first cache manager with auto create <2> Build a cache configuration for a clustered _dedicated_ resource pool <3> Create cache `my-dedicated-cache` using the cache configuration <4> Configure the second cache manager as _expecting_ (auto create off) <5> Build a cache configuration for a clustered _unspecified_ resource pool, which will use the previously configured clustered _dedicated_ resource pool. 
-<6> Create cache with the same name `my-dedicated-cache` and use the clustered _unspecified_ cache configuration +<6> Create a cache with the same name `my-dedicated-cache` and use the clustered _unspecified_ cache configuration diff --git a/docs/src/docs/asciidoc/user/common.adoc b/docs/src/docs/asciidoc/user/common.adoc index 94b73f7f46..c4111a2e32 100644 --- a/docs/src/docs/asciidoc/user/common.adoc +++ b/docs/src/docs/asciidoc/user/common.adoc @@ -1,6 +1,6 @@ --- --- -ifndef::sourcedir31[] +ifndef::sourcedir32[] :notBuildingForSite: true ifdef::basebackend-html[:outfilesuffix: .html] :source-highlighter: coderay @@ -10,9 +10,9 @@ ifdef::basebackend-html[:outfilesuffix: .html] :icons: font :iconfont-remote!: :iconfont-name: font-awesome.min -:sourcedir31: ../../../../../ +:sourcedir32: ../../../../../ :imagesdir: images :sectanchors: :idprefix: :idseparator: - -endif::sourcedir31[] +endif::sourcedir32[] diff --git a/docs/src/docs/asciidoc/user/eviction-advisor.adoc b/docs/src/docs/asciidoc/user/eviction-advisor.adoc index 7b50667dad..2b8d1b227c 100644 --- a/docs/src/docs/asciidoc/user/eviction-advisor.adoc +++ b/docs/src/docs/asciidoc/user/eviction-advisor.adoc @@ -1,9 +1,9 @@ --- --- = Eviction Advisor -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -17,19 +17,23 @@ NOTE: This is an advanced topic/feature that will not be of interest to most use You can affect which elements are selected for eviction from the cache by providing a class that implements the `org.ehcache.config.EvictionAdvisor` interface. -`EvictionAdvisor` implementations are invoked when Ehcache is attempting to evict entries from the cache +NOTE: Eviction advisors are not used for clustered storage tiers. 
+For example, in a cache with a heap tier and clustered storage tier,
+the heap tier will use the eviction advisor but the clustered storage tier will evict independently, irrespective of the eviction advisor.
+The description below applies to using an eviction advisor for the cache tiers other than a clustered storage tier.
+
+`EvictionAdvisor` implementations are invoked when Ehcache attempts to evict entries from the cache
(in order to make room for new entries) in order to determine whether the given entry should not be
considered a good candidate for eviction.
If the eviction is advised against, Ehcache will try to honor the preference of preserving that entry in the cache, though
there is no full guarantee of such.

[source,java,indent=0]
----
-include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cacheEvictionAdvisor]
+include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cacheEvictionAdvisor]
----

<1> Configure a constrained heap, as the eviction advisor is only relevant when mappings get evicted from the cache.
-<2> If you want to hint the eviction algorithm to advisor against the eviction of some mappings, you have to
-    configure an instance of `EvictionAdvisor`.
+<2> If you want to give the eviction algorithm a hint to advise against the eviction of some mappings, you have to configure an instance of `EvictionAdvisor`.

In this particular example, the `OddKeysEvictionAdvisor` class will advise against eviction of any key that is an odd number.
The cache is constrained to only be allowed to contain two entries, however the code has put three entries
@@ -37,7 +41,7 @@ into the cache - which will trigger capacity eviction.
By the time the cache manager gets closed, only mappings with odd keys should be left in the cache as their prime
candidacy for eviction would have been advised against.

-NOTE: Eviction advise status is computed when a mapping is written to the cache.
+NOTE: Eviction advisory status is computed when a mapping is written to the cache. This means that proper eviction advisor implementations are expected to be constant for a key-value pair. NOTE: Please keep in mind that configuring an eviction advisor can slow down eviction: the more often you advise against diff --git a/docs/src/docs/asciidoc/user/examples.adoc b/docs/src/docs/asciidoc/user/examples.adoc index 26a3b657c1..65660e5a4d 100644 --- a/docs/src/docs/asciidoc/user/examples.adoc +++ b/docs/src/docs/asciidoc/user/examples.adoc @@ -1,9 +1,9 @@ --- --- = Examples -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -13,11 +13,11 @@ endif::notBuildingForSite[] The `demo` directory in the Ehcache 3 sources includes a sample applications with two (2) implementations demonstrating Ehcache use. Implemented as a simple -browser-based web service, the sample application, Peeper, displays any messages (_peeps_) -previously entered and accepts new peeps recording the peeps in a database. The peeps +browser-based web service, the sample application Peeper displays any messages (_peeps_) +previously entered and accepts new peeps, recording the peeps in a database. The peeps database, shared among implementations of the Peeper application, is located at +$HOME/ehcache-demo-peeper.mv.db+. This file may be safely erased while the application -is not running. While running, information about the operation of Peeper application +is not running. While running, information about the operation of the Peeper application (database access, cache access, etc.) is written to the console. While the sample application may be run, the application is _very_ simplistic -- the code @@ -40,7 +40,7 @@ from the database to display the Peeper web page. 
To run this implementation: [source,bash] ---- cd ehcache3/demos/00-NoCache -../../gradlew jettyRun +../../gradlew appStart ---- This builds the necessary components, starts a http://eclipse.org/jetty/[Jetty] web service, @@ -66,7 +66,7 @@ Note the absence of indications of interactions with a cache. === Peeper with Cache-aside Caching -- +01-CacheAside+ The second sample, located in +demos/01-CacheAside+, is a version of the Peeper application -that makes use of Ehcache. As each peep is being read from the database (for display in the web +that makes use of Ehcache. As each peep is read from the database (for display in the web page), it is written to an Ehcache instance. If the Peeper web page is refreshed (without adding a new peep) or a new Peeper client connects, the peeps are read from the cache (instead of the database) to form the web page. If a new peep is posted, @@ -75,7 +75,7 @@ the cache is cleared. To run this implementation: [source,bash] ---- cd ehcache3/demos/01-CacheAside -../../gradlew jettyRun +../../gradlew appStart ---- This builds the necessary components, starts a http://eclipse.org/jetty/[Jetty] web service, @@ -104,5 +104,5 @@ Note the presence of the +Filling cache with peeps+, +Clearing peeps cache+, and [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/ehcache-example.xml[] +include::{sourcedir32}/107/src/test/resources/ehcache-example.xml[] ---- diff --git a/docs/src/docs/asciidoc/user/expiry.adoc b/docs/src/docs/asciidoc/user/expiry.adoc index 89b3472ebc..bd33a9e8b4 100644 --- a/docs/src/docs/asciidoc/user/expiry.adoc +++ b/docs/src/docs/asciidoc/user/expiry.adoc @@ -1,9 +1,9 @@ --- --- = Expiry -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -20,7 +20,7 @@ Expiry is configured at the cache level, in Java or in XML: [source,java,indent=0] ---- 
-include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] ---- <1> Expiry is configured at the cache level, so start by defining a cache configuration, @@ -28,7 +28,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t [source,xml,indent=0] ---- -include::{sourcedir31}/xml/src/test/resources/configs/docs/getting-started.xml[tags=expiry] +include::{sourcedir32}/xml/src/test/resources/configs/docs/getting-started.xml[tags=expiry] ---- <1> At the cache level, using the predefined _time-to-live_ again. @@ -38,7 +38,7 @@ Both Java and XML offer direct support for three types of expiry: [horizontal] no expiry:: this means cache mappings will never expire, time-to-live:: this means cache mappings will expire after a fixed duration following their creation, -time-to-idle:: this means cache mappings will expire after a fixed duration following their last access time. +time-to-idle:: this means cache mappings will expire after a fixed duration following the time they were last accessed. For Java, see `org.ehcache.expiry.Expirations` and the XSD for XML. @@ -46,11 +46,11 @@ Read on to implement your own expiration scheme. 
== Custom expiry -Support your own expiration scheme simply means implementing the `Expiry` interface: +Supporting your own expiration scheme simply means implementing the `Expiry` interface: [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/expiry/Expiry.java[lines=21..-1] +include::{sourcedir32}/api/src/main/java/org/ehcache/expiry/Expiry.java[lines=21..-1] ---- The main points to remember on the return value from these methods: @@ -71,7 +71,7 @@ In Java: [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=customExpiry] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=customExpiry] ---- <1> Simply pass your custom expiry instance into the cache builder. @@ -80,7 +80,7 @@ In XML: [source,xml,indent=0] ---- -include::{sourcedir31}/xml/src/test/resources/configs/docs/getting-started.xml[tags=customExpiry] +include::{sourcedir32}/xml/src/test/resources/configs/docs/getting-started.xml[tags=customExpiry] ---- -<1> Simply pass the fully qualified class name of your custom expiry. \ No newline at end of file +<1> Simply pass the fully qualified class name of your custom expiry. diff --git a/docs/src/docs/asciidoc/user/getting-started.adoc b/docs/src/docs/asciidoc/user/getting-started.adoc index c13b6f7b4d..a96ea62447 100644 --- a/docs/src/docs/asciidoc/user/getting-started.adoc +++ b/docs/src/docs/asciidoc/user/getting-started.adoc @@ -1,9 +1,9 @@ --- --- -= Ehcache 3.1 Documentation -ifndef::sourcedir31[] += Ehcache 3.2 Documentation +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] We feel that the Ehcache 3.x API is a great improvement over the Ehcache 2.x API that has been used by millions of developers. We hope you enjoy this new generation of Ehcache! 
ifdef::notBuildingForSite[] @@ -29,46 +29,51 @@ As with the previous versions of Ehcache, the canonical way of dealing with `Cac [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cachemanagerExample] ----- - -<1> Static method `org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder` that returns a new `org.ehcache.config.builders.CacheManagerBuilder` - instance; -<2> Use the builder to register a pre-configured `Cache` to be created when we `.build()` the actual `CacheManager`. - The first `String` argument is the alias used to interact with the `Cache` through the `CacheManager`; the second - argument is `org.ehcache.config.CacheConfiguration` to configure the `Cache`. We use the static - `.newCacheConfigurationBuilder()` method on `org.ehcache.config.builders.CacheConfigurationBuilder` to create a default config; -<3> Finally, invoking `.build()` returns a fully instantiated, but uninitialized, `CacheManager` we can use; -<4> Before you start to use the `CacheManager` it needs to be `init()`, which can be done for you by the builder by passing - `true` to `build(boolean)`; -<5> We can retrieve the `preConfigured` aliased `Cache` we declared in step 2. For type-safety, we ask for both key and - value types to be passed in. If these differ from the ones we expect, the `CacheManager` throws a `ClassCastException` - early in the application's lifecycle. It also guards the `Cache` from being polluted by random types. -<6> The `CacheManager` can also be used to create new `Cache` as needed. Just as in step 2, it requires passing in an +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cachemanagerExample] +---- + +<1> The static method `org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder` returns a new `org.ehcache.config.builders.CacheManagerBuilder` instance. +<2> Use the builder to define a `Cache` with alias "preConfigured". 
This cache will be created when `cacheManager.build()` is invoked on the actual `CacheManager` instance. +The first `String` argument is the cache alias, which is used to retrieve the cache from the `CacheManager`. +The second argument, `org.ehcache.config.CacheConfiguration`, is used to configure the `Cache`. +We use the static `newCacheConfigurationBuilder()` method on `org.ehcache.config.builders.CacheConfigurationBuilder` to create a default configuration. +<3> Finally, invoking `build()` returns a fully instantiated, but uninitialized, `CacheManager` we can use. +<4> Before using the `CacheManager` it needs to be initialized, which can be done in 1 of 2 ways: +Calling `CacheManager.init()` on the `CacheManager` instance, or calling the `CacheManagerBuilder.build(boolean init)` method with the boolean parameter set to true. +<5> A cache is retrieved by passing its alias, key type and value type to the `CacheManager`. +For instance, to obtain the cache declared in step 2 you need its alias=preConfigured, keyType=Long.class and valueType=String.class. +For type-safety, we ask for both key and value types to be passed in. +If these differ from the ones we expect, the `CacheManager` throws a `ClassCastException` early in the application's lifecycle. +This guards the `Cache` from being polluted by random types. +<6> The `CacheManager` can be used to create new `Cache` instances as needed. Just as in step 2, it requires passing in an alias as well as a `CacheConfiguration`. The instantiated and fully initialized `Cache` added will be returned and/or accessed through the `CacheManager.getCache` API. -<7> We can now use the newly added `Cache` to store and ... -<8> ... retrieve data. -<9> We can also `CacheManager.removeCache(String)` a given `Cache`. The `CacheManager` will not only remove it's reference to the +<7> The newly added `Cache` can now be used to store entries, which are comprised of key value pairs. 
+The put method's first parameter is the key and the second parameter is the value. +Remember the key and value types must be the same types as those defined in the `CacheConfiguration`. +Additionally the key must be unique and is only associated with one value. +<8> A value is retrieved from a cache by calling the `cache.get(key)` method. +It only takes one parameter which is the key, and returns the value associated with that key. If there is no value associated with that key then null is returned. +<9> We can `CacheManager.removeCache(String)` a given `Cache`. The `CacheManager` will not only remove its reference to the `Cache`, but will also close it. The `Cache` releases all locally held transient resources (such as memory). References to this `Cache` become unusable. <10> In order to release all transient resources (memory, threads, ...) a `CacheManager` provides to `Cache` instances it manages, you have to invoke `CacheManager.close()`, which in turns closes all `Cache` instances known at the time. -=== Creating cache manager with clustering support +=== Creating a cache manager with clustering support To enable Clustering with Terracotta, firstly you will have to <> configured with clustered storage. 
-Further, for creating the cache manager with clustering support, you will need to provide the clustering service configuration: +In addition, for creating the cache manager with clustering support, you will need to provide the clustering service configuration: [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] ---- <1> Returns the `org.ehcache.config.builders.CacheManagerBuilder` instance; <2> Use the `ClusteringServiceConfigurationBuilder`{empty}'s static method `.cluster(URI)` for connecting the cache manager to the clustering storage at the URI specified that returns the clustering service configuration builder instance. - Sample URI provided in the example is pointing to the clustered storage with clustered storage identifier *my-application* on the Terracotta server (Assuming the server is running on localhost and port *9510*); the query-param `auto-create` + The sample URI provided in the example points to the clustered storage with clustered storage identifier *my-application* on the Terracotta server (assuming the server is running on localhost and port *9510*); the query-param `auto-create` creates the clustered storage in the server if it doesn't already exist. <3> Returns a fully initialized cache manager that can be used to create clustered caches. <4> Close the cache manager. @@ -81,10 +86,10 @@ Ehcache 3 introduces the concept of `UserManagedCache`: [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] ---- -<1> A new feature of Ehcache 3 is the ability to create `UserManagedCache` instances, i.e. 
ones not managed by a `CacheManager`, again you can either have the builder `init()` it for you, passing true or +<1> A new feature of Ehcache 3 is the ability to create `UserManagedCache` instances, i.e. ones not managed by a `CacheManager`, again you can either have the builder `init()` it for you, passing true, or <2> pass false and it is up to you to `UserManagedCache.init()` them, prior to using them. <3> You can use the cache exactly as a managed cache <4> In the same vein, a `UserManagedCache` requires you to `UserManagedCache.close()` it explicitly. If you would also use @@ -97,15 +102,15 @@ NOTE: See <> for more in Ehcache 3, as in previous versions, offers a tiering model to allow storing increasing amounts of data on slower tiers (which are generally more abundant). -The idea is that resources related to faster storage are more rare, but are where the 'hottest' data is preferred to be. -Thus less-hot (less frequently used) data is moved to the more abundant but slower tiers. Hotter data is faulted onto +The idea is that resources related to faster storage are more rare, but are located where the 'hottest' data is preferred to be. +Thus less-hot (less frequently used) data is moved to the more abundant but slower tiers. Hotter data is moved onto the faster tiers. ==== Off-heap [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=offheapCacheManager] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=offheapCacheManager] ---- <1> If you wish to use off-heap, you'll have to define a resource pool, giving the memory size you want to allocate. 
@@ -120,12 +125,15 @@ Do not forget to define in the java options the `-XX:MaxDirectMemorySize` option [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=persistentCacheManager] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=persistentCacheManager] ---- <1> If you wish to use disk storage (like for persistent `Cache` instances), you'll have to provide a location where data should be stored on disk to the `CacheManagerBuilder.persistence(String)` static method. -<2> You define a resource pool for the disk. +<2> Defines a resource pool for the disk. +The third parameter is a boolean value which is used to set whether the disk pool is persistent. +When set to true, the pool is persistent, and when set to false, the pool is not persistent. +When this method is used without the third boolean parameter then the pool is not persistent. The example above allocates a very small amount of disk storage. Remember that data stored on disk will have to be serialized / deserialized and written / read from disk - @@ -133,20 +141,22 @@ and is thus slower than heap and offheap. You should thus favor disk for large amounts of data. Another reason to use disk storage is persistence across application restarts. -Note that Ehcache 3 only offers persistence in case of clean shutdowns. +Note that Ehcache 3 only offers persistence in the case of clean shutdowns. ==== Three tiers +The example below illustrates how to use disk storage for a non-persistent `Cache` instance. 
+ [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=threeTiersCacheManager] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=threeTiersCacheManager] ---- <1> If you wish to use disk storage (like for persistent `Cache` instances), you'll have to provide a location where data should be stored on disk to the `CacheManagerBuilder.persistence(String)` static method. <2> You define a resource pool for the heap. <3> You define a resource pool for the off-heap. -<4> You define a resource pool for the disk. +<4> You define a non-persistent resource pool for the disk. Remember this pool is not persistent because we declared the disk pool using the method that does not use the boolean persistent parameter. ==== Byte-sized heap @@ -156,15 +166,15 @@ NOTE: Byte sizing has a runtime performance impact that depends on the size and [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=byteSizedTieredCache] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=byteSizedTieredCache] ---- -<1> You can also size the heap tier in bytes. This will limit the amount of heap used by that tier for - storing key-value pairs. Note that there is a cost associated to sizing objects. -<2> The sizing mechanism can be configured along two axis: The first one specifies the maximum number - of objects to traverse while walking the object graph, the second defines the maximum size of a - single object. If the sizing goes above any of these two limits, the mutative operation on the - cache will be ignored. +<1> You can also size the heap tier in bytes. +This will limit the amount of memory used by the heap tier for storing key-value pairs. +Note that there is a cost associated to sizing objects. 
+<2> The sizing can also be further restrained by 2 additional configuration settings: +The first one specifies the maximum number of objects to traverse while walking the object graph, the second defines the maximum size of a single object. +If the sizing goes above any of these two limits, the mutative operation on the cache will be ignored. <3> A default configuration can be provided at CacheManager level to be used by the caches unless defined explicitly. @@ -172,17 +182,17 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t Limited size adjustment can be performed on a live cache. -NOTE: Presently, `updateResourcePools()` only supports updating the heap tier and without changing the resource type. +NOTE: `updateResourcePools()` only allows you to change the heap tier sizing, not the pool type. Thus you can't change the sizing of off-heap or disk tiers. [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=updateResourcesAtRuntime] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=updateResourcesAtRuntime] ---- <1> You will need to create a new `ResourcePools` object with resources of required size, using `ResourcePoolsBuilder`. This object can then be passed to the said method so as to trigger the update. -<2> To update capacity of `ResourcePools`, `updateResourcePools(ResourcePools)` method in `RuntimeConfiguration` can be of help. - `ResourcePools` object created earlier can then be passed to this method so as to trigger the update. +<2> To update capacity of `ResourcePools`, the `updateResourcePools(ResourcePools)` method in `RuntimeConfiguration` can be of help. + The `ResourcePools` object created earlier can then be passed to this method so as to trigger the update. === Data freshness @@ -191,7 +201,7 @@ The following illustrates how to configure a _time-to-live_ expiry. 
[source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] ---- <1> Expiry is configured at the cache level, so start by defining a cache configuration, @@ -202,22 +212,22 @@ See the section on <> for more information about the option [[configuring-with-xml]] === Configuring With XML -...It wouldn't be Java without _some_ XML +...It wouldn't be Java without _some_ XML. -You can create a XML file to configure a `CacheManager`: +You can create an XML file to configure a `CacheManager`: [source,xml] ---- -include::{sourcedir31}/xml/src/test/resources/configs/docs/getting-started.xml[tags=gettingStarted] +include::{sourcedir32}/xml/src/test/resources/configs/docs/getting-started.xml[tags=gettingStarted] ---- <1> Declares a `Cache` aliased to `foo` <2> The keys of `foo` are declared as type `String`; since the value type is not specified, the values will be of type `Object`. <3> `foo` is declared to hold up to 2,000 entries on heap... <4> ...as well as up to 500 MB of off-heap memory before it starts evicting -<5> `` elements let you create an abstract configuration that further `` configuration can then _extend_ +<5> `` elements let you create an abstract configuration that further `` configurations can then _extend_ <6> `bar` is such a `Cache`. `bar` uses the `` named `myDefaults` and overrides its `key-type` to a wider type. -<7> `simpleCache` is another such a `Cache`. It uses `myDefaults` configuration for its sole `CacheConfiguration`. +<7> `simpleCache` is another such `Cache`. It uses `myDefaults` configuration for its sole `CacheConfiguration`. Refer to the <> for more details on the XML format. 
@@ -237,7 +247,7 @@ CacheManager myCacheManager = CacheManagerBuilder.newCacheManager(xmlConfig); // == Current development -For developer information, you might want to go check the https://github.com/ehcache/ehcache3/wiki[Ehcache 3 project wiki on GitHub]. +For developer information, you might want to check the https://github.com/ehcache/ehcache3/wiki[Ehcache 3 project wiki on GitHub]. The next version, 3.2, will finalize http://terracotta.org[Terracotta clustering] support. Cache events, cache-through and transactional clustered caches will then be supported. diff --git a/docs/src/docs/asciidoc/user/index.adoc b/docs/src/docs/asciidoc/user/index.adoc index f387229fb0..95926edb6d 100644 --- a/docs/src/docs/asciidoc/user/index.adoc +++ b/docs/src/docs/asciidoc/user/index.adoc @@ -1,9 +1,9 @@ --- --- -= Ehcache 3.1 Documentation Overview -ifndef::sourcedir31[] += Ehcache 3.2 Documentation Overview +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -11,7 +11,7 @@ endif::notBuildingForSite[] == Table of Contents -The Table of Contents provides an overview of the Ehcache 3.1 documentation on this site. +The Table of Contents provides an overview of the Ehcache 3.2 documentation on this site. Each topic below corresponds to a menu item at the left. === Basic Topics @@ -25,7 +25,7 @@ Each topic below corresponds to a menu item at the left. 
|link:107.html[JSR-107 Support]|Using Ehcache as a javax.cache aka JSR-107 provider |link:examples.html[Java Examples]|Examples of using Ehcache APIs |link:xsds.html[Configuration XSD]|Reference XSD for configuration -|link:clustered-cache.html[Clustering with Terracotta]|Using Terracotta to enable clustering of caches in EhCache +|link:clustered-cache.html[Clustering with Terracotta]|Using Terracotta to enable clustering of caches in Ehcache |=== === General Topics diff --git a/docs/src/docs/asciidoc/user/management.adoc b/docs/src/docs/asciidoc/user/management.adoc index 07f9569d93..922aa14e7d 100644 --- a/docs/src/docs/asciidoc/user/management.adoc +++ b/docs/src/docs/asciidoc/user/management.adoc @@ -1,9 +1,9 @@ --- --- = Management and Monitoring -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -11,7 +11,7 @@ endif::notBuildingForSite[] == Intro -Managed objects like caches, cache managers and stores are registered into a `org.ehcache.management.ManagementRegistryService` +Managed objects like caches, cache managers and stores are registered into an `org.ehcache.management.ManagementRegistryService` instance. A `ManagementRegistry` implementation has to understand the registered object and provide management and monitoring @@ -25,13 +25,13 @@ a minimal set of statistics and actions via a couple of capabilities. == Making use of the `ManagementRegistry` -By default, a `ManagementRegistry` is automatically discovered and enabled, but can only be accessed by ehcache +By default, a `ManagementRegistry` is automatically discovered and enabled, but can only be accessed by Ehcache internal services. 
If you wish to make use of it, you should create your own instance and pass it to the cache manager builder as a service: [source,java,indent=0] ---- -include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=usingManagementRegistry] +include::{sourcedir32}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=usingManagementRegistry] ---- <1> Optional: give a name to your cache manager by using a custom configuration <2> Create an instance of `org.ehcache.management.registry.DefaultManagementRegistryService`. This is only required because the service is used below. @@ -39,29 +39,28 @@ include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest. <4> Perform a few gets to increment the statistic's counter <5> Create the target statistic's context <6> Collect the get count statistic -<7> Check that the statistic reports the expected count Obviously, you may use the above technique to pass your own implementation of `ManagementRegistry`. == Capabilities and contexts -Capabilities are metadata of what the managed objects are capable of: a collection of statistic that can be queried +Capabilities are metadata of what the managed objects are capable of: a collection of statistics that can be queried and/or remote actions that can be called. -Each capability requires a context to run within. For instance, cache-specific statistics require a cache manager name +Each capability requires a context to run in. For instance, cache-specific statistics require a cache manager name and a cache name to uniquely identify the cache on which you want to query stats or call an action. 
[source,java,indent=0] ---- -include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=capabilitiesAndContexts] +include::{sourcedir32}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=capabilitiesAndContexts] ---- <1> Query the `ManagementRegistry` for the registered managed objects' capabilities. <2> Each capability has a unique name you will need to refer to it. <3> Each capability has a collection of `Descriptor`s that contains the metadata of each statistic or action. -<4> Each capability requires a context to which it needs to refer to. +<4> Each capability requires a context which it needs to refer to. <5> The first attribute of this context is the cache manager name. <6> The second attribute of this context is the cache name. With both attributes, the capability can uniquely refer to a unique managed object. -<7> Query the `ManagementRegistry` for the all the registered managed objects' contexts. +<7> Query the `ManagementRegistry` for all of the registered managed objects' contexts. <8> There is only one context here, and its name is the cache manager's name. <9> The above context has a subcontext: the cache's name. @@ -70,13 +69,13 @@ context container to a capability's context by matching their respective names. == Actions -There are two forms of capabilities: statistics and action ones. The statistic ones offer a set of predefined -statistics that can be queried at will, while the action ones offer a set of actions that can be taken upon -a managed object. Examples of actions could be: clear caches, get their config or modify a config setting. +There are two forms of capabilities: statistics and action ones. The statistics ones offer a set of predefined +statistics that can be queried at will, while the action ones offer a set of actions that can be taken on +a managed object. Examples of actions could be: clear caches, get their configuration or modify a configuration setting. 
[source,java,indent=0] ---- -include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=actionCall] +include::{sourcedir32}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=actionCall] ---- <1> Put something in a cache. <2> Call the 'clear' action on the managed cache. Refer to the descriptors of the provider to get the exact list of @@ -94,7 +93,7 @@ manager by default, but sometimes you may want one `ManagementRegistry` to manag [source,java,indent=0] ---- -include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=managingMultipleCacheManagers] +include::{sourcedir32}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=managingMultipleCacheManagers] ---- <1> Create an instance of `org.ehcache.management.SharedManagementService` <2> Pass it as a service to the first cache manager diff --git a/docs/src/docs/asciidoc/user/serializers-copiers.adoc b/docs/src/docs/asciidoc/user/serializers-copiers.adoc index 27be820b30..c9b499a2e0 100644 --- a/docs/src/docs/asciidoc/user/serializers-copiers.adoc +++ b/docs/src/docs/asciidoc/user/serializers-copiers.adoc @@ -1,9 +1,9 @@ --- --- = Serializers and Copiers -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -45,7 +45,7 @@ If a serializer is configured directly at the cache level, it will be used, igno If a serializer is configured at the cache manager level, upon initialization, a cache with no specifically configured serializer will search through its cache manager's registered list of serializers and try to find one that directly matches the cache's key or value type. -If such search fails, all the registered serializers will be tried in the added order to find one that handles compatible types. 
+If such a search fails, all the registered serializers will be tried in the added order to find one that handles compatible types. For instance, let's say you have a `Person` interface and two subclasses: `Employee` and `Customer`. If you configure your cache manager as follows: ```java @@ -60,7 +60,7 @@ NOTE: Given the above, it is recommended to limit `Serializer` registration to c [[serializers-bundled]] === Bundled implementations -By default, cache managers are pre-configured with specially optimized `Serializer` that can handle the following types, in the following order: +By default, cache managers are pre-configured with a specially optimized `Serializer` that can handle the following types, in the following order: - `java.io.Serializable` - `java.lang.Long` @@ -84,7 +84,7 @@ However, registering a different `Serializer` for one of the given type means it === Lifecycle: instances vs. class names When a `Serializer` is configured by providing an _instance_, it is up to the provider of that instance to manage its lifecycle. -It will need to dispose of any resource the serializer might hold upon and/or persisting and reloading the serializer's state. +It will need to dispose of any resource the serializer might hold upon persisting and/or reloading the serializer's state. When a `Serializer` is configured by providing a _class name_ either at the cache or cache manager level, since Ehcache is responsible for creating the instance, it also is responsible for disposing of it. @@ -109,59 +109,57 @@ Implement the following interface, from package `org.ehcache.spi.serialization`: [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/spi/serialization/Serializer.java[lines=21..-1] +include::{sourcedir32}/api/src/main/java/org/ehcache/spi/serialization/Serializer.java[lines=20..-1] ---- As the javadoc states, there are some constructor rules, see the <> for that. -You can optionally implement `java.io.Closeable`. 
If you do, Ehcache will call `close()` when a cache using such serializer gets disposed of, but *only if* +You can optionally implement `java.io.Closeable`. If you do, Ehcache will call `close()` when a cache using such a serializer gets disposed of, but *only if* Ehcache instantiated the serializer itself. === ClassLoaders -When Ehcache instantiates a serializer itself, it will pass it a `ClassLoader` via the constructor. Such class loader must be used to access the classes +When Ehcache instantiates a serializer itself, it will pass it a `ClassLoader` via the constructor. Such a class loader must be used to access the classes of the serialized types as they might not be available in the current class loader [[persistent-vs-transient-caches]] ==== Persistent vs. transient caches -When configured on a persistent cache, serializers may need to persist and restore their state across restarts. -For clustered caches there might be an additional requirement that the state of the serializer must be visible to all clients using the same cache(configured with the same serializer). -To address these requirement you have to implement a constructor with the following signature: +All custom serializers must have a constructor with the following signature: ```java -public MySerializer(ClassLoader classLoader, StateRepository stateRepository) { +public MySerializer(ClassLoader classLoader) { } ``` -otherwise persistent caches won't be able to use your serializer. - -The `StateRepository.getPersistentConcurrentMap()` provides a `ConcurrentMap` that you can use to store any relevant state. -The users don't have to worry about the persistence aspects of this map as it is taken care by `Ehcache`. -In the case of a disk persistent cache, the contents of the map will be persisted locally on to the disk. -For clustered caches the contents are persisted in the cluster itself so that other clients using the same cache can also access the contents of the map. 
- -Attempting to configure a serializer that lacks such constructor on a persistent cache using either of +Attempting to configure a serializer that lacks such a constructor on a cache using either of `CacheConfigurationBuilder.withKeySerializer(Class> keySerializerClass)` or `CacheConfigurationBuilder.withValueSerializer(Class> valueSerializerClass)` will be sanctioned with an exception upon cache initialization. -Configuring a serializer that lacks such constructor by instance on a persistent cache using either of -`CacheConfigurationBuilder.withKeySerializer(Serializer keySerializer)` or `CacheConfigurationBuilder.withValueSerializer(Serializer valueSerializer)` -will work, but the responsibility of persisting and restoring the serializer's state across restarts lies on you. +But if an instance of the serializer is configured using either of +`CacheConfigurationBuilder.withKeySerializer(Serializer keySerializer)` or +`CacheConfigurationBuilder.withValueSerializer(Serializer valueSerializer)` +it will work since the instantiation is done by the user code itself. + +Registering a serializer that lacks such a constructor at the cache manager level will prevent it from being chosen for caches. + +Custom serializer implementations could have some state that is used in the serialization/deserialization process. +When configured on a persistent cache, the state of such serializers needs to be persisted across restarts. -On caches that have no persistent capable store, serializers must have a constructor: +To address these requirements you can have a `StatefulSerializer` implementation. 
+`StatefulSerializer` is a specialized `Serializer` with an additional _init_ method with the following signature: ```java -public MySerializer(ClassLoader classLoader) { +public void init(StateRepository repository) { } ``` -Attempting to configure a serializer that lacks such constructor on a transient cache using either of -`CacheConfigurationBuilder.withKeySerializer(Class> keySerializerClass)` or -`CacheConfigurationBuilder.withValueSerializer(Class> valueSerializerClass)` -will be sanctioned with an exception upon cache initialization. +The `StateRepository.getPersistentStateHolder(String, Class, Class)` provides a `StateHolder` (a map-like structure) that you can use to store any relevant state. +The `StateRepository` is provided by the authoritative tier of the cache and hence will have the same persistence properties as that tier. +For persistent caches it is highly recommended that all state is stored in these holders as the users won't have to worry about the persistence aspects of this state holder as it is taken care of by `Ehcache`. 
If a copier is configured directly at the cache level, it will be used, ignoring any cache manager level configuration. If a copier is configured at the cache manager level, upon initialization, a cache with no specifically configured copier will -search through its cache manager's registered list of copiers and try to find one that directly matches the cache's key or value type. If such +search through its cache manager's registered list of copiers and try to find one that directly matches the cache's key or value type. If such a search fails, all the registered copiers will be tried in the added order to find one that handles compatible types. For instance, let's say you have a `Person` interface and two subclasses: `Employee` and `Customer`. If you configure your cache manager as follows: @@ -221,11 +219,11 @@ The `CacheConfigurationBuilder` provides the following methods to make use of th === Lifecycle: instances vs class names When a `Copier` is configured by providing an _instance_, it is up to the provider of that instance to manage its lifecycle. -It will have dispose of any resource the copier might hold upon. +It will have to dispose of any resource the copier might hold. When a `Copier` is configured by providing a _class name_ either at the cache or cache manager level, since Ehcache is responsible for creating the instance, it also is responsible for disposing of it. -If the `Copier` implements `java.io.Closeable` then `close()` will be called when the cache is closed and the `Copier` no longer needed. +If the `Copier` implements `java.io.Closeable` then `close()` will be called when the cache is closed and the `Copier` is no longer needed. 
=== Writing your own Copier @@ -233,7 +231,7 @@ Implement the following interface: [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/spi/copy/Copier.java[lines=19..-1] +include::{sourcedir32}/api/src/main/java/org/ehcache/spi/copy/Copier.java[lines=19..-1] ---- * `T copyForRead(T obj)` is invoked when a copy must be made upon a read operation (like a cache `get()`), diff --git a/docs/src/docs/asciidoc/user/thread-pools.adoc b/docs/src/docs/asciidoc/user/thread-pools.adoc index 7d6beef65f..b36c121f55 100644 --- a/docs/src/docs/asciidoc/user/thread-pools.adoc +++ b/docs/src/docs/asciidoc/user/thread-pools.adoc @@ -1,9 +1,9 @@ --- --- = Thread Pools -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -66,12 +66,12 @@ Following is the list of services making use of `ExecutionService`: `CacheEventDispatcherFactoryConfiguration` is used to configure what thread pool to use at the cache manager level. The different builders will make use of the right configuration class, you do not have to use those classes directly. -For instance, calling `CacheManagerBuilder.withDefaultDiskStoreThreadPool(String threadPoolAlias)` actually is identical +For instance, calling `CacheManagerBuilder.withDefaultDiskStoreThreadPool(String threadPoolAlias)` is actually identical to calling `CacheManagerBuilder.using(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias))`. -The thread pool can be assigned to a service with the builders by passing to the ad-hoc method a -`threadPoolAlias` parameter. When a service isn't told anything about what thread pool to use, the default thread pool -is used. +The thread pool can be assigned to a service with the builders by passing a +`threadPoolAlias` parameter to the ad-hoc method. When a service isn't told anything about what thread pool to use, +the default thread pool is used. 
== In practice @@ -84,7 +84,7 @@ Following are examples of describing how to configure the thread pools the diffe [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=diskStore] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=diskStore] ---- <1> Configure the thread pools. Note that the default one (`dflt`) is required for the events even when no event @@ -96,7 +96,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag= [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=writeBehind] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=writeBehind] ---- <1> Configure the thread pools. Note that the default one (`dflt`) is required for the events even when no event @@ -109,7 +109,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag= [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=events] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=events] ---- <1> Configure the thread pools. Note that there is no default one so all thread-using services must be configured @@ -122,7 +122,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag= [source,xml] ---- -include::{sourcedir31}/xml/src/test/resources/configs/docs/thread-pools.xml[tags=threadPools] +include::{sourcedir32}/xml/src/test/resources/configs/docs/thread-pools.xml[tags=threadPools] ---- <1> Configure the thread pools. Note that there is no default one. 
diff --git a/docs/src/docs/asciidoc/user/usermanaged.adoc b/docs/src/docs/asciidoc/user/usermanaged.adoc index 59aa434b11..301aa48d87 100644 --- a/docs/src/docs/asciidoc/user/usermanaged.adoc +++ b/docs/src/docs/asciidoc/user/usermanaged.adoc @@ -1,9 +1,9 @@ --- --- = User managed caches -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -25,36 +25,52 @@ Of course, if you find yourself requiring plenty of services, maybe the cache ma === API extensions -While a `UserManagedCache` extends `Cache`, it offers additional methods: +If you use a `UserManagedCache`, you need to configure all required services by hand. +The `UserManagedCache` class extends the `Cache` class by offering additional methods: + +* `init()` - initializes the cache +* `close()` - releases the cache resources +* `getStatus()` - returns a status + +The `init` and `close` methods deal with the lifecycle of the cache and need to be called explicitly, whereas these methods are hidden when the cache is inside a `CacheManager`. + +The interface definition is shown in this code: [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/UserManagedCache.java[lines=17..-1] +include::{sourcedir32}/api/src/main/java/org/ehcache/UserManagedCache.java[lines=17..-1] ---- -As can be seen, these methods deal with the lifecycle of the cache and need to be called explicitly. +=== User Managed Persistent Cache -There is also the following interface which comes into play when a user managed persistent cache is created: +A user managed persistent cache holds cached data in a persistent store such as disk, so that the stored data can outlive the JVM in which your caching application runs. +If you want to create a user managed persistent cache, there is an additional interface `PersistentUserManagedCache` that extends `UserManagedCache` and adds the `destroy` method. 
+The `destroy` method deletes all data structures, including data stored persistently on disk, for a `PersistentUserManagedCache`. +The `destroy` method deals with the lifecycle of the cache and needs to be called explicitly. + +The interface definition is shown in this code: [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/PersistentUserManagedCache.java[lines=17..-1] +include::{sourcedir32}/api/src/main/java/org/ehcache/PersistentUserManagedCache.java[lines=17..-1] ---- -== Getting started with user managed caches +== Code examples for User Managed Caches + +=== Example of a basic cache lifecycle -=== Starting example with lifecycle +Here is a simple example showing a basic lifecycle of a user managed cache: [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] ---- -<1> Create a `UserManagedCache` instance, again you can either have the builder `init()` it for you, passing true or -<2> pass false and it is up to you to `UserManagedCache.init()` them, prior to using them. +<1> Create a `UserManagedCache` instance. You can either pass "true" to have the builder `init()` it for you, or you can pass "false" and it is up to you to `init()` it prior to using it. +<2> Since "false" was passed in step 1, you have to `init()` the `UserManagedCache` prior to using it. <3> You can use the cache exactly as a managed cache -<4> In the same vein, a `UserManagedCache` requires you to `UserManagedCache.close()` it explicitly. If you would also use - managed caches simultaneously, the `CacheManager.close()` operation would not impact the user managed cache(s). +<4> In the same vein, a `UserManagedCache` requires you to close it explicitly using `UserManagedCache.close()`. 
+If you are also using managed caches simultaneously, the `CacheManager.close()` operation does not impact the user managed cache(s). From this basic example, explore the API of `UserManagedCacheBuilder` to find all the directly available features. @@ -65,28 +81,28 @@ The following features apply in the exact same way to user managed caches: Simply use the methods from `UserManagedCacheBuilder` which are equivalent to the ones from `CacheConfigurationBuilder`. -Below we will describe some more advanced setup where there is need to maintain a service instance in order to have working user managed cache. +Below we will describe some more advanced setup where there is a need to maintain a service instance in order to have a working user managed cache. -=== Example with disk persistent and lifecycle +=== Example with disk persistence and lifecycle -If you want to use disk persistent cache, you will need to create and lifecycle the persistence service. +If you want to use a disk persistent cache, you will need to create and lifecycle the persistence service. [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=persistentUserManagedCache] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=persistentUserManagedCache] ---- <1> Create the persistence service to be used by the cache for storing data on disk -<2> Pass the persistence service to the builder next to an id for the cache - note that this will make the builder produce a more specific type: `PersistentUserManagedCache` +<2> Pass the persistence service to the builder as well as a name for the cache. Note that this will make the builder produce a more specific type: `PersistentUserManagedCache` <3> As usual, indicate here if the data should outlive the cache -<4> Closing the cache will not delete the data it saved on disk when marked as persistent. 
-<5> To delete the data, after closing the cache, destroy has to be explicitly invoked. -<6> It is also your responsibility to stop the persistence service once you are done with the cache. +<4> Closing the cache will not delete the data it saved on disk, since the cache is marked as persistent. +<5> To delete the data on disk after closing the cache, you need to invoke the `destroy` method explicitly. +<6> You need to stop the persistence service once you have finished using the cache. === Example with cache event listeners Cache event listeners require executor services to work. You will have to provide either a `CacheEventDispatcher` implementation -or make use of the default one by providing two executor services: one for ordered events and one for un-ordered ones. +or make use of the default one by providing two executor services: one for ordered events and one for unordered ones. NOTE: The ordered events executor must be single threaded to guarantee ordering. @@ -94,7 +110,7 @@ For more information on cache event listeners, see < Provide ExecutorService for ordered and unordered events delivery. -<2> Provide listener configuration using CacheEventListenerConfigurationBuilder. +<1> Provide the `ExecutorService` for ordered and unordered event delivery. +<2> Provide listener configuration using `CacheEventListenerConfigurationBuilder`. 
diff --git a/docs/src/docs/asciidoc/user/writers.adoc b/docs/src/docs/asciidoc/user/writers.adoc index 44f8b8334e..ea264f0bad 100644 --- a/docs/src/docs/asciidoc/user/writers.adoc +++ b/docs/src/docs/asciidoc/user/writers.adoc @@ -1,9 +1,9 @@ --- --- = Cache Writers -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -69,10 +69,10 @@ maximum write delay:: [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeThroughCache] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeThroughCache] ---- -<1> We register a sample `CacheLoaderWriter` who knows about the mapping `(41L -> "zero")` +<1> We register a sample `CacheLoaderWriter` that knows about the mapping `(41L -> "zero")` <2> Since the cache has no content yet, this will delegate to the `CacheLoaderWriter`. The returned mapping will populate the cache and be returned to the caller. <3> While creating this cache mapping, the `CacheLoaderWriter` will be invoked to write the mapping into the system of record. @@ -81,12 +81,12 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeBehindCache] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeBehindCache] ---- <1> For write-behind you need a configured `CacheLoaderWriter`. <2> Additionally, register a `WriteBehindConfiguration` on the cache by using the `WriteBehindConfigurationBuilder`. -<3> Here we configure write behind or batching with a batch size of 3 and a maximum write delay of 1 second. +<3> Here we configure write-behind or batching with a batch size of 3 and a maximum write delay of 1 second. <4> We also set the maximum size of the write-behind queue. 
<5> Define the concurrency level of write-behind queue(s). This indicates how many writer threads work in parallel to update the underlying system of record asynchronously. diff --git a/docs/src/docs/asciidoc/user/xa.adoc b/docs/src/docs/asciidoc/user/xa.adoc index a7a7b071a1..1a60161734 100644 --- a/docs/src/docs/asciidoc/user/xa.adoc +++ b/docs/src/docs/asciidoc/user/xa.adoc @@ -1,15 +1,15 @@ --- --- = XA transactional caches -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] Ehcache 3 supports caches that work within a XA transaction's context controlled by a JTA transaction manager, fully supporting the whole two-phase commit protocol, including crash recovery. [IMPORTANT] ========================== -Ehcache 3.1 jar no longer contains the transaction related code. +Ehcache 3.1+ jar no longer contains the transaction related code. This is now available through a different binary: [source,xml] ---- @@ -32,7 +32,7 @@ endif::notBuildingForSite[] * The isolation level is guaranteed by the use of the `Copier` mechanism. When no copiers are configured for either the key or the value, default ones are automatically used instead. You cannot disable the `Copier` mechanism for a transactional cache. - * Accessing a cache access outside of a JTA transaction context is forbidden. + * Accessing a cache outside of a JTA transaction context is forbidden. * There is no protection against the ABA problem. * Everything else works orthogonally. @@ -51,15 +51,15 @@ INFO org.ehcache.transactions.xa.txmgr.btm.BitronixTransactionManagerLookup - Us [source,java,indent=0] ---- -include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testSimpleXACache] +include::{sourcedir32}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testSimpleXACache] ---- <1> First start the Bitronix transaction manager. 
By default, Ehcache 3 will auto-detect it but will throw an exception during the cache manager initialization if BTM isn't started. -<2> Configure the cache manager such as it can handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. +<2> Configure the cache manager to handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. <3> Register a cache the normal way. <4> Give it the resources you wish. -<5> Add a `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique +<5> Add an `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique XAResource identifier as some transaction managers require this. <6> Begin a JTA transaction the normal way. <7> Work with the cache the normal way, all operations are supported. Note that concurrent transactions will not see @@ -68,9 +68,9 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ === Configuring your transaction manager -While only the Bitronix JTA implementation has been tested so far, plugging-in another one is possible. +While only the Bitronix JTA implementation has been tested so far, plugging in another one is possible. -You will need to implement a `org.ehcache.transactions.xa.txmgr.provider.TransactionManagerLookup` +You will need to implement an `org.ehcache.transactions.xa.txmgr.provider.TransactionManagerLookup` and make sure you understand its expected lifecycle as well as the one of the `org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProvider`. If such a lifecycle does not match your needs, you will have to go one step further and implement your own `org.ehcache.transactions.xa.txmgr.provider.TransactionManagerProvider`. @@ -83,17 +83,17 @@ transaction context. 
Nothing special needs to be configured for this to happen, [source,java,indent=0] ---- -include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithWriteThrough] +include::{sourcedir32}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithWriteThrough] ---- <1> First start the Bitronix transaction manager. By default, Ehcache 3 will auto-detect it but will throw an exception during the cache manager initialization if BTM isn't started. -<2> Configure the cache manager such as it can handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. +<2> Configure the cache manager to handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. <3> Register a cache the normal way. <4> Give it the resources you wish. -<5> Add a `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique +<5> Add an `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique XAResource identifier as some transaction managers require this. -<6> Add a `CacheLoaderWriter` configuration. This one is a mocked SoR backed by a map for illustration purpose that +<6> Add a `CacheLoaderWriter` configuration. This one is a mocked SoR backed by a map for illustration purposes that is filled with `1L`/`"eins"` key/value pair at startup. <7> Begin a JTA transaction the normal way. <8> The cache is empty at startup, so the `CacheLoaderWriter` will be called to load the value. @@ -102,20 +102,20 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ === Transactional scope -A XA cache can only be accessed within a JTA transaction's context. Any attempt to access one outside of such context -will result in `XACacheException` to be thrown. 
+An XA cache can only be accessed within a JTA transaction's context. Any attempt to access one outside of such a context +will result in `XACacheException` being thrown. [source,java,indent=0] ---- -include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testNonTransactionalAccess] +include::{sourcedir32}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testNonTransactionalAccess] ---- <1> First start the Bitronix transaction manager. By default, Ehcache 3 will auto-detect it but will throw an exception during the cache manager initialization if BTM isn't started. -<2> Configure the cache manager such as it can handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. +<2> Configure the cache manager to handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. <3> Register a cache the normal way. <4> Give it the resources you wish. -<5> Add a `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique +<5> Add an `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique XAResource identifier as some transaction managers require this. <6> The cache is being accessed with no prior call to `transactionManager.begin()` which makes it throw `XACacheException`. @@ -134,16 +134,16 @@ INFO o.e.t.x.j.DefaultJournalProvider - Using persistent XAStore journal [source,java,indent=0] ---- -include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithThreeTiers] +include::{sourcedir32}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithThreeTiers] ---- <1> First start the Bitronix transaction manager. 
By default, Ehcache 3 will auto-detect it but will throw an exception during the cache manager initialization if BTM isn't started. -<2> Configure the cache manager such as it can handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. +<2> Configure the cache manager to handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. <3> Register a `LocalPersistenceService` with your `CacheManager` to use disk storage. <4> Register a cache the normal way. <5> Give it the resources you wish. -<6> Add a `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique +<6> Add an `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique XAResource identifier as some transaction managers require this. <7> Begin a JTA transaction the normal way. <8> Update the value. @@ -152,15 +152,15 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ == Configuring it with XML -You can create a XML file to configure a `CacheManager`, lookup a specific transaction manager and configure +You can create an XML file to configure a `CacheManager`, look up a specific transaction manager and configure XA caches: [source,xml] ---- -include::{sourcedir31}/transactions/src/test/resources/docs/configs/xa-getting-started.xml[tags=gettingStarted] +include::{sourcedir32}/transactions/src/test/resources/docs/configs/xa-getting-started.xml[tags=gettingStarted] ---- -<1> Declare a `TransactionManagerLookup` that will lookup your transaction manager. +<1> Declare a `TransactionManagerLookup` that will look up your transaction manager. <2> Configure a `xaCache` cache the normal way. <3> Configure `xaCache` as an XA cache, giving it `xaCache` as its unique XAResource ID. 
@@ -168,12 +168,12 @@ In order to parse an XML configuration, you can use the `XmlConfiguration` type: [source,java] ---- -include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithXMLConfig] +include::{sourcedir32}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithXMLConfig] ---- <1> The Bitronix transaction manager must be started before the cache manager is initialized. <2> Create a `URL` to your XML file's location. -<3> Instantiate a `XmlConfiguration` passing it the XML file's `URL`. +<3> Instantiate an `XmlConfiguration` passing it the XML file's `URL`. <4> Using the static `org.ehcache.config.builders.CacheManagerBuilder.newCacheManager(org.ehcache.config.Configuration)` lets you create your `CacheManager` instance using the `Configuration` from the `XmlConfiguration`. @@ -181,14 +181,14 @@ And here is what the `BitronixTransactionManagerLookup` implementation looks lik [source,java] ---- -include::{sourcedir31}/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java[tag=BitronixLookup] +include::{sourcedir32}/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java[tag=BitronixLookup] ---- -<1> The `TransactionManagerLookup` interface must be implemented and the offer a no-arg constructor. +<1> The `TransactionManagerLookup` interface must be implemented and offer a no-arg constructor. <2> The `lookupTransactionManagerWrapper()` method must return a `TransactionManagerWrapper` instance. <3> Here is the check that makes sure BTM is started. <4> The `TransactionManagerWrapper` class is constructed with both the `javax.transaction.TransactionManager` - instance as well as a `XAResourceRegistry` instance. The latter is used to register the + instance and an `XAResourceRegistry` instance. 
The latter is used to register the `javax.transaction.xa.XAResource` instances of the cache with the transaction manager using an implementation-specific mechanism. If your JTA implementation doesn't require that, you can use the `NullXAResourceRegistry` instead. diff --git a/docs/src/docs/asciidoc/user/xml.adoc b/docs/src/docs/asciidoc/user/xml.adoc index 64b5da166c..5878280a5f 100644 --- a/docs/src/docs/asciidoc/user/xml.adoc +++ b/docs/src/docs/asciidoc/user/xml.adoc @@ -1,9 +1,9 @@ --- --- = XML Configuration -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -16,18 +16,17 @@ Using an XML file you can configure a `CacheManager` at creation time, according === `` root element -The root element of our XML configuration. One `` element and, by implication, one XML file, -provides the definition for a `CacheManager`. With Ehcache 3, however, you may create multiple -`CacheManager` instances using the same XML configuration file. Unlike the JSR-107 -`javax.cache.spi.CachingProvider`, Ehcache does not maintain a registry of `CacheManager` instances. +The root element of our XML configuration. One `` element in an XML file provides the definition for a `CacheManager`. +NOTE: Ehcache allows for creating multiple `CacheManager` instances using the same XML configuration file. +In contrast to the JSR-107 `javax.cache.spi.CachingProvider`, Ehcache does not maintain a registry of `CacheManager` instances. === `` elements -`` elements are an extension point for specifying `CacheManager` managed services. +`` elements are extension points for specifying services managed by the `CacheManager`. 
Each `Service` defined in this way is managed with the -same lifecycle as the `CacheManager` -- for each `Service` defined for a `CacheManager, the `Service.start` +same lifecycle as the `CacheManager` -- for each `Service` defined for a `CacheManager`, the `Service.start` is called during `CacheManager.init` processing and the `Service.stop` method is called during `CacheManager.close` processing. @@ -38,7 +37,7 @@ JSR-107 uses this extension point of the XML configuration (and Ehcache 3's modu === `` element -A `` element represents `Serializers` configured at `CacheManager` level. +A `` element represents `Serializers` configured at `CacheManager` level. It is a collection of `` elements that require a `type` and a fully qualified class name of the `Serializer`. === `` element @@ -55,7 +54,7 @@ It requires the `directory` location where data needs be stored on disk. A `` element represent a `Cache` instance that will be created and managed by the `CacheManager`. Each `` requires the `alias` attribute, used at runtime to retrieve the corresponding `Cache` instance using -the `org.ehcache.CacheManager.getCache(String, Class, Class)` method. The optional `uses-template` attribute, lets you reference +the `org.ehcache.CacheManager.getCache(String, Class, Class)` method. The optional `uses-template` attribute lets you reference a `` element's `name` attribute. See the <> for further details on using them. @@ -74,11 +73,30 @@ Supported nested elements are optional: `` elements represent a uniquely named (specified using the mandatory `name` attribute) template for `` elements to inherit from. A `` element that references a `` by its `name` using the `uses-template` attribute, will inherit all properties of the ``. A `` -can override these properties as it needs. +can override these properties as required. A `` element may contain all the same child elements as a `` element. -NOTE: We've setup a complete configuration <> to inspire you. 
+NOTE: We've set up a complete configuration <> to inspire you. + +== Property replacement in XML configuration files + +Java system properties can be referenced inside XML configuration files. +The property value will replace the property reference during the configuration parsing. + +This is done by using the `${prop.name}` syntax. +It is supported in all attributes and elements values that accept the `${}` characters as legal characters. +This currently rules out all numbers, mostly used in sizing things, and identifiers, such as cache and template names. + +WARNING: If the system property does not exist, this will make the configuration parsing fail. + +A classical use case for this feature is for disk files location inside the `directory` attribute of the `persistence` tag: + +[source,xml] +---- + +---- +<1> Here `user.home` will be replaced by the value of the system property, something like `/home/user` == XML programmatic parsing @@ -87,26 +105,27 @@ NOTE: If you are obtaining your `CacheManager` through the JSR-107 API, what fol [source,java,indent=0] ---- -include::{sourcedir31}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlConfig] +include::{sourcedir32}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlConfig] ---- <1> Obtain a `URL` to your XML file's location <2> Instantiate an `XmlConfiguration` passing the XML file's URL to it <3> Using the static `org.ehcache.config.builders.CacheManagerBuilder.newCacheManager(org.ehcache.config.Configuration)` allows you - to create your `CacheManager` instance using the `Configuration` from the `XmlConfiguration` + to create your `CacheManager` instance using the `Configuration` from the `XmlConfiguration`. +<4> Initialize the `cacheManager` before it is used. We can also use `` declared in the XML file to seed instances of `CacheConfigurationBuilder`. In order -to use a `` element from a XML file, e.g. 
the `/my-config.xml` contained this XML fragment: +to use a `` element from an XML file, e.g. the `/my-config.xml` contains this XML fragment: [source,xml,indent=0] ---- -include::{sourcedir31}/xml/src/test/resources/configs/docs/template-sample.xml[tag=templateSample] +include::{sourcedir32}/xml/src/test/resources/configs/docs/template-sample.xml[tag=templateSample] ---- Creating a `CacheConfigurationBuilder` of that `example` `` element, would be done as follows: [source,java,indent=0] ---- -include::{sourcedir31}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlTemplate] +include::{sourcedir32}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlTemplate] ---- <1> Creates a builder, inheriting the capacity constraint of 200 entries <2> The inherent properties can be overridden by simply providing a different value prior to building the `CacheConfiguration` diff --git a/docs/src/docs/asciidoc/user/xsds.adoc b/docs/src/docs/asciidoc/user/xsds.adoc index 16e20cf48b..8a76c40e36 100644 --- a/docs/src/docs/asciidoc/user/xsds.adoc +++ b/docs/src/docs/asciidoc/user/xsds.adoc @@ -1,9 +1,9 @@ --- --- = Ehcache XSDs -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -25,7 +25,7 @@ endif::notBuildingForSite[] [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/public-xsds-location.xml[tag=xsdLocations] +include::{sourcedir32}/107/src/test/resources/org/ehcache/docs/public-xsds-location.xml[tag=xsdLocations] ---- [[core]] @@ -33,7 +33,7 @@ include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/public-xsds-locat [source,xsd,indent=0] ---- -include::{sourcedir31}/xml/src/main/resources/ehcache-core.xsd[lines=18..-1] +include::{sourcedir32}/xml/src/main/resources/ehcache-core.xsd[lines=18..-1] ---- [[jsr-107-extension]] @@ -41,12 +41,12 @@ 
include::{sourcedir31}/xml/src/main/resources/ehcache-core.xsd[lines=18..-1] [source,xsd,indent=0] ---- -include::{sourcedir31}/107/src/main/resources/ehcache-107ext.xsd[lines=18..-1] +include::{sourcedir32}/107/src/main/resources/ehcache-107ext.xsd[lines=18..-1] ---- == XA transactions extension [source,xsd,indent=0] ---- -include::{sourcedir31}/transactions/src/main/resources/ehcache-tx-ext.xsd[lines=18..-1] +include::{sourcedir32}/transactions/src/main/resources/ehcache-tx-ext.xsd[lines=18..-1] ---- diff --git a/gradle.properties b/gradle.properties index 71b12fe25b..d6918002a4 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,5 +1,9 @@ sonatypeUser = OVERRIDE_ME sonatypePwd = OVERRIDE_ME +deployUrl = https://oss.sonatype.org/service/local/staging/deploy/maven2/ + # Enable the daemon by adding org.gradle.daemon in USER_HOME/.gradle/gradle.properties -org.gradle.parallel=true \ No newline at end of file +org.gradle.parallel=true + +java6Home=/Library/Java/JavaVirtualMachines/1.6.0_65-b14-462.jdk/Contents/Home diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index ca78035ef0..6ffa237849 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 88ef034727..cc1e0a8410 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ -#Wed Jun 15 22:53:20 CEST 2016 +#Fri Nov 18 10:33:07 CET 2016 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-2.14-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-3.2-all.zip diff --git a/gradlew b/gradlew index 27309d9231..9aa616c273 100755 --- a/gradlew +++ b/gradlew @@ -161,4 +161,9 @@ function splitJvmOpts() { eval splitJvmOpts 
$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [[ "$(uname)" == "Darwin" ]] && [[ "$HOME" == "$PWD" ]]; then + cd "$(dirname "$0")" +fi + exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" diff --git a/gradlew.bat b/gradlew.bat index 832fdb6079..f9553162f1 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -49,7 +49,6 @@ goto fail @rem Get command-line arguments, handling Windows variants if not "%OS%" == "Windows_NT" goto win9xME_args -if "%@eval[2+2]" == "4" goto 4NT_args :win9xME_args @rem Slurp the command line arguments. @@ -60,11 +59,6 @@ set _SKIP=2 if "x%~1" == "x" goto execute set CMD_LINE_ARGS=%* -goto execute - -:4NT_args -@rem Get arguments from the 4NT Shell from JP Software -set CMD_LINE_ARGS=%$ :execute @rem Setup the command line diff --git a/impl/build.gradle b/impl/build.gradle index 8fc63c184d..aa23d8388e 100644 --- a/impl/build.gradle +++ b/impl/build.gradle @@ -19,7 +19,9 @@ apply plugin: EhDeploy dependencies { compile project(':api'), project(':core') compile group: 'org.terracotta', name: 'offheap-store', version: parent.offheapVersion - compile group: 'org.ehcache', name: 'sizeof', version: parent.sizeofVersion + compile (group: 'org.ehcache', name: 'sizeof', version: parent.sizeofVersion) { + exclude group:'org.slf4j', module:'slf4j-api' + } testCompile project(path: ':core-spi-test'), 'org.ow2.asm:asm-all:5.0.4' } diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java b/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java index 442c2e7d91..1f8df4a384 100644 --- a/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java @@ -63,8 +63,8 @@ public class 
CacheConfigurationBuilder implements Builder evictionAdvisor; private ResourcePools resourcePools; - private Class keyType; - private Class valueType; + private Class keyType; + private Class valueType; /** * Creates a new instance ready to produce a {@link CacheConfiguration} with key type {@code } and with value type @@ -96,13 +96,32 @@ public static CacheConfigurationBuilder newCacheConfigurationBuilde return new CacheConfigurationBuilder(keyType, valueType, resourcePoolsBuilder.build()); } + /** + * Creates a new instance ready to produce a {@link CacheConfiguration} functionally equivalent to the supplied configuration. + * + * @param configuration seed configuration + * @param the key type + * @param the value type + * @return a {@code CacheConfigurationBuilder} + */ + public static CacheConfigurationBuilder newCacheConfigurationBuilder(CacheConfiguration configuration) { + CacheConfigurationBuilder builder = newCacheConfigurationBuilder(configuration.getKeyType(), configuration.getValueType(), configuration.getResourcePools()) + .withClassLoader(configuration.getClassLoader()) + .withEvictionAdvisor(configuration.getEvictionAdvisor()) + .withExpiry(configuration.getExpiry()); + for (ServiceConfiguration serviceConfig : configuration.getServiceConfigurations()) { + builder = builder.add(serviceConfig); + } + return builder; + } + private CacheConfigurationBuilder(Class keyType, Class valueType, ResourcePools resourcePools) { this.keyType = keyType; this.valueType = valueType; this.resourcePools = resourcePools; } - private CacheConfigurationBuilder(CacheConfigurationBuilder other) { + private CacheConfigurationBuilder(CacheConfigurationBuilder other) { this.keyType = other.keyType; this.valueType = other.valueType; this.expiry = other.expiry; @@ -342,7 +361,7 @@ public CacheConfigurationBuilder withLoaderWriter(Class withKeySerializingCopier() { CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder(this); 
removeExistingCopierConfigFor(DefaultCopierConfiguration.Type.KEY, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)); + otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); return otherBuilder; } @@ -356,7 +375,7 @@ public CacheConfigurationBuilder withKeySerializingCopier() { public CacheConfigurationBuilder withValueSerializingCopier() { CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder(this); removeExistingCopierConfigFor(DefaultCopierConfiguration.Type.VALUE, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)); + otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); return otherBuilder; } diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java b/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java index 2de7ef7c66..e7903a6feb 100644 --- a/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java @@ -100,7 +100,8 @@ private CacheManagerBuilder(CacheManagerBuilder builder, ConfigurationBuilder } /** - * Creates a new {@link CacheManager} based on the provided configuration + * Creates a new {@link CacheManager} based on the provided configuration. + * The returned {@code CacheManager} is uninitialized. * * @param configuration the configuration to use * @return a {@code CacheManager} @@ -374,11 +375,13 @@ public static CacheManagerBuilder newCacheManagerBuilder() { } /** - * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager}. 
+ * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual + * level of persistence is configured on the disk resource pool per cache. * * @param location the file location for persistent data * @return a {@code CacheManagerConfiguration} * + * @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean) * @see #with(CacheManagerConfiguration) * @see PersistentCacheManager */ diff --git a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java b/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java index 2a05d10af5..1f60dbf2fc 100644 --- a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java @@ -17,54 +17,53 @@ package org.ehcache.config.builders; import org.ehcache.Cache; +import org.ehcache.CachePersistenceException; +import org.ehcache.UserManagedCache; import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.ResourceType; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.Ehcache; import org.ehcache.core.EhcacheWithLoaderWriter; import org.ehcache.core.InternalCache; import org.ehcache.core.PersistentUserManagedEhcache; -import org.ehcache.UserManagedCache; import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.ResourceType; +import org.ehcache.core.events.CacheEventDispatcher; +import org.ehcache.core.events.CacheEventListenerConfiguration; +import org.ehcache.core.events.CacheEventListenerProvider; +import org.ehcache.core.internal.service.ServiceLocator; import 
org.ehcache.core.internal.store.StoreConfigurationImpl; -import org.ehcache.core.spi.store.heap.SizeOfEngine; -import org.ehcache.impl.events.CacheEventDispatcherImpl; import org.ehcache.core.internal.store.StoreSupport; +import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.core.spi.LifeCycled; +import org.ehcache.core.spi.LifeCycledAdapter; +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.heap.SizeOfEngine; +import org.ehcache.core.spi.store.heap.SizeOfEngineProvider; import org.ehcache.event.CacheEventListener; -import org.ehcache.core.events.CacheEventListenerConfiguration; -import org.ehcache.core.events.CacheEventListenerProvider; +import org.ehcache.expiry.Expirations; +import org.ehcache.expiry.Expiry; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.impl.internal.events.DisabledCacheEventNotificationService; -import org.ehcache.CachePersistenceException; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.impl.events.CacheEventDispatcherImpl; +import org.ehcache.impl.internal.events.DisabledCacheEventNotificationService; import org.ehcache.impl.internal.spi.event.DefaultCacheEventListenerProvider; -import org.ehcache.core.spi.LifeCycled; -import org.ehcache.core.spi.LifeCycledAdapter; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.persistence.PersistableResourceService; -import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.copy.Copier; import 
org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; -import org.ehcache.core.spi.service.LocalPersistenceService; -import org.ehcache.core.spi.store.heap.SizeOfEngineProvider; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -79,10 +78,11 @@ import static org.ehcache.config.ResourceType.Core.DISK; import static org.ehcache.config.ResourceType.Core.OFFHEAP; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT; -import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; /** * The {@code UserManagedCacheBuilder} enables building {@link UserManagedCache}s using a fluent style. 
@@ -101,13 +101,6 @@ */ public class UserManagedCacheBuilder> implements Builder { - @ServiceDependencies(Store.Provider.class) - private static class ServiceDeps { - private ServiceDeps() { - throw new UnsupportedOperationException("This is an annotation placeholder, not to be instantiated"); - } - } - private static final Logger LOGGER = LoggerFactory.getLogger(UserManagedCacheBuilder.class); private static final AtomicLong instanceId = new AtomicLong(0L); @@ -169,18 +162,17 @@ private UserManagedCacheBuilder(UserManagedCacheBuilder toCopy) { this.sizeOfUnit = toCopy.sizeOfUnit; } - T build(ServiceLocator serviceLocator) throws IllegalStateException { + T build(ServiceLocator.DependencySet serviceLocatorBuilder) throws IllegalStateException { validateListenerConfig(); + ServiceLocator serviceLocator; try { for (ServiceCreationConfiguration serviceCreationConfig : serviceCreationConfigurations) { - Service service = serviceLocator.getOrCreateServiceFor(serviceCreationConfig); - if (service == null) { - throw new IllegalArgumentException("Couldn't resolve Service " + serviceCreationConfig.getServiceType().getName()); - } + serviceLocatorBuilder = serviceLocatorBuilder.with(serviceCreationConfig); } - serviceLocator.loadDependenciesOf(ServiceDeps.class); + serviceLocatorBuilder = serviceLocatorBuilder.with(Store.Provider.class); + serviceLocator = serviceLocatorBuilder.build(); serviceLocator.startAllServices(); } catch (Exception e) { throw new IllegalStateException("UserManagedCacheBuilder failed to build.", e); @@ -191,12 +183,12 @@ T build(ServiceLocator serviceLocator) throws IllegalStateException { if (keyCopier != null) { serviceConfigsList.add(new DefaultCopierConfiguration(keyCopier, DefaultCopierConfiguration.Type.KEY)); } else if (useKeySerializingCopier) { - serviceConfigsList.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)); + serviceConfigsList.add(new 
DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); } if (valueCopier != null) { serviceConfigsList.add(new DefaultCopierConfiguration(valueCopier, DefaultCopierConfiguration.Type.VALUE)); } else if (useValueSerializingCopier) { - serviceConfigsList.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)); + serviceConfigsList.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } CacheConfiguration cacheConfig = new BaseCacheConfiguration(keyType, valueType, evictionAdvisor, @@ -210,20 +202,20 @@ T build(ServiceLocator serviceLocator) throws IllegalStateException { if (id == null) { throw new IllegalStateException("Persistent user managed caches must have an id set"); } - final LocalPersistenceService persistenceService = serviceLocator.getService(LocalPersistenceService.class); + final DiskResourceService diskResourceService = serviceLocator.getService(DiskResourceService.class); if (!resourcePools.getPoolForResource(ResourceType.Core.DISK).isPersistent()) { try { - persistenceService.destroy(id); + diskResourceService.destroy(id); } catch (CachePersistenceException cpex) { throw new RuntimeException("Unable to clean-up persistence space for non-restartable cache " + id, cpex); } } try { - final PersistableResourceService.PersistenceSpaceIdentifier identifier = persistenceService.getPersistenceSpaceIdentifier(id, cacheConfig); + final PersistableResourceService.PersistenceSpaceIdentifier identifier = diskResourceService.getPersistenceSpaceIdentifier(id, cacheConfig); lifeCycledList.add(new LifeCycledAdapter() { @Override public void close() throws Exception { - persistenceService.releasePersistenceSpaceIdentifier(identifier); + diskResourceService.releasePersistenceSpaceIdentifier(identifier); } }); serviceConfigsList.add(identifier); @@ -275,7 +267,7 @@ public void close() throws Exception { if 
(resources.contains(OFFHEAP) || resources.contains(DISK)) { throw new RuntimeException(e); } else { - LOGGER.debug("Could not create serializers for user managed cache {}", id, e); + LOGGER.debug("Serializers for cache '{}' failed creation ({}). However, depending on the configuration, they might not be needed", id, e.getMessage()); } } } @@ -304,13 +296,13 @@ public void close() throws Exception { eventDispatcher.setStoreEventSource(store.getStoreEventSource()); if (persistent) { - LocalPersistenceService persistenceService = serviceLocator - .getService(LocalPersistenceService.class); - if (persistenceService == null) { + DiskResourceService diskResourceService = serviceLocator + .getService(DiskResourceService.class); + if (diskResourceService == null) { throw new IllegalStateException("No LocalPersistenceService could be found - did you configure one?"); } - PersistentUserManagedEhcache cache = new PersistentUserManagedEhcache(cacheConfig, store, persistenceService, cacheLoaderWriter, eventDispatcher, id); + PersistentUserManagedEhcache cache = new PersistentUserManagedEhcache(cacheConfig, store, diskResourceService, cacheLoaderWriter, eventDispatcher, id); registerListeners(cache, serviceLocator, lifeCycledList); for (LifeCycled lifeCycled : lifeCycledList) { cache.addHook(lifeCycled); @@ -394,7 +386,7 @@ T cast(UserManagedCache cache) { * @throws IllegalStateException if the user managed cache cannot be built */ public final T build(final boolean init) throws IllegalStateException { - final T build = build(new ServiceLocator(services.toArray(new Service[services.size()]))); + final T build = build(dependencySet().with(services)); if (init) { build.init(); } diff --git a/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java b/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java index 8f15d56368..4c575fe640 100644 --- 
a/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java +++ b/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java @@ -16,6 +16,7 @@ package org.ehcache.impl.config.copy; +import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; import org.ehcache.impl.internal.classes.ClassInstanceProviderConfiguration; import org.ehcache.spi.copy.Copier; import org.ehcache.spi.copy.CopyProvider; @@ -93,7 +94,9 @@ public DefaultCopyProviderConfiguration addCopierFor(Class clazz, Class> configuration = (ClassInstanceConfiguration) new DefaultCopierConfiguration(copierClass); + getDefaults().put(clazz, configuration); return this; } } diff --git a/impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java b/impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java index da46c4a831..679938fc46 100644 --- a/impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java +++ b/impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java @@ -44,6 +44,7 @@ public CacheManagerPersistenceConfiguration(final File rootDirectory) { * Transforms the builder received in one that returns a {@link PersistentCacheManager}. 
*/ @Override + @SuppressWarnings("unchecked") public CacheManagerBuilder builder(final CacheManagerBuilder other) { return (CacheManagerBuilder)other.using(this); } diff --git a/impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java b/impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java index 6e155c9c25..a0d42a79c2 100644 --- a/impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java +++ b/impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java @@ -46,6 +46,7 @@ public UserManagedPersistenceContext(String identifier, LocalPersistenceService * Transforms the builder received in one that returns a {@link PersistentUserManagedCache}. */ @Override + @SuppressWarnings("unchecked") public UserManagedCacheBuilder> builder(UserManagedCacheBuilder> builder) { return (UserManagedCacheBuilder>) builder.identifier(identifier).using(persistenceService); } diff --git a/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java b/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java index 55dc152642..c59425109d 100644 --- a/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java +++ b/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java @@ -20,19 +20,21 @@ import java.util.LinkedHashMap; import java.util.Map; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * {@link ServiceCreationConfiguration} for the default {@link SerializationProvider}. 
*/ public class DefaultSerializationProviderConfiguration implements ServiceCreationConfiguration { - private final Map, Class>> transientSerializers = new LinkedHashMap, Class>>(); - private final Map, Class>> persistentSerializers = new LinkedHashMap, Class>>(); + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultSerializationProviderConfiguration.class); + + private final Map, Class>> defaultSerializers = new LinkedHashMap, Class>>(); /** * Creates a new configuration instance. @@ -47,8 +49,7 @@ public DefaultSerializationProviderConfiguration() { * @param other the other to copy from */ public DefaultSerializationProviderConfiguration(DefaultSerializationProviderConfiguration other) { - transientSerializers.putAll(other.transientSerializers); - persistentSerializers.putAll(other.persistentSerializers); + defaultSerializers.putAll(other.defaultSerializers); } /** @@ -94,42 +95,21 @@ public DefaultSerializationProviderConfiguration addSerializerFor(Class s throw new NullPointerException("Serializer class cannot be null"); } - boolean transientConstructorPresent; - boolean persistentConstructorPresent; - - if(transientConstructorPresent = isConstructorPresent(serializerClass, ClassLoader.class)) { - if (!overwrite && transientSerializers.containsKey(serializableClass)) { - throw new IllegalArgumentException("Duplicate transient serializer for class : " + serializableClass.getName()); - } else { - transientSerializers.put(serializableClass, serializerClass); - } - } - - if (persistentConstructorPresent = isConstructorPresent(serializerClass, ClassLoader.class, StateRepository.class)) { - if (!overwrite && persistentSerializers.containsKey(serializableClass)) { - throw new IllegalArgumentException("Duplicate persistent serializer for class : " + serializableClass.getName()); - } else { - persistentSerializers.put(serializableClass, serializerClass); - } + if(!isConstructorPresent(serializerClass, ClassLoader.class)) { + throw new 
IllegalArgumentException("The serializer: " + serializerClass.getName() + " does not have a constructor that takes in a ClassLoader."); } if (isConstructorPresent(serializerClass, ClassLoader.class, FileBasedPersistenceContext.class)) { - if (persistentConstructorPresent) { - throw new IllegalArgumentException("Serializer cannot have constructors taking (ClassLoader, StateRepository) and (ClassLoader, FileBasedPersistenceContext)" + - " - you should remove the second one as it is deprecated since version 3.1.0"); - } - persistentConstructorPresent = true; - if (!overwrite && persistentSerializers.containsKey(serializableClass)) { - throw new IllegalArgumentException("Duplicate persistent serializer for class : " + serializableClass.getName()); - } else { - persistentSerializers.put(serializableClass, serializerClass); - } + LOGGER.warn(serializerClass.getName() + " class has a constructor that takes in a FileBasedPersistenceContext. " + + "Support for this constructor has been removed since version 3.2. Consider removing it."); } - if(!transientConstructorPresent && !persistentConstructorPresent) { - throw new IllegalArgumentException("The serializer: " + serializerClass.getName() - + " does not meet the constructor requirements for either transient or persistent caches."); + if (defaultSerializers.containsKey(serializableClass) && !overwrite) { + throw new IllegalArgumentException("Duplicate serializer for class : " + serializableClass.getName()); + } else { + defaultSerializers.put(serializableClass, serializerClass); } + return this; } @@ -142,21 +122,12 @@ private static boolean isConstructorPresent(Class clazz, Class... args) { } } - /** - * Returns the map of class to serializer class for transient serializers. - * - * @return the map from class to serializer class - */ - public Map, Class>> getTransientSerializers() { - return unmodifiableMap(transientSerializers); - } - /** * Returns the map of class to serializer class for persistent serializers. 
* * @return the map from class to serializer class */ - public Map, Class>> getPersistentSerializers() { - return unmodifiableMap(persistentSerializers); + public Map, Class>> getDefaultSerializers() { + return unmodifiableMap(defaultSerializers); } } diff --git a/impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java b/impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java index 2cf92db17d..8864a4177d 100644 --- a/impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java +++ b/impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java @@ -16,6 +16,7 @@ package org.ehcache.impl.copy; +import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.spi.serialization.Serializer; @@ -26,6 +27,17 @@ public final class SerializingCopier extends ReadWriteCopier { private final Serializer serializer; + /** + * Convenience method allowing to represent this copier's class as the expected type in configuration. + * + * @param The type to work on + * @return the proper type + */ + @SuppressWarnings("unchecked") + public static Class> asCopierClass() { + return (Class) SerializingCopier.class; + } + /** * Creates a new copier that will using the provided {@link Serializer}. 
* diff --git a/impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java b/impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java index 7a6246c19e..ba6c72280d 100644 --- a/impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java +++ b/impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java @@ -32,7 +32,7 @@ public abstract class CacheEventAdapter implements CacheEventListener event) { + public final void onEvent(CacheEvent event) { switch (event.getType()) { case CREATED: onCreation(event.getKey(), event.getNewValue()); diff --git a/impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java b/impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java index c1c6a8c66f..20de35f4b1 100644 --- a/impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java +++ b/impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java @@ -60,8 +60,8 @@ public class CacheEventDispatcherImpl implements CacheEventDispatcher syncListenersList = new CopyOnWriteArrayList(); - private final List aSyncListenersList = new CopyOnWriteArrayList(); + private final List> syncListenersList = new CopyOnWriteArrayList>(); + private final List> aSyncListenersList = new CopyOnWriteArrayList>(); private final StoreEventListener eventListener = new StoreListener(); private volatile Cache listenerSource; @@ -85,7 +85,7 @@ public CacheEventDispatcherImpl(ExecutorService unOrderedExecutor, ExecutorServi @Override public void registerCacheEventListener(CacheEventListener listener, EventOrdering ordering, EventFiring firing, EnumSet forEventTypes) { - EventListenerWrapper wrapper = new EventListenerWrapper(listener, firing, ordering, forEventTypes); + EventListenerWrapper wrapper = new EventListenerWrapper(listener, firing, ordering, forEventTypes); registerCacheEventListener(wrapper); } @@ -96,7 +96,7 @@ public void registerCacheEventListener(CacheEventListener * * @param wrapper the listener wrapper to register */ - 
private synchronized void registerCacheEventListener(EventListenerWrapper wrapper) { + private synchronized void registerCacheEventListener(EventListenerWrapper wrapper) { if(aSyncListenersList.contains(wrapper) || syncListenersList.contains(wrapper)) { throw new IllegalStateException("Cache Event Listener already registered: " + wrapper.getListener()); } @@ -126,7 +126,7 @@ private synchronized void registerCacheEventListener(EventListenerWrapper wrappe */ @Override public void deregisterCacheEventListener(CacheEventListener listener) { - EventListenerWrapper wrapper = new EventListenerWrapper(listener); + EventListenerWrapper wrapper = new EventListenerWrapper(listener); if (!removeWrapperFromList(wrapper, aSyncListenersList)) { if (!removeWrapperFromList(wrapper, syncListenersList)) { @@ -141,7 +141,7 @@ public void deregisterCacheEventListener(CacheEventListener listenersList) { + private synchronized boolean removeWrapperFromList(EventListenerWrapper wrapper, List> listenersList) { int index = listenersList.indexOf(wrapper); if (index != -1) { EventListenerWrapper containedWrapper = listenersList.remove(index); @@ -201,15 +201,16 @@ void onEvent(CacheEvent event) { * {@inheritDoc} */ @Override + @SuppressWarnings("unchecked") public List getConfigurationChangeListeners() { List configurationChangeListenerList = new ArrayList(); configurationChangeListenerList.add(new CacheConfigurationChangeListener() { @Override public void cacheConfigurationChange(final CacheConfigurationChangeEvent event) { if (event.getProperty().equals(CacheConfigurationProperty.ADD_LISTENER)) { - registerCacheEventListener((EventListenerWrapper)event.getNewValue()); + registerCacheEventListener((EventListenerWrapper)event.getNewValue()); } else if (event.getProperty().equals(CacheConfigurationProperty.REMOVE_LISTENER)) { - CacheEventListener oldListener = (CacheEventListener)event.getOldValue(); + CacheEventListener oldListener = (CacheEventListener)event.getOldValue(); 
deregisterCacheEventListener(oldListener); } } diff --git a/impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java b/impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java index 73c4025c38..79897ca453 100644 --- a/impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java +++ b/impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java @@ -24,9 +24,9 @@ class EventDispatchTask implements Runnable { private static final Logger LOGGER = LoggerFactory.getLogger(EventDispatchTask.class); private final CacheEvent cacheEvent; - private final Iterable listenerWrappers; + private final Iterable> listenerWrappers; - EventDispatchTask(CacheEvent cacheEvent, Iterable listener) { + EventDispatchTask(CacheEvent cacheEvent, Iterable> listener) { if (cacheEvent == null) { throw new NullPointerException("cache event cannot be null"); } @@ -39,7 +39,7 @@ class EventDispatchTask implements Runnable { @Override public void run() { - for(EventListenerWrapper listenerWrapper : listenerWrappers) { + for(EventListenerWrapper listenerWrapper : listenerWrappers) { if (listenerWrapper.isForEventType(cacheEvent.getType())) { try { listenerWrapper.onEvent(cacheEvent); diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java b/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java index ef5e3ffca3..e7cea1f690 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java +++ b/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java @@ -37,7 +37,9 @@ public ClassInstanceConfiguration(Class clazz, Object... 
arguments) public ClassInstanceConfiguration(T instance) { this.instance = instance; - this.clazz = (Class) instance.getClass(); + @SuppressWarnings("unchecked") + Class instanceClass = (Class) instance.getClass(); + this.clazz = instanceClass; this.arguments = null; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java b/impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java index e23ba543af..1cece698b0 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java +++ b/impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java @@ -252,6 +252,7 @@ * @param the type of keys maintained by this map * @param the type of mapped values */ +@SuppressWarnings("unchecked") public class ConcurrentHashMap extends AbstractMap implements ConcurrentMap, Serializable { private static final long serialVersionUID = 7249069246763182397L; diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java b/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java index 56e5a8e179..78a11d7cc8 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java +++ b/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java @@ -32,7 +32,7 @@ */ abstract class AbstractStoreEventDispatcher implements StoreEventDispatcher { - protected static final StoreEventSink NO_OP_EVENT_SINK = new CloseableStoreEventSink() { + protected static final StoreEventSink NO_OP_EVENT_SINK = new CloseableStoreEventSink() { @Override public void close() { // Do nothing @@ -49,17 +49,17 @@ public void reset() { } @Override - public void removed(Object key, ValueSupplier value) { + public void removed(Object key, ValueSupplier value) { // Do nothing } @Override - public void updated(Object key, ValueSupplier oldValue, Object newValue) { + public void updated(Object key, ValueSupplier 
oldValue, Object newValue) { // Do nothing } @Override - public void expired(Object key, ValueSupplier value) { + public void expired(Object key, ValueSupplier value) { // Do nothing } @@ -69,7 +69,7 @@ public void created(Object key, Object value) { } @Override - public void evicted(Object key, ValueSupplier value) { + public void evicted(Object key, ValueSupplier value) { // Do nothing } }; @@ -83,7 +83,9 @@ protected AbstractStoreEventDispatcher(int dispatcherConcurrency) { if (dispatcherConcurrency <= 0) { throw new IllegalArgumentException("Dispatcher concurrency must be an integer greater than 0"); } - orderedQueues = new LinkedBlockingQueue[dispatcherConcurrency]; + @SuppressWarnings("unchecked") + LinkedBlockingQueue>[] queues = new LinkedBlockingQueue[dispatcherConcurrency]; + orderedQueues = queues; for (int i = 0; i < orderedQueues.length; i++) { orderedQueues[i] = new LinkedBlockingQueue>(10000); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java b/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java index e9fea8a845..acb05eae07 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java +++ b/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java @@ -35,7 +35,7 @@ public CacheEventDispatcherFactory create(ServiceCreationConfiguration getServiceType() { - return CacheEventDispatcherFactory.class; + public Class getServiceType() { + return CacheEventDispatcherFactoryImpl.class; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java b/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java index 6c498f1d02..3f67c65b1f 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java +++ 
b/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java @@ -31,7 +31,9 @@ public ScopedStoreEventDispatcher(int dispatcherConcurrency) { @Override public StoreEventSink eventSink() { if (getListeners().isEmpty()) { - return NO_OP_EVENT_SINK; + @SuppressWarnings("unchecked") + StoreEventSink noOpEventSink = (StoreEventSink) NO_OP_EVENT_SINK; + return noOpEventSink; } else { return new InvocationScopedEventSink(getFilters(), isEventOrdering(), getOrderedQueues(), getListeners()); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java b/impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java index 86ff36f808..b5c3222ad8 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java +++ b/impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java @@ -33,7 +33,9 @@ public ThreadLocalStoreEventDispatcher(int dispatcherConcurrency) { @Override public StoreEventSink eventSink() { if (getListeners().isEmpty()) { - return NO_OP_EVENT_SINK; + @SuppressWarnings("unchecked") + StoreEventSink noOpEventSink = (StoreEventSink) NO_OP_EVENT_SINK; + return noOpEventSink; } else { StoreEventSink eventSink = tlEventSink.get(); if (eventSink == null) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java index 033fa7f8e2..1d74f90a0c 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java @@ -197,7 +197,7 @@ public void run() { // execute the batch operations for (BatchOperation batch : batches) { try { - batch.performBatchOperation(cacheLoaderWriter); + 
batch.performOperation(cacheLoaderWriter); } catch (Exception e) { LOGGER.warn("Exception while bulk processing in write behind queue", e); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java index 7b274f252d..cabf8dd1e3 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java @@ -69,7 +69,7 @@ protected void addOperation(final SingleOperation operation) { @Override public void run() { try { - operation.performSingleOperation(cacheLoaderWriter); + operation.performOperation(cacheLoaderWriter); } catch (Exception e) { LOGGER.warn("Exception while processing key '{}' write behind queue : {}", operation.getKey(), e); } finally { diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java index 295e8ccd84..5497cd557b 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java @@ -31,6 +31,6 @@ public interface BatchOperation { * Perform the batch operation for a particular batch writer * */ - void performBatchOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception; + void performOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java 
b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java index ad7988a095..2c2ba93c42 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java @@ -41,7 +41,7 @@ public DeleteAllOperation(Iterable entries) { /** * {@inheritDoc} */ - public void performBatchOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception { + public void performOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception { cacheLoaderWriter.deleteAll(entries); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java index 7f6a35dca0..6d92cad624 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java @@ -15,9 +15,6 @@ */ package org.ehcache.impl.internal.loaderwriter.writebehind.operations; -import java.util.ArrayList; -import java.util.List; - import org.ehcache.spi.loaderwriter.CacheLoaderWriter; /** @@ -49,18 +46,10 @@ public DeleteOperation(K key, long creationTime) { this.creationTime = creationTime; } - public void performSingleOperation(CacheLoaderWriter cacheLoaderWriter) throws Exception { + public void performOperation(CacheLoaderWriter cacheLoaderWriter) throws Exception { cacheLoaderWriter.delete(key); } - public BatchOperation createBatchOperation(List> operations) { - final List keys = new ArrayList(); - for (KeyBasedOperation operation : operations) { - keys.add(operation.getKey()); - } - return new DeleteAllOperation(keys); - } - @Override public K getKey() { return 
this.key; @@ -78,8 +67,8 @@ public int hashCode() { @Override public boolean equals(Object other) { - if (other instanceof DeleteOperation) { - return getCreationTime() == ((DeleteOperation) other).getCreationTime() && getKey().equals(((DeleteOperation) other).getKey()); + if (other instanceof DeleteOperation) { + return getCreationTime() == ((DeleteOperation) other).getCreationTime() && getKey().equals(((DeleteOperation) other).getKey()); } else { return false; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java index 33ffa9efba..23ea82ecb9 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java @@ -15,8 +15,6 @@ */ package org.ehcache.impl.internal.loaderwriter.writebehind.operations; -import java.util.List; - import org.ehcache.spi.loaderwriter.CacheLoaderWriter; /** @@ -30,15 +28,6 @@ public interface SingleOperation extends KeyBasedOperation { * Perform this operation as a single execution with the provided cache writer * */ - void performSingleOperation(CacheLoaderWriter cacheLoaderWriter) throws Exception; + void performOperation(CacheLoaderWriter cacheLoaderWriter) throws Exception; - /** - * Creates a batch operation that corresponds to the operation type of this single operation. - *

- * This batch operation will not be stored in the queue anymore and is solely used for structuring. - * The data from the single operation will already be processed in the final form that will be expected by the - * {@code CacheWriter} that will be used to execute the batch operation. - * - */ - BatchOperation createBatchOperation(List> operations); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java index e1aa26b634..bccb6806b7 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java @@ -39,7 +39,7 @@ public WriteAllOperation(Iterable> this.entries = entries; } - public void performBatchOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception { + public void performOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception { cacheLoaderWriter.writeAll(entries); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java index bb181c60cf..0386ce9b0a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java @@ -15,10 +15,6 @@ */ package org.ehcache.impl.internal.loaderwriter.writebehind.operations; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - import org.ehcache.spi.loaderwriter.CacheLoaderWriter; /** @@ -54,35 +50,10 @@ public WriteOperation(K k, V v, long creationTime) { } @Override - public void 
performSingleOperation(CacheLoaderWriter cacheWriter) throws Exception { + public void performOperation(CacheLoaderWriter cacheWriter) throws Exception { cacheWriter.write(key, value); } - @Override - public BatchOperation createBatchOperation(List> operations) { - final List> entries = new ArrayList>(); - for (final KeyBasedOperation operation : operations) { - entries.add(new Map.Entry() { - - @Override - public K getKey() { - return ((WriteOperation)operation).key; - } - - @Override - public V getValue() { - return ((WriteOperation)operation).value; - } - - @Override - public V setValue(V value) { - throw new UnsupportedOperationException("Not Supported."); - } - }); - } - return new WriteAllOperation(entries); - } - @Override public K getKey() { return this.key; @@ -107,7 +78,7 @@ public int hashCode() { @Override public boolean equals(Object other) { if (other instanceof WriteOperation) { - return getCreationTime() == ((WriteOperation) other).getCreationTime() && getKey().equals(((WriteOperation) other).getKey()); + return getCreationTime() == ((WriteOperation) other).getCreationTime() && getKey().equals(((WriteOperation) other).getKey()); } else { return false; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java b/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java new file mode 100644 index 0000000000..46a6c24d4f --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java @@ -0,0 +1,35 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.persistence; + +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.impl.persistence.DefaultDiskResourceService; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +public class DefaultDiskResourceServiceFactory implements ServiceFactory { + + @Override + public DefaultDiskResourceService create(final ServiceCreationConfiguration serviceConfiguration) { + return new DefaultDiskResourceService(); + } + + @Override + public Class getServiceType() { + return DefaultDiskResourceService.class; + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java b/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java index 0b7b4fcc99..55ae580bc2 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java +++ b/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java @@ -25,6 +25,7 @@ /** * @author Alex Snaps */ +@ServiceFactory.RequiresConfiguration public class DefaultLocalPersistenceServiceFactory implements ServiceFactory { @Override diff --git a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java b/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java index 19592634df..8962df8e1f 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java +++ 
b/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java @@ -43,7 +43,7 @@ public DefaultSizeOfEngine(long maxObjectGraphSize, long maxObjectSize) { this.maxObjectGraphSize = maxObjectGraphSize; this.maxObjectSize = maxObjectSize; this.sizeOf = SizeOf.newInstance(filterSource.getFilters()); - this.onHeapKeyOffset = sizeOf.deepSizeOf(new CopiedOnHeapKey(new Object(), new IdentityCopier())); + this.onHeapKeyOffset = sizeOf.deepSizeOf(new CopiedOnHeapKey(new Object(), new IdentityCopier())); this.chmTreeBinOffset = sizeOf.deepSizeOf(ConcurrentHashMap.FAKE_TREE_BIN); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java b/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java index f549c75661..f879cf8963 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java +++ b/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java @@ -55,7 +55,7 @@ public SizeOfEngine createSizeOfEngine(ResourceUnit resourceUnit, ServiceConfigu if(!isByteSized) { return new NoopSizeOfEngine(); // Noop Size of Engine } - DefaultSizeOfEngineConfiguration config = ServiceLocator.findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, serviceConfigs); + DefaultSizeOfEngineConfiguration config = ServiceLocator.findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, (Object[]) serviceConfigs); if(config != null) { long maxSize = config.getUnit().toBytes(config.getMaxObjectSize()); return new DefaultSizeOfEngine(config.getMaxObjectGraphSize(), maxSize); diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java b/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java index 8cf3a07440..765d42b68b 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java +++ b/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java @@ 
-40,6 +40,7 @@ public class DefaultCopyProvider extends ClassInstanceProvider, Copier< private static final Logger LOG = LoggerFactory.getLogger(DefaultCopyProvider.class); + @SuppressWarnings("unchecked") public DefaultCopyProvider(DefaultCopyProviderConfiguration configuration) { super(configuration, (Class) DefaultCopierConfiguration.class); } @@ -87,9 +88,12 @@ private Copier createCopier(Type type, Class clazz, } private Copier createCopier(Class clazz, DefaultCopierConfiguration config, Type type) { + @SuppressWarnings("unchecked") Copier copier = (Copier) newInstance(clazz, config); if (copier == null) { - copier = (Copier) newInstance(clazz, new DefaultCopierConfiguration((Class) IdentityCopier.class, type)); + @SuppressWarnings("unchecked") + Copier defaultInstance = (Copier) newInstance(clazz, new DefaultCopierConfiguration((Class) IdentityCopier.class, type)); + copier = defaultInstance; } return copier; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java b/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java index 35ec028cd2..9d074f1818 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java +++ b/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java @@ -16,29 +16,23 @@ package org.ehcache.impl.internal.spi.serialization; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; -import org.ehcache.CachePersistenceException; import org.ehcache.impl.serialization.ByteArraySerializer; import org.ehcache.impl.serialization.CharSerializer; import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.impl.serialization.CompactJavaSerializer; -import 
org.ehcache.impl.serialization.CompactPersistentJavaSerializer; import org.ehcache.impl.serialization.DoubleSerializer; import org.ehcache.impl.serialization.FloatSerializer; import org.ehcache.impl.serialization.IntegerSerializer; import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.impl.serialization.PlainJavaSerializer; import org.ehcache.impl.serialization.StringSerializer; -import org.ehcache.spi.persistence.StateRepository; -import org.ehcache.spi.persistence.PersistableResourceService; -import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; @@ -57,8 +51,6 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; -import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; - /** * @author Ludovic Orban */ @@ -66,31 +58,25 @@ public class DefaultSerializationProvider implements SerializationProvider { private static final Logger LOG = LoggerFactory.getLogger(DefaultSerializationProvider.class); - private final TransientProvider transientProvider; - private final PersistentProvider persistentProvider; + protected final Map, Class>> serializers; final ConcurrentWeakIdentityHashMap, AtomicInteger> providedVsCount = new ConcurrentWeakIdentityHashMap, AtomicInteger>(); final Set> instantiated = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap, Boolean>()); public DefaultSerializationProvider(DefaultSerializationProviderConfiguration configuration) { if 
(configuration != null) { - transientProvider = new TransientProvider(configuration.getTransientSerializers()); - persistentProvider = new PersistentProvider(configuration.getPersistentSerializers()); + this.serializers = new LinkedHashMap, Class>>(configuration.getDefaultSerializers()); } else { - transientProvider = new TransientProvider(Collections., Class>>emptyMap()); - persistentProvider = new PersistentProvider(Collections., Class>>emptyMap()); + this.serializers = new LinkedHashMap, Class>>(Collections., Class>>emptyMap()); } } @Override public Serializer createKeySerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { - Serializer serializer = (Serializer)getUserProvidedSerializer(find(DefaultSerializerConfiguration.Type.KEY, configs)); + DefaultSerializerConfiguration configuration = find(DefaultSerializerConfiguration.Type.KEY, configs); + Serializer serializer = getUserProvidedSerializer(configuration); if (serializer == null) { - if (findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[])configs) == null) { - serializer = transientProvider.createKeySerializer(clazz, classLoader, configs); - } else { - serializer = persistentProvider.createKeySerializer(clazz, classLoader, configs); - } + serializer = createSerializer(clazz, classLoader, configuration, configs); instantiated.add(serializer); } updateProvidedInstanceCounts(serializer); @@ -99,19 +85,74 @@ public Serializer createKeySerializer(Class clazz, ClassLoader classLo @Override public Serializer createValueSerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... 
configs) throws UnsupportedTypeException { - Serializer serializer = (Serializer)getUserProvidedSerializer(find(DefaultSerializerConfiguration.Type.VALUE, configs)); + DefaultSerializerConfiguration configuration = find(DefaultSerializerConfiguration.Type.VALUE, configs); + Serializer serializer = getUserProvidedSerializer(configuration); if (serializer == null) { - if (findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[])configs) == null) { - serializer = transientProvider.createValueSerializer(clazz, classLoader, configs); - } else { - serializer = persistentProvider.createValueSerializer(clazz, classLoader, configs); - } + serializer = createSerializer(clazz, classLoader, configuration, configs); instantiated.add(serializer); } updateProvidedInstanceCounts(serializer); return serializer; } + private Serializer createSerializer(Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... configs) throws UnsupportedTypeException { + Class> klazz = getSerializerClassFor(clazz, config); + + try { + klazz.getConstructor(ClassLoader.class, FileBasedPersistenceContext.class); + LOG.warn(klazz.getName() + " class has a constructor that takes in a FileBasedPersistenceContext. " + + "Support for this constructor has been removed since version 3.2. 
Consider removing it."); + } catch (NoSuchMethodException e) { + // Ideal + } + + try { + return constructSerializer(clazz, klazz.getConstructor(ClassLoader.class), classLoader); + } catch (NoSuchMethodException e) { + throw new RuntimeException(klazz + " does not have a constructor that takes in a ClassLoader.", e); + } + } + + + private Class> getSerializerClassFor(Class clazz, DefaultSerializerConfiguration config) throws UnsupportedTypeException { + if (config != null) { + Class> configured = config.getClazz(); + if (configured != null) { + return configured; + } + } + + @SuppressWarnings("unchecked") + Class> direct = (Class>) serializers.get(clazz); + if (direct != null) { + return direct; + } + for (Map.Entry, Class>> entry : serializers.entrySet()) { + if (entry.getKey().isAssignableFrom(clazz)) { + @SuppressWarnings("unchecked") + Class> type = (Class>)entry.getValue(); + return type; + } + } + throw new UnsupportedTypeException("No serializer found for type '" + clazz.getName() + "'"); + } + + private Serializer constructSerializer(Class clazz, Constructor> constructor, Object ... 
args) { + try { + Serializer serializer = constructor.newInstance(args); + LOG.debug("Serializer for <{}> : {}", clazz.getName(), serializer); + return serializer; + } catch (InstantiationException e) { + throw new RuntimeException(e); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } catch (IllegalArgumentException e) { + throw new AssertionError(e); + } catch (InvocationTargetException e) { + throw new RuntimeException(e); + } + } + private void updateProvidedInstanceCounts(Serializer serializer) { AtomicInteger currentCount = providedVsCount.putIfAbsent(serializer, new AtomicInteger(1)); if(currentCount != null) { @@ -140,8 +181,14 @@ public void releaseSerializer(final Serializer serializer) throws IOException @Override public void start(ServiceProvider serviceProvider) { - transientProvider.start(serviceProvider); - persistentProvider.start(serviceProvider); + addDefaultSerializerIfNoneRegistered(serializers, Serializable.class, CompactJavaSerializer.asTypedSerializer()); + addDefaultSerializerIfNoneRegistered(serializers, Long.class, LongSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, Integer.class, IntegerSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, Float.class, FloatSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, Double.class, DoubleSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, Character.class, CharSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, String.class, StringSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, byte[].class, ByteArraySerializer.class); } @Override @@ -155,144 +202,6 @@ private static void addDefaultSerializerIfNoneRegistered(Map, Class } } - static class TransientProvider extends AbstractProvider { - - public TransientProvider(Map, Class>> serializers) { - super(serializers); - } - - @Override - protected Serializer createSerializer(String suffix, Class clazz, ClassLoader 
classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... configs) throws UnsupportedTypeException { - try { - Class> klazz = getClassFor(clazz, config, classLoader); - return constructSerializer(clazz, klazz.getConstructor(ClassLoader.class), classLoader); - } catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } - } - - public void start(ServiceProvider serviceProvider) { - addDefaultSerializerIfNoneRegistered(serializers, Serializable.class, (Class) CompactJavaSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Long.class, LongSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Integer.class, IntegerSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Float.class, FloatSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Double.class, DoubleSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Character.class, CharSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, String.class, StringSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, byte[].class, ByteArraySerializer.class); - } - } - - static class PersistentProvider extends AbstractProvider { - - private ServiceProvider serviceProvider; - - private PersistentProvider(Map, Class>> serializers) { - super(serializers); - } - - @Override - protected Serializer createSerializer(String suffix, Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... 
configs) throws UnsupportedTypeException { - Class> klazz = getClassFor(clazz, config, classLoader); - PersistenceSpaceIdentifier space = findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[]) configs); - PersistableResourceService service = serviceProvider.getService(space.getServiceType()); - - String subSpaceName = DefaultSerializationProvider.class.getSimpleName() + suffix; - - try { - Constructor> constructor = klazz.getConstructor(ClassLoader.class, StateRepository.class); - StateRepository stateRepository = service.getStateRepositoryWithin(space, subSpaceName); - return constructSerializer(clazz, constructor, classLoader, stateRepository); - } catch (NoSuchMethodException e) { - if (service instanceof LocalPersistenceService) { - try { - Constructor> constructor = klazz.getConstructor(ClassLoader.class, FileBasedPersistenceContext.class); - FileBasedPersistenceContext context = ((LocalPersistenceService) service).createPersistenceContextWithin(space, subSpaceName); - return constructSerializer(clazz, constructor, classLoader, context); - } catch (NoSuchMethodException nsmex) { - throw new RuntimeException(nsmex); - } catch (CachePersistenceException cpex) { - throw new RuntimeException(cpex); - } - } else { - throw new RuntimeException(e); - } - } catch (CachePersistenceException e) { - throw new RuntimeException(e); - } - } - - public void start(ServiceProvider serviceProvider) { - this.serviceProvider = serviceProvider; - addDefaultSerializerIfNoneRegistered(serializers, Serializable.class, (Class) CompactJavaSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Long.class, LongSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Integer.class, IntegerSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Float.class, FloatSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Double.class, DoubleSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, 
Character.class, CharSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, String.class, StringSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, byte[].class, ByteArraySerializer.class); - } - - } - - static abstract class AbstractProvider { - - protected final Map, Class>> serializers; - - private AbstractProvider(Map, Class>> serializers) { - this.serializers = new LinkedHashMap, Class>>(serializers); - } - - public Serializer createKeySerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { - DefaultSerializerConfiguration conf = find(DefaultSerializerConfiguration.Type.KEY, configs); - return createSerializer("-Key", clazz, classLoader, conf, configs); - } - - public Serializer createValueSerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { - DefaultSerializerConfiguration conf = find(DefaultSerializerConfiguration.Type.VALUE, configs); - return createSerializer("-Value", clazz, classLoader, conf, configs); - } - - protected abstract Serializer createSerializer(String suffix, Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... 
configs) throws UnsupportedTypeException; - - protected Class> getClassFor(Class clazz, DefaultSerializerConfiguration config, ClassLoader classLoader) throws UnsupportedTypeException { - if (config != null) { - Class> configured = config.getClazz(); - if (configured != null) { - return configured; - } - } - - Class> direct = (Class>) serializers.get(clazz); - if (direct != null) { - return direct; - } - for (Map.Entry, Class>> entry : serializers.entrySet()) { - if (entry.getKey().isAssignableFrom(clazz)) { - return (Class>) entry.getValue(); - } - } - throw new UnsupportedTypeException("No serializer found for type '" + clazz.getName() + "'"); - } - - protected Serializer constructSerializer(Class clazz, Constructor> constructor, Object ... args) { - try { - Serializer serializer = constructor.newInstance(args); - LOG.debug("Serializer for <{}> : {}", clazz.getName(), serializer); - return serializer; - } catch (InstantiationException e) { - throw new RuntimeException(e); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } catch (IllegalArgumentException e) { - throw new AssertionError(e); - } catch (InvocationTargetException e) { - throw new RuntimeException(e); - } - } - } - private static Serializer getUserProvidedSerializer(DefaultSerializerConfiguration conf) { if(conf != null) { Serializer instance = conf.getInstance(); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/basic/EmptyValueHolder.java b/impl/src/main/java/org/ehcache/impl/internal/store/basic/EmptyValueHolder.java new file mode 100644 index 0000000000..827744192b --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/internal/store/basic/EmptyValueHolder.java @@ -0,0 +1,75 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.basic; + +import org.ehcache.core.spi.store.Store; + +import java.util.concurrent.TimeUnit; + +/** + * A value holder that always contains null + * + * @author Henri Tremblay + */ +public class EmptyValueHolder implements Store.ValueHolder { + + private static final Store.ValueHolder EMPTY = new EmptyValueHolder(); + + @SuppressWarnings("unchecked") + public static Store.ValueHolder empty() { + return (Store.ValueHolder) EMPTY; + } + + @Override + public V value() { + return null; + } + + @Override + public long creationTime(TimeUnit unit) { + return 0; + } + + @Override + public long expirationTime(TimeUnit unit) { + return 0; + } + + @Override + public boolean isExpired(long expirationTime, TimeUnit unit) { + return false; + } + + @Override + public long lastAccessTime(TimeUnit unit) { + return 0; + } + + @Override + public float hitRate(long now, TimeUnit unit) { + return 0; + } + + @Override + public long hits() { + return 0; + } + + @Override + public long getId() { + return 0; + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java new file mode 100644 index 0000000000..12532fbdfe --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java @@ -0,0 +1,195 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.basic; + +import org.ehcache.Cache; +import org.ehcache.core.CacheConfigurationChangeListener; +import org.ehcache.core.spi.function.BiFunction; +import org.ehcache.core.spi.function.Function; +import org.ehcache.core.spi.function.NullaryFunction; +import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.spi.store.events.StoreEventFilter; +import org.ehcache.core.spi.store.events.StoreEventListener; +import org.ehcache.core.spi.store.events.StoreEventSource; +import org.ehcache.core.spi.store.tiering.AuthoritativeTier; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * A store that doesn't store anything. 
+ * + * @author Henri Tremblay + */ +public class NopStore implements AuthoritativeTier { + + + @Override + public List getConfigurationChangeListeners() { + return Collections.emptyList(); + } + + @Override + public ValueHolder getAndFault(K key) throws StoreAccessException { + return null; + } + + @Override + public ValueHolder computeIfAbsentAndFault(K key, Function mappingFunction) throws StoreAccessException { + return null; + } + + @Override + public boolean flush(K key, ValueHolder valueHolder) { + return false; + } + + @Override + public void setInvalidationValve(InvalidationValve valve) { + + } + + @Override + public ValueHolder get(K key) throws StoreAccessException { + return null; + } + + @Override + public boolean containsKey(K key) throws StoreAccessException { + return false; + } + + @Override + public PutStatus put(K key, V value) throws StoreAccessException { + return PutStatus.PUT; + } + + @Override + public ValueHolder putIfAbsent(K key, V value) throws StoreAccessException { + return EmptyValueHolder.empty(); + } + + @Override + public boolean remove(K key) throws StoreAccessException { + return false; + } + + @Override + public RemoveStatus remove(K key, V value) throws StoreAccessException { + return RemoveStatus.KEY_MISSING; + } + + @Override + public ValueHolder replace(K key, V value) throws StoreAccessException { + return null; + } + + @Override + public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessException { + return ReplaceStatus.MISS_NOT_PRESENT; + } + + @Override + public void clear() throws StoreAccessException { + + } + + @Override + public StoreEventSource getStoreEventSource() { + return new StoreEventSource() { + @Override + public void addEventListener(StoreEventListener eventListener) { + + } + + @Override + public void removeEventListener(StoreEventListener eventListener) { + + } + + @Override + public void addEventFilter(StoreEventFilter eventFilter) { + + } + + @Override + public void 
setEventOrdering(boolean ordering) { + + } + + @Override + public boolean isEventOrdering() { + return false; + } + }; + } + + @Override + public Iterator>> iterator() { + return new Iterator>>() { + @Override + public boolean hasNext() { + return false; + } + + @Override + public Cache.Entry> next() throws StoreAccessException { + return null; + } + }; + } + + @Override + public ValueHolder compute(K key, BiFunction mappingFunction) throws StoreAccessException { + return EmptyValueHolder.empty(); + } + + @Override + public ValueHolder compute(K key, BiFunction mappingFunction, NullaryFunction replaceEqual) throws StoreAccessException { + return null; + } + + @Override + public ValueHolder computeIfAbsent(K key, Function mappingFunction) throws StoreAccessException { + return null; + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction) throws StoreAccessException { + return bulkCompute(keys, remappingFunction, null); + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction, NullaryFunction replaceEqual) throws StoreAccessException { + Map> map = new HashMap>(keys.size()); + for(K key : keys) { + map.put(key, EmptyValueHolder.empty()); + } + return map; + } + + @Override + public Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) throws StoreAccessException { + Map> map = new HashMap>(keys.size()); + for(K key : keys) { + map.put(key, EmptyValueHolder.empty()); + } + return map; + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index c7684a3ce5..7ea597c050 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -19,9 +19,11 @@ import org.ehcache.config.SizedResourcePool; import 
org.ehcache.core.CacheConfigurationChangeListener; import org.ehcache.Status; -import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; +import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; @@ -37,6 +39,8 @@ import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; @@ -44,11 +48,11 @@ import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.ExecutionService; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.statistics.TierOperationOutcomes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.offheapstore.disk.paging.MappedPageSource; @@ -57,6 +61,7 @@ import org.terracotta.offheapstore.disk.storage.FileBackedStorageEngine; import org.terracotta.offheapstore.storage.portability.Portability; import org.terracotta.offheapstore.util.Factory; +import org.terracotta.statistics.MappedOperationStatistic; import org.terracotta.statistics.StatisticsManager; import 
java.io.File; @@ -68,16 +73,19 @@ import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import static java.lang.Math.max; +import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; import static org.terracotta.offheapstore.util.MemoryUnit.BYTES; @@ -88,6 +96,8 @@ public class OffHeapDiskStore extends AbstractOffHeapStore implement private static final Logger LOGGER = LoggerFactory.getLogger(OffHeapDiskStore.class); + private static final String STATISTICS_TAG = "Disk"; + private static final String KEY_TYPE_PROPERTY_NAME = "keyType"; private static final String VALUE_TYPE_PROPERTY_NAME = "valueType"; private static final int DEFAULT_CONCURRENCY = 16; @@ -111,7 +121,7 @@ public class OffHeapDiskStore extends AbstractOffHeapStore implement public OffHeapDiskStore(FileBasedPersistenceContext fileBasedPersistenceContext, ExecutionService executionService, String threadPoolAlias, int writerConcurrency, final Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher, long sizeInBytes) { - super("local-disk", config, timeSource, eventDispatcher); + super(STATISTICS_TAG, config, timeSource, eventDispatcher); this.fileBasedPersistenceContext = fileBasedPersistenceContext; this.executionService = executionService; this.threadPoolAlias = threadPoolAlias; @@ -121,7 +131,7 @@ public OffHeapDiskStore(FileBasedPersistenceContext fileBasedPersistenceContext, if (evictionAdvisor != null) { this.evictionAdvisor = wrap(evictionAdvisor); } else { - this.evictionAdvisor = wrap(Eviction.noAdvice()); + this.evictionAdvisor = 
wrap(noAdvice()); } this.keyType = config.getKeyType(); this.valueType = config.getValueType(); @@ -222,10 +232,7 @@ private EhcachePersistentConcurrentOffHeapClockCache> r 64, evictionAdvisor, mapEvictionListener, false); - EhcachePersistentConcurrentOffHeapClockCache m = new EhcachePersistentConcurrentOffHeapClockCache>(input, evictionAdvisor, factory); - - - + EhcachePersistentConcurrentOffHeapClockCache> m = new EhcachePersistentConcurrentOffHeapClockCache>(input, evictionAdvisor, factory); m.bootstrap(input); return m; @@ -294,12 +301,14 @@ private File getMetadataFile() { return new File(fileBasedPersistenceContext.getDirectory(), "ehcache-disk-store.meta"); } - @ServiceDependencies({TimeSourceService.class, SerializationProvider.class, ExecutionService.class}) + @ServiceDependencies({TimeSourceService.class, SerializationProvider.class, ExecutionService.class, DiskResourceService.class}) public static class Provider implements Store.Provider, AuthoritativeTier.Provider { - private final Set> createdStores = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap, Boolean>()); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); + private final Map, PersistenceSpaceIdentifier> createdStores = new ConcurrentWeakIdentityHashMap, PersistenceSpaceIdentifier>(); private final String defaultThreadPool; private volatile ServiceProvider serviceProvider; + private volatile DiskResourceService diskPersistenceService; public Provider() { this(null); @@ -321,7 +330,23 @@ public int rankAuthority(ResourceType authorityResource, Collection OffHeapDiskStore createStore(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + OffHeapDiskStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + MappedOperationStatistic get = + new MappedOperationStatistic( + store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.DISK.getTierHeight(), "get", STATISTICS_TAG); + StatisticsManager.associate(get).withParent(store); + tieredOps.add(get); + + MappedOperationStatistic evict = + new MappedOperationStatistic( + store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); + StatisticsManager.associate(evict).withParent(store); + tieredOps.add(evict); + + tierOperationStatistics.put(store, tieredOps); + return store; } private OffHeapDiskStore createStoreInternal(Configuration storeConfig, StoreEventDispatcher eventDispatcher, ServiceConfiguration... 
serviceConfigs) { @@ -337,11 +362,6 @@ private OffHeapDiskStore createStoreInternal(Configuration st } MemoryUnit unit = (MemoryUnit)diskPool.getUnit(); - LocalPersistenceService localPersistenceService = serviceProvider.getService(LocalPersistenceService.class); - if (localPersistenceService == null) { - throw new IllegalStateException("No LocalPersistenceService could be found - did you configure it at the CacheManager level?"); - } - String threadPoolAlias; int writerConcurrency; OffHeapDiskStoreConfiguration config = findSingletonAmongst(OffHeapDiskStoreConfiguration.class, (Object[]) serviceConfigs); @@ -353,13 +373,16 @@ private OffHeapDiskStore createStoreInternal(Configuration st writerConcurrency = config.getWriterConcurrency(); } PersistenceSpaceIdentifier space = findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[]) serviceConfigs); + if (space == null) { + throw new IllegalStateException("No LocalPersistenceService could be found - did you configure it at the CacheManager level?"); + } try { - FileBasedPersistenceContext persistenceContext = localPersistenceService.createPersistenceContextWithin(space , "offheap-disk-store"); + FileBasedPersistenceContext persistenceContext = diskPersistenceService.createPersistenceContextWithin(space , "offheap-disk-store"); OffHeapDiskStore offHeapStore = new OffHeapDiskStore(persistenceContext, executionService, threadPoolAlias, writerConcurrency, storeConfig, timeSource, eventDispatcher, unit.toBytes(diskPool.getSize())); - createdStores.add(offHeapStore); + createdStores.put(offHeapStore, space); return offHeapStore; } catch (CachePersistenceException cpex) { throw new RuntimeException("Unable to create persistence context in " + space, cpex); @@ -368,11 +391,14 @@ private OffHeapDiskStore createStoreInternal(Configuration st @Override public void releaseStore(Store resource) { - if (!createdStores.contains(resource)) { + if (createdStores.remove(resource) == null) { throw new 
IllegalArgumentException("Given store is not managed by this provider : " + resource); } try { - close((OffHeapDiskStore)resource); + OffHeapDiskStore offHeapDiskStore = (OffHeapDiskStore)resource; + close(offHeapDiskStore); + StatisticsManager.nodeFor(offHeapDiskStore).clean(); + tierOperationStatistics.remove(offHeapDiskStore); } catch (IOException e) { throw new RuntimeException(e); } @@ -392,15 +418,38 @@ static void close(final OffHeapDiskStore resource) throws IOExcepti } localMap.close(); } - StatisticsManager.dissociate(resource.offHeapStoreStatsSettings).fromParent(resource); } @Override public void initStore(Store resource) { - if (!createdStores.contains(resource)) { + PersistenceSpaceIdentifier identifier = createdStores.get(resource); + if (identifier == null) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } - init((OffHeapDiskStore)resource); + OffHeapDiskStore diskStore = (OffHeapDiskStore) resource; + + Serializer keySerializer = diskStore.keySerializer; + if (keySerializer instanceof StatefulSerializer) { + StateRepository stateRepository = null; + try { + stateRepository = diskPersistenceService.getStateRepositoryWithin(identifier, "key-serializer"); + } catch (CachePersistenceException e) { + throw new RuntimeException(e); + } + ((StatefulSerializer)keySerializer).init(stateRepository); + } + Serializer valueSerializer = diskStore.valueSerializer; + if (valueSerializer instanceof StatefulSerializer) { + StateRepository stateRepository = null; + try { + stateRepository = diskPersistenceService.getStateRepositoryWithin(identifier, "value-serializer"); + } catch (CachePersistenceException e) { + throw new RuntimeException(e); + } + ((StatefulSerializer)valueSerializer).init(stateRepository); + } + + init(diskStore); } static void init(final OffHeapDiskStore resource) { @@ -410,17 +459,38 @@ static void init(final OffHeapDiskStore resource) { @Override public void start(ServiceProvider 
serviceProvider) { this.serviceProvider = serviceProvider; + diskPersistenceService = serviceProvider.getService(DiskResourceService.class); + if (diskPersistenceService == null) { + throw new IllegalStateException("Unable to find file based persistence service"); + } } @Override public void stop() { this.serviceProvider = null; createdStores.clear(); + diskPersistenceService = null; } @Override public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - return createStore(storeConfig, serviceConfigs); + OffHeapDiskStore authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + MappedOperationStatistic get = + new MappedOperationStatistic( + authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", ResourceType.Core.DISK.getTierHeight(), "getAndFault", STATISTICS_TAG); + StatisticsManager.associate(get).withParent(authoritativeTier); + tieredOps.add(get); + + MappedOperationStatistic evict = + new MappedOperationStatistic( + authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); + StatisticsManager.associate(evict).withParent(authoritativeTier); + tieredOps.add(evict); + + tierOperationStatistics.put(authoritativeTier, tieredOps); + return authoritativeTier; } @Override @@ -438,6 +508,7 @@ public void initAuthoritativeTier(AuthoritativeTier resource) { * This is kind of a hack, but it's safe to use this if the regular portability * is stateless. 
*/ + @SuppressWarnings("unchecked") public static PersistentPortability persistent(final Portability normal) { final Class normalKlazz = normal.getClass(); Class[] delegateInterfaces = normalKlazz.getInterfaces(); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java index de74e89c3e..89137e872d 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java @@ -76,5 +76,5 @@ interface Backend { void updateUsageInBytesIfRequired(long delta); - Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor); + Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java index 3f59f193e0..da62fce6cb 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java @@ -59,7 +59,7 @@ public boolean remove(K key, OnHeapValueHolder value) { } @Override - public Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor) { + public Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor) { Map.Entry, OnHeapValueHolder> candidate = keyCopyMap.getEvictionCandidate(random, size, prioritizer, evictionAdvisor); if (candidate == null) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index c90fce7434..be3695d304 100644 --- 
a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -21,7 +21,6 @@ import org.ehcache.core.CacheConfigurationChangeEvent; import org.ehcache.core.CacheConfigurationChangeListener; import org.ehcache.core.CacheConfigurationProperty; -import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; @@ -46,7 +45,10 @@ import org.ehcache.impl.internal.store.heap.holders.SerializedOnHeapValueHolder; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.impl.serialization.TransientStateRepository; import org.ehcache.sizeof.annotations.IgnoreSizeOf; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.events.StoreEventSource; @@ -64,10 +66,11 @@ import org.ehcache.core.statistics.HigherCachingTierOperationOutcomes; import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.statistics.TierOperationOutcomes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.terracotta.context.annotations.ContextAttribute; import org.terracotta.offheapstore.util.FindbugsSuppressWarnings; +import org.terracotta.statistics.MappedOperationStatistic; import org.terracotta.statistics.StatisticsManager; import org.terracotta.statistics.observer.OperationObserver; @@ -88,6 +91,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import static java.util.Collections.singletonMap; +import static org.ehcache.config.Eviction.noAdvice; import static 
org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; import static org.terracotta.statistics.StatisticBuilder.operation; @@ -110,6 +115,8 @@ public class OnHeapStore implements Store, HigherCachingTier { private static final Logger LOG = LoggerFactory.getLogger(OnHeapStore.class); + private static final String STATISTICS_TAG = "OnHeap"; + private static final int ATTEMPT_RATIO = 4; private static final int EVICTION_RATIO = 2; @@ -137,9 +144,9 @@ public int compare(ValueHolder t, ValueHolder u) { } }; - private static final InvalidationListener NULL_INVALIDATION_LISTENER = new InvalidationListener() { + private static final InvalidationListener NULL_INVALIDATION_LISTENER = new InvalidationListener() { @Override - public void onInvalidation(Object key, ValueHolder valueHolder) { + public void onInvalidation(Object key, ValueHolder valueHolder) { // Do nothing } }; @@ -152,13 +159,15 @@ public void onInvalidation(Object key, ValueHolder valueHolder) { private final Copier valueCopier; private final SizeOfEngine sizeOfEngine; + private final boolean byteSized; private volatile long capacity; private final EvictionAdvisor evictionAdvisor; private final Expiry expiry; private final TimeSource timeSource; private final StoreEventDispatcher storeEventDispatcher; - private volatile InvalidationListener invalidationListener = NULL_INVALIDATION_LISTENER; + @SuppressWarnings("unchecked") + private volatile InvalidationListener invalidationListener = (InvalidationListener) NULL_INVALIDATION_LISTENER; private CacheConfigurationChangeListener cacheConfigurationChangeListener = new CacheConfigurationChangeListener() { @Override @@ -200,8 +209,6 @@ public void cacheConfigurationChange(CacheConfigurationChangeEvent event) { private final OperationObserver silentInvalidateAllObserver; private final OperationObserver silentInvalidateAllWithHashObserver; - private final 
OnHeapStoreStatsSettings onHeapStoreStatsSettings; - private static final NullaryFunction REPLACE_EQUALS_TRUE = new NullaryFunction() { @Override public Boolean apply() { @@ -220,12 +227,18 @@ public OnHeapStore(final Configuration config, final TimeSource timeSource if (heapPool == null) { throw new IllegalArgumentException("OnHeap store must be configured with a resource of type 'heap'"); } + if (timeSource == null) { + throw new NullPointerException("timeSource must not be null"); + } + if (sizeOfEngine == null) { + throw new NullPointerException("sizeOfEngine must not be null"); + } this.sizeOfEngine = sizeOfEngine; - boolean byteSized = this.sizeOfEngine instanceof NoopSizeOfEngine ? false : true; + this.byteSized = this.sizeOfEngine instanceof NoopSizeOfEngine ? false : true; this.capacity = byteSized ? ((MemoryUnit) heapPool.getUnit()).toBytes(heapPool.getSize()) : heapPool.getSize(); this.timeSource = timeSource; if (config.getEvictionAdvisor() == null) { - this.evictionAdvisor = Eviction.noAdvice(); + this.evictionAdvisor = noAdvice(); } else { this.evictionAdvisor = config.getEvictionAdvisor(); } @@ -239,32 +252,46 @@ public OnHeapStore(final Configuration config, final TimeSource timeSource } else { this.map = new KeyCopyBackend(byteSized, keyCopier); } - onHeapStoreStatsSettings = new OnHeapStoreStatsSettings(this); - StatisticsManager.associate(onHeapStoreStatsSettings).withParent(this); - getObserver = operation(StoreOperationOutcomes.GetOutcome.class).named("get").of(this).tag("onheap-store").build(); - putObserver = operation(StoreOperationOutcomes.PutOutcome.class).named("put").of(this).tag("onheap-store").build(); - removeObserver = operation(StoreOperationOutcomes.RemoveOutcome.class).named("remove").of(this).tag("onheap-store").build(); - putIfAbsentObserver = operation(StoreOperationOutcomes.PutIfAbsentOutcome.class).named("putIfAbsent").of(this).tag("onheap-store").build(); - conditionalRemoveObserver = 
operation(StoreOperationOutcomes.ConditionalRemoveOutcome.class).named("conditionalRemove").of(this).tag("onheap-store").build(); - replaceObserver = operation(StoreOperationOutcomes.ReplaceOutcome.class).named("replace").of(this).tag("onheap-store").build(); - conditionalReplaceObserver = operation(StoreOperationOutcomes.ConditionalReplaceOutcome.class).named("conditionalReplace").of(this).tag("onheap-store").build(); - computeObserver = operation(StoreOperationOutcomes.ComputeOutcome.class).named("compute").of(this).tag("onheap-store").build(); - computeIfAbsentObserver = operation(StoreOperationOutcomes.ComputeIfAbsentOutcome.class).named("computeIfAbsent").of(this).tag("onheap-store").build(); - evictionObserver = operation(StoreOperationOutcomes.EvictionOutcome.class).named("eviction").of(this).tag("onheap-store").build(); - expirationObserver = operation(StoreOperationOutcomes.ExpirationOutcome.class).named("expiration").of(this).tag("onheap-store").build(); - getOrComputeIfAbsentObserver = operation(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class).named("getOrComputeIfAbsent").of(this).tag("onheap-store").build(); - invalidateObserver = operation(CachingTierOperationOutcomes.InvalidateOutcome.class).named("invalidate").of(this).tag("onheap-store").build(); - invalidateAllObserver = operation(CachingTierOperationOutcomes.InvalidateAllOutcome.class).named("invalidateAll").of(this).tag("onheap-store").build(); - invalidateAllWithHashObserver = operation(CachingTierOperationOutcomes.InvalidateAllWithHashOutcome.class).named("invalidateAllWithHash").of(this).tag("onheap-store").build(); - silentInvalidateObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.class).named("silentInvalidate").of(this).tag("onheap-store").build(); - silentInvalidateAllObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.class).named("silentInvalidateAll").of(this).tag("onheap-store").build(); - 
silentInvalidateAllWithHashObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllWithHashOutcome.class).named("silentInvalidateAllWithHash").of(this).tag("onheap-store").build(); - StatisticsManager.createPassThroughStatistic(this, "mappingsCount", Collections.singleton("onheap-store"), new Callable() { + + getObserver = operation(StoreOperationOutcomes.GetOutcome.class).named("get").of(this).tag(STATISTICS_TAG).build(); + putObserver = operation(StoreOperationOutcomes.PutOutcome.class).named("put").of(this).tag(STATISTICS_TAG).build(); + removeObserver = operation(StoreOperationOutcomes.RemoveOutcome.class).named("remove").of(this).tag(STATISTICS_TAG).build(); + putIfAbsentObserver = operation(StoreOperationOutcomes.PutIfAbsentOutcome.class).named("putIfAbsent").of(this).tag(STATISTICS_TAG).build(); + conditionalRemoveObserver = operation(StoreOperationOutcomes.ConditionalRemoveOutcome.class).named("conditionalRemove").of(this).tag(STATISTICS_TAG).build(); + replaceObserver = operation(StoreOperationOutcomes.ReplaceOutcome.class).named("replace").of(this).tag(STATISTICS_TAG).build(); + conditionalReplaceObserver = operation(StoreOperationOutcomes.ConditionalReplaceOutcome.class).named("conditionalReplace").of(this).tag(STATISTICS_TAG).build(); + computeObserver = operation(StoreOperationOutcomes.ComputeOutcome.class).named("compute").of(this).tag(STATISTICS_TAG).build(); + computeIfAbsentObserver = operation(StoreOperationOutcomes.ComputeIfAbsentOutcome.class).named("computeIfAbsent").of(this).tag(STATISTICS_TAG).build(); + evictionObserver = operation(StoreOperationOutcomes.EvictionOutcome.class).named("eviction").of(this).tag(STATISTICS_TAG).build(); + expirationObserver = operation(StoreOperationOutcomes.ExpirationOutcome.class).named("expiration").of(this).tag(STATISTICS_TAG).build(); + + getOrComputeIfAbsentObserver = 
operation(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class).named("getOrComputeIfAbsent").of(this).tag(STATISTICS_TAG).build(); + invalidateObserver = operation(CachingTierOperationOutcomes.InvalidateOutcome.class).named("invalidate").of(this).tag(STATISTICS_TAG).build(); + invalidateAllObserver = operation(CachingTierOperationOutcomes.InvalidateAllOutcome.class).named("invalidateAll").of(this).tag(STATISTICS_TAG).build(); + invalidateAllWithHashObserver = operation(CachingTierOperationOutcomes.InvalidateAllWithHashOutcome.class).named("invalidateAllWithHash").of(this).tag(STATISTICS_TAG).build(); + + silentInvalidateObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.class).named("silentInvalidate").of(this).tag(STATISTICS_TAG).build(); + silentInvalidateAllObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.class).named("silentInvalidateAll").of(this).tag(STATISTICS_TAG).build(); + silentInvalidateAllWithHashObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllWithHashOutcome.class).named("silentInvalidateAllWithHash").of(this).tag(STATISTICS_TAG).build(); + + Set tags = new HashSet(Arrays.asList(STATISTICS_TAG, "tier")); + Map properties = singletonMap("discriminator", STATISTICS_TAG); + StatisticsManager.createPassThroughStatistic(this, "mappings", tags, properties, new Callable() { @Override public Number call() throws Exception { return map.mappingCount(); } }); + StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + if (byteSized) { + return map.byteSize(); + } else { + return -1L; + } + } + }); } @Override @@ -294,7 +321,7 @@ private OnHeapValueHolder internalGet(final K key, final boolean updateAccess } } - private OnHeapValueHolder getQuiet(final K key) throws StoreAccessException { + private OnHeapValueHolder getQuiet(final K key) throws 
StoreAccessException { try { OnHeapValueHolder mapping = map.get(key); if (mapping == null) { @@ -413,7 +440,7 @@ public OnHeapValueHolder apply(K mappedKey, OnHeapValueHolder mappedValue) case MISS: return false; default: - throw new AssertionError("Unknow enum value " + outcome); + throw new AssertionError("Unknown enum value " + outcome); } } catch (RuntimeException re) { storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); @@ -689,7 +716,9 @@ public ValueHolder getOrComputeIfAbsent(final K key, final Function backEnd = map; + // First try to find the value from heap OnHeapValueHolder cachedValue = backEnd.get(key); + final long now = timeSource.getTimeMillis(); if (cachedValue == null) { final Fault fault = new Fault(new NullaryFunction>() { @@ -699,60 +728,14 @@ public ValueHolder apply() { } }); cachedValue = backEnd.putIfAbsent(key, fault); - if (cachedValue == null) { - try { - final ValueHolder value = fault.get(); - final OnHeapValueHolder newValue; - if(value != null) { - newValue = importValueFromLowerTier(key, value, now, backEnd, fault); - if (newValue == null) { - // Inline expiry or sizing failure - backEnd.remove(key, fault); - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return value; - } - } else { - backEnd.remove(key, fault); - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); - return null; - } - if (backEnd.replace(key, fault, newValue)) { - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED); - updateUsageInBytesIfRequired(newValue.size()); - enforceCapacity(); - return getValue(newValue); - } else { - final AtomicReference> invalidatedValue = new AtomicReference>(); - backEnd.computeIfPresent(key, new BiFunction, OnHeapValueHolder>() { - @Override - public OnHeapValueHolder apply(K mappedKey, OnHeapValueHolder mappedValue) { - notifyInvalidation(key, mappedValue); - 
invalidatedValue.set(mappedValue); - updateUsageInBytesIfRequired(mappedValue.size()); - return null; - } - }); - ValueHolder p = getValue(invalidatedValue.get()); - if (p != null) { - if (p.isExpired(now, TimeUnit.MILLISECONDS)) { - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS); - return null; - } else { - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return p; - } - } - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return newValue; - } - } catch (Throwable e) { - backEnd.remove(key, fault); - throw new StoreAccessException(e); - } + if (cachedValue == null) { + return resolveFault(key, backEnd, now, fault); } } + // If we have a real value (not a fault), we make sure it is not expired + // If yes, we return null and remove it. If no, we return it (below) if (!(cachedValue instanceof Fault)) { if (cachedValue.isExpired(now, TimeUnit.MILLISECONDS)) { expireMappingUnderLock(key, cachedValue); @@ -763,6 +746,8 @@ public OnHeapValueHolder apply(K mappedKey, OnHeapValueHolder mappedValue) } getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT); + + // Return the value that we found in the cache (by getting the fault or just returning the plain value depending on what we found) return getValue(cachedValue); } catch (RuntimeException re) { handleRuntimeException(re); @@ -770,6 +755,62 @@ public OnHeapValueHolder apply(K mappedKey, OnHeapValueHolder mappedValue) } } + private ValueHolder resolveFault(final K key, Backend backEnd, long now, Fault fault) throws StoreAccessException { + try { + final ValueHolder value = fault.get(); + final OnHeapValueHolder newValue; + if(value != null) { + newValue = importValueFromLowerTier(key, value, now, backEnd, fault); + if (newValue == null) { + // Inline expiry or sizing failure + backEnd.remove(key, fault); + 
getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return value; + } + } else { + backEnd.remove(key, fault); + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); + return null; + } + + if (backEnd.replace(key, fault, newValue)) { + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED); + updateUsageInBytesIfRequired(newValue.size()); + enforceCapacity(); + return newValue; + } + + final AtomicReference> invalidatedValue = new AtomicReference>(); + backEnd.computeIfPresent(key, new BiFunction, OnHeapValueHolder>() { + @Override + public OnHeapValueHolder apply(K mappedKey, OnHeapValueHolder mappedValue) { + notifyInvalidation(key, mappedValue); + invalidatedValue.set(mappedValue); + updateUsageInBytesIfRequired(mappedValue.size()); + return null; + } + }); + + ValueHolder p = getValue(invalidatedValue.get()); + if (p != null) { + if (p.isExpired(now, TimeUnit.MILLISECONDS)) { + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS); + return null; + } + + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return p; + } + + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return newValue; + + } catch (Throwable e) { + backEnd.remove(key, fault); + throw new StoreAccessException(e); + } + } + private void invalidateInGetOrComputeIfAbsent(Backend map, final K key, final ValueHolder value, final Fault fault, final long now, final Duration expiration) { map.computeIfPresent(key, new BiFunction, OnHeapValueHolder>() { @Override @@ -1065,7 +1106,7 @@ public long size() { @Override public String toString() { - return "[Fault : " + (complete ? (throwable == null ? value.toString() : throwable.getMessage()) : "???") + "]"; + return "[Fault : " + (complete ? 
(throwable == null ? String.valueOf(value) : throwable.getMessage()) : "???") + "]"; } @Override @@ -1557,7 +1598,7 @@ boolean evict(final StoreEventSink eventSink) { if (candidate == null) { // 2nd attempt without any advisor - candidate = map.getEvictionCandidate(random, SAMPLE_SIZE, EVICTION_PRIORITIZER, Eviction.>noAdvice()); + candidate = map.getEvictionCandidate(random, SAMPLE_SIZE, EVICTION_PRIORITIZER, noAdvice()); } if (candidate == null) { @@ -1624,6 +1665,7 @@ public static class Provider implements Store.Provider, CachingTier.Provider, Hi private volatile ServiceProvider serviceProvider; private final Map, List> createdStores = new ConcurrentWeakIdentityHashMap, List>(); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); @Override public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { @@ -1637,7 +1679,23 @@ public int rankCachingTier(Set> resourceTypes, Collection OnHeapStore createStore(final Configuration storeConfig, final ServiceConfiguration... 
serviceConfigs) { - return createStoreInternal(storeConfig, new ScopedStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + OnHeapStore store = createStoreInternal(storeConfig, new ScopedStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + MappedOperationStatistic get = + new MappedOperationStatistic( + store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "get", STATISTICS_TAG); + StatisticsManager.associate(get).withParent(store); + tieredOps.add(get); + + MappedOperationStatistic evict = + new MappedOperationStatistic( + store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); + StatisticsManager.associate(evict).withParent(store); + tieredOps.add(evict); + + tierOperationStatistics.put(store, tieredOps); + return store; } public OnHeapStore createStoreInternal(final Configuration storeConfig, final StoreEventDispatcher eventDispatcher, @@ -1667,6 +1725,9 @@ public void releaseStore(Store resource) { } final OnHeapStore onHeapStore = (OnHeapStore)resource; close(onHeapStore); + StatisticsManager.nodeFor(onHeapStore).clean(); + tierOperationStatistics.remove(onHeapStore); + CopyProvider copyProvider = serviceProvider.getService(CopyProvider.class); for (Copier copier: copiers) { try { @@ -1684,6 +1745,16 @@ static void close(final OnHeapStore onHeapStore) { @Override public void initStore(Store resource) { checkResource(resource); + + List copiers = createdStores.get(resource); + for (Copier copier : copiers) { + if(copier instanceof SerializingCopier) { + Serializer serializer = ((SerializingCopier)copier).getSerializer(); + if(serializer instanceof StatefulSerializer) { + ((StatefulSerializer)serializer).init(new TransientStateRepository()); + } + } + } } private void checkResource(Object resource) { @@ -1705,7 +1776,23 @@ public void stop() { 
@Override public CachingTier createCachingTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - return createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); + OnHeapStore cachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + MappedOperationStatistic get = + new MappedOperationStatistic( + cachingTier, TierOperationOutcomes.GET_OR_COMPUTEIFABSENT_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); + StatisticsManager.associate(get).withParent(cachingTier); + tieredOps.add(get); + + MappedOperationStatistic evict + = new MappedOperationStatistic( + cachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); + StatisticsManager.associate(evict).withParent(cachingTier); + tieredOps.add(evict); + + this.tierOperationStatistics.put(cachingTier, tieredOps); + return cachingTier; } @Override @@ -1721,12 +1808,28 @@ public void releaseCachingTier(CachingTier resource) { @Override public void initCachingTier(CachingTier resource) { - initStore((Store) resource); + checkResource(resource); } @Override public HigherCachingTier createHigherCachingTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStore(storeConfig, serviceConfigs); + OnHeapStore higherCachingTier = createStoreInternal(storeConfig, new ScopedStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + MappedOperationStatistic get = + new MappedOperationStatistic( + higherCachingTier, TierOperationOutcomes.GET_OR_COMPUTEIFABSENT_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); + StatisticsManager.associate(get).withParent(higherCachingTier); + tieredOps.add(get); + + MappedOperationStatistic evict = + new MappedOperationStatistic( + higherCachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); + StatisticsManager.associate(evict).withParent(higherCachingTier); + tieredOps.add(evict); + + tierOperationStatistics.put(higherCachingTier, tieredOps); + return higherCachingTier; } @Override @@ -1736,19 +1839,7 @@ public void releaseHigherCachingTier(HigherCachingTier resource) { @Override public void initHigherCachingTier(HigherCachingTier resource) { - initStore((Store) resource); - } - } - - private static final class OnHeapStoreStatsSettings { - @ContextAttribute("tags") private final Set tags = new HashSet(Arrays.asList("store")); - @ContextAttribute("cachingTier") private final CachingTier cachingTier; - @ContextAttribute("authoritativeTier") private final OnHeapStore authoritativeTier; - - OnHeapStoreStatsSettings(OnHeapStore onHeapStore) { - this.cachingTier = null; - this.authoritativeTier = onHeapStore; + checkResource(resource); } } - } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java index d4063da6b9..bb03a0eefa 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java +++ 
b/impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java @@ -47,7 +47,7 @@ public boolean remove(K key, OnHeapValueHolder value) { } @Override - public Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor) { + public Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor) { return realMap.getEvictionCandidate(random, size, prioritizer, evictionAdvisor); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java index 91ba363c59..2a47a0bce8 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java @@ -87,7 +87,8 @@ public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; - SerializedOnHeapValueHolder that = (SerializedOnHeapValueHolder)other; + @SuppressWarnings("unchecked") + SerializedOnHeapValueHolder that = (SerializedOnHeapValueHolder)other; if (!super.equals(that)) return false; try { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java index 35cf66c031..a2492e5d40 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java @@ -56,16 +56,15 @@ import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; -import 
org.terracotta.context.annotations.ContextAttribute; import static org.terracotta.statistics.StatisticBuilder.operation; public abstract class AbstractOffHeapStore implements AuthoritativeTier, LowerCachingTier { private static final Logger LOG = LoggerFactory.getLogger(AbstractOffHeapStore.class); - private static final CachingTier.InvalidationListener NULL_INVALIDATION_LISTENER = new CachingTier.InvalidationListener() { + private static final CachingTier.InvalidationListener NULL_INVALIDATION_LISTENER = new CachingTier.InvalidationListener() { @Override - public void onInvalidation(Object key, ValueHolder valueHolder) { + public void onInvalidation(Object key, ValueHolder valueHolder) { // Do nothing } }; @@ -99,11 +98,11 @@ public void onInvalidation(Object key, ValueHolder valueHolder) { private final OperationObserver getAndRemoveObserver; private final OperationObserver installMappingObserver; - protected final OffHeapStoreStatsSettings offHeapStoreStatsSettings; private volatile InvalidationValve valve; protected BackingMapEvictionListener mapEvictionListener; - private volatile CachingTier.InvalidationListener invalidationListener = NULL_INVALIDATION_LISTENER; + @SuppressWarnings("unchecked") + private volatile CachingTier.InvalidationListener invalidationListener = (CachingTier.InvalidationListener) NULL_INVALIDATION_LISTENER; public AbstractOffHeapStore(String statisticsTag, Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher) { keyType = config.getKeyType(); @@ -113,8 +112,6 @@ public AbstractOffHeapStore(String statisticsTag, Configuration config, Ti this.timeSource = timeSource; this.eventDispatcher = eventDispatcher; - this.offHeapStoreStatsSettings = new OffHeapStoreStatsSettings(this); - StatisticsManager.associate(offHeapStoreStatsSettings).withParent(this); this.getObserver = operation(StoreOperationOutcomes.GetOutcome.class).of(this).named("get").tag(statisticsTag).build(); this.putObserver = 
operation(StoreOperationOutcomes.PutOutcome.class).of(this).named("put").tag(statisticsTag).build(); this.putIfAbsentObserver = operation(StoreOperationOutcomes.PutIfAbsentOutcome.class).of(this).named("putIfAbsent").tag(statisticsTag).build(); @@ -137,73 +134,82 @@ public AbstractOffHeapStore(String statisticsTag, Configuration config, Ti this.getAndRemoveObserver= operation(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.class).of(this).named("getAndRemove").tag(statisticsTag).build(); this.installMappingObserver= operation(LowerCachingTierOperationsOutcome.InstallMappingOutcome.class).of(this).named("installMapping").tag(statisticsTag).build(); - StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", Collections.singleton(statisticsTag), new Callable() { + Set tags = new HashSet(Arrays.asList(statisticsTag, "tier")); + Map properties = new HashMap(); + properties.put("discriminator", statisticsTag); + StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().allocatedMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().occupiedMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "dataAllocatedMemory", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "dataAllocatedMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().dataAllocatedMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "dataOccupiedMemory", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, 
"dataOccupiedMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().dataOccupiedMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "dataSize", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "dataSize", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().dataSize(); } }); - StatisticsManager.createPassThroughStatistic(this, "dataVitalMemory", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "dataVitalMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().dataVitalMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "longSize", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "mappings", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().longSize(); } }); - StatisticsManager.createPassThroughStatistic(this, "vitalMemory", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "maxMappings", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + return -1L; + } + }); + StatisticsManager.createPassThroughStatistic(this, "vitalMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().vitalMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "removedSlotCount", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "removedSlotCount", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().removedSlotCount(); } }); - StatisticsManager.createPassThroughStatistic(this, "reprobeLength", 
Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "reprobeLength", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().reprobeLength(); } }); - StatisticsManager.createPassThroughStatistic(this, "usedSlotCount", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "usedSlotCount", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().usedSlotCount(); } }); - StatisticsManager.createPassThroughStatistic(this, "tableCapacity", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "tableCapacity", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().tableCapacity(); @@ -1281,7 +1287,9 @@ static class BackingMapEvictionListener implements EhcacheSegmentFactory.E private BackingMapEvictionListener(StoreEventDispatcher eventDispatcher, OperationObserver evictionObserver) { this.eventDispatcher = eventDispatcher; this.evictionObserver = evictionObserver; - this.invalidationListener = NULL_INVALIDATION_LISTENER; + @SuppressWarnings("unchecked") + CachingTier.InvalidationListener nullInvalidationListener = (CachingTier.InvalidationListener) NULL_INVALIDATION_LISTENER; + this.invalidationListener = nullInvalidationListener; } public void setInvalidationListener(CachingTier.InvalidationListener invalidationListener) { @@ -1305,13 +1313,4 @@ public void onEviction(K key, OffHeapValueHolder value) { evictionObserver.end(StoreOperationOutcomes.EvictionOutcome.SUCCESS); } } - - private static final class OffHeapStoreStatsSettings { - @ContextAttribute("tags") private final Set tags = new HashSet(Arrays.asList("store")); - @ContextAttribute("authoritativeTier") private final AbstractOffHeapStore authoritativeTier; - - OffHeapStoreStatsSettings(AbstractOffHeapStore 
store) { - this.authoritativeTier = store; - } - } -} \ No newline at end of file +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java index 52b7754454..308de82478 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java @@ -42,7 +42,7 @@ public class EhcacheConcurrentOffHeapClockCache extends AbstractConcurrent private final EvictionAdvisor evictionAdvisor; private final AtomicLong[] counters; - protected EhcacheConcurrentOffHeapClockCache(EvictionAdvisor evictionAdvisor, Factory> segmentFactory, int ssize) { + public EhcacheConcurrentOffHeapClockCache(EvictionAdvisor evictionAdvisor, Factory> segmentFactory, int ssize) { super(segmentFactory, ssize); this.evictionAdvisor = evictionAdvisor; this.counters = new AtomicLong[segments.length]; diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java index e3a5e6c8e2..d017fb47a2 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java @@ -18,12 +18,14 @@ import org.ehcache.config.SizedResourcePool; import org.ehcache.core.CacheConfigurationChangeListener; -import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourceType; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; +import org.ehcache.core.statistics.LowerCachingTierOperationsOutcome; +import 
org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.events.ThreadLocalStoreEventDispatcher; import org.ehcache.impl.internal.store.offheap.factories.EhcacheSegmentFactory; @@ -31,6 +33,8 @@ import org.ehcache.impl.internal.store.offheap.portability.SerializerPortability; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.impl.serialization.TransientStateRepository; +import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; @@ -41,6 +45,7 @@ import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.statistics.TierOperationOutcomes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.offheapstore.paging.PageSource; @@ -50,13 +55,17 @@ import org.terracotta.offheapstore.storage.PointerSize; import org.terracotta.offheapstore.storage.portability.Portability; import org.terracotta.offheapstore.util.Factory; +import org.terracotta.statistics.MappedOperationStatistic; import org.terracotta.statistics.StatisticsManager; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Set; +import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.impl.internal.store.offheap.OffHeapStoreUtils.getBufferSource; /** @@ -64,6 +73,8 @@ */ public class OffHeapStore extends AbstractOffHeapStore { + private static final String STATISTICS_TAG = "OffHeap"; + private final SwitchableEvictionAdvisor> evictionAdvisor; private final Serializer keySerializer; private final Serializer valueSerializer; @@ -72,12 
+83,12 @@ public class OffHeapStore extends AbstractOffHeapStore { private volatile EhcacheConcurrentOffHeapClockCache> map; public OffHeapStore(final Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher, long sizeInBytes) { - super("local-offheap", config, timeSource, eventDispatcher); + super(STATISTICS_TAG, config, timeSource, eventDispatcher); EvictionAdvisor evictionAdvisor = config.getEvictionAdvisor(); if (evictionAdvisor != null) { this.evictionAdvisor = wrap(evictionAdvisor); } else { - this.evictionAdvisor = wrap(Eviction.noAdvice()); + this.evictionAdvisor = wrap(noAdvice()); } this.keySerializer = config.getKeySerializer(); this.valueSerializer = config.getValueSerializer(); @@ -124,6 +135,7 @@ public static class Provider implements Store.Provider, AuthoritativeTier.Provid private volatile ServiceProvider serviceProvider; private final Set> createdStores = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap, Boolean>()); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); @Override public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { @@ -137,7 +149,23 @@ public int rankAuthority(ResourceType authorityResource, Collection OffHeapStore createStore(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + OffHeapStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + MappedOperationStatistic get = + new MappedOperationStatistic( + store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "get", STATISTICS_TAG); + StatisticsManager.associate(get).withParent(store); + tieredOps.add(get); + + MappedOperationStatistic evict = + new MappedOperationStatistic( + store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); + StatisticsManager.associate(evict).withParent(store); + tieredOps.add(evict); + + tierOperationStatistics.put(store, tieredOps); + return store; } private OffHeapStore createStoreInternal(Configuration storeConfig, StoreEventDispatcher eventDispatcher, ServiceConfiguration... 
serviceConfigs) { @@ -163,15 +191,18 @@ public void releaseStore(Store resource) { if (!createdStores.contains(resource)) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } - close((OffHeapStore)resource); + OffHeapStore offHeapStore = (OffHeapStore)resource; + close(offHeapStore); + StatisticsManager.nodeFor(offHeapStore).clean(); + tierOperationStatistics.remove(offHeapStore); } - static void close(final OffHeapStore resource) {EhcacheConcurrentOffHeapClockCache> localMap = resource.map; + static void close(final OffHeapStore resource) { + EhcacheConcurrentOffHeapClockCache localMap = resource.map; if (localMap != null) { resource.map = null; localMap.destroy(); } - StatisticsManager.dissociate(resource.offHeapStoreStatsSettings).fromParent(resource); } @Override @@ -179,7 +210,18 @@ public void initStore(Store resource) { if (!createdStores.contains(resource)) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } - init((OffHeapStore)resource); + + OffHeapStore offHeapStore = (OffHeapStore) resource; + Serializer keySerializer = offHeapStore.keySerializer; + if (keySerializer instanceof StatefulSerializer) { + ((StatefulSerializer)keySerializer).init(new TransientStateRepository()); + } + Serializer valueSerializer = offHeapStore.valueSerializer; + if (valueSerializer instanceof StatefulSerializer) { + ((StatefulSerializer)valueSerializer).init(new TransientStateRepository()); + } + + init(offHeapStore); } static void init(final OffHeapStore resource) { @@ -199,7 +241,23 @@ public void stop() { @Override public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStore(storeConfig, serviceConfigs); + OffHeapStore authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + MappedOperationStatistic get = + new MappedOperationStatistic( + authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndFault", STATISTICS_TAG); + StatisticsManager.associate(get).withParent(authoritativeTier); + tieredOps.add(get); + + MappedOperationStatistic evict + = new MappedOperationStatistic( + authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); + StatisticsManager.associate(evict).withParent(authoritativeTier); + tieredOps.add(evict); + + tierOperationStatistics.put(authoritativeTier, tieredOps); + return authoritativeTier; } @Override @@ -214,10 +272,27 @@ public void initAuthoritativeTier(AuthoritativeTier resource) { @Override public LowerCachingTier createCachingTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); + OffHeapStore lowerCachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + MappedOperationStatistic get + = new MappedOperationStatistic( + lowerCachingTier, TierOperationOutcomes.GET_AND_REMOVE_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndRemove", STATISTICS_TAG); + StatisticsManager.associate(get).withParent(lowerCachingTier); + tieredOps.add(get); + + MappedOperationStatistic evict = + new MappedOperationStatistic( + lowerCachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); + StatisticsManager.associate(evict).withParent(lowerCachingTier); + tieredOps.add(evict); + + tierOperationStatistics.put(lowerCachingTier, tieredOps); + return lowerCachingTier; } @Override + @SuppressWarnings("unchecked") public void releaseCachingTier(LowerCachingTier resource) { if (!createdStores.contains(resource)) { throw new IllegalArgumentException("Given caching tier is not managed by this provider : " + resource); @@ -251,7 +326,7 @@ public void initCachingTier(LowerCachingTier resource) { if (!createdStores.contains(resource)) { throw new IllegalArgumentException("Given caching tier is not managed by this provider : " + resource); } - initStore((Store) resource); + init((OffHeapStore) resource); } } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java index a3a0e49f1c..b41a2088e2 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java +++ 
b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java @@ -51,7 +51,9 @@ public T decode(ByteBuffer byteBuffer) { @Override public boolean equals(Object o, ByteBuffer byteBuffer) { try { - return serializer.equals((T)o, byteBuffer); + @SuppressWarnings("unchecked") + T otherValue = (T) o; + return serializer.equals(otherValue, byteBuffer); } catch (ClassNotFoundException e) { throw new SerializerException(e); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java index 76a45f040f..d74e3cc031 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java @@ -211,7 +211,7 @@ public List getConfigurationChangeListeners() } - @ServiceDependencies({OnHeapStore.Provider.class, OffHeapStore.Provider.class}) + @ServiceDependencies({HigherCachingTier.Provider.class, LowerCachingTier.Provider.class}) public static class Provider implements CachingTier.Provider { private volatile ServiceProvider serviceProvider; private final ConcurrentMap, Map.Entry> providersMap = new ConcurrentWeakIdentityHashMap, Map.Entry>(); @@ -222,10 +222,18 @@ public CachingTier createCachingTier(Store.Configuration stor throw new RuntimeException("ServiceProvider is null."); } - HigherCachingTier.Provider higherProvider = serviceProvider.getService(HigherCachingTier.Provider.class); + Collection higherProviders = serviceProvider.getServicesOfType(HigherCachingTier.Provider.class); + if (higherProviders.size() != 1) { + throw new IllegalStateException("Cannot handle multiple higher tier providers"); + } + HigherCachingTier.Provider higherProvider = higherProviders.iterator().next(); HigherCachingTier higherCachingTier = higherProvider.createHigherCachingTier(storeConfig, serviceConfigs); - 
LowerCachingTier.Provider lowerProvider = serviceProvider.getService(LowerCachingTier.Provider.class); + Collection lowerProviders = serviceProvider.getServicesOfType(LowerCachingTier.Provider.class); + if (lowerProviders.size() != 1) { + throw new IllegalStateException("Cannot handle multiple lower tier providers"); + } + LowerCachingTier.Provider lowerProvider = lowerProviders.iterator().next(); LowerCachingTier lowerCachingTier = lowerProvider.createCachingTier(storeConfig, serviceConfigs); CompoundCachingTier compoundCachingTier = new CompoundCachingTier(higherCachingTier, lowerCachingTier); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java index c98e5f45d7..8b1de19617 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java @@ -16,29 +16,27 @@ package org.ehcache.impl.internal.store.tiering; import org.ehcache.Cache; -import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; import org.ehcache.core.CacheConfigurationChangeListener; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; import org.ehcache.core.spi.function.BiFunction; import org.ehcache.core.spi.function.Function; import org.ehcache.core.spi.function.NullaryFunction; -import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; -import org.ehcache.impl.internal.store.heap.OnHeapStore; -import org.ehcache.impl.internal.store.offheap.OffHeapStore; -import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.store.events.StoreEventSource; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import 
org.ehcache.core.spi.store.tiering.CachingTier; +import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; +import org.ehcache.impl.internal.store.heap.OnHeapStore; +import org.ehcache.impl.internal.store.offheap.OffHeapStore; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.terracotta.context.annotations.ContextAttribute; import org.terracotta.statistics.StatisticsManager; import java.util.AbstractMap; @@ -52,10 +50,6 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicReference; -import static org.ehcache.config.ResourceType.Core.DISK; -import static org.ehcache.config.ResourceType.Core.HEAP; -import static org.ehcache.config.ResourceType.Core.OFFHEAP; - /** * A {@link Store} implementation supporting a tiered caching model. 
*/ @@ -68,9 +62,6 @@ public class TieredStore implements Store { private final CachingTier realCachingTier; private final AuthoritativeTier authoritativeTier; - private final TieringStoreStatsSettings tieringStoreStatsSettings; - - public TieredStore(CachingTier cachingTier, AuthoritativeTier authoritativeTier) { this.cachingTierRef = new AtomicReference>(cachingTier); this.authoritativeTier = authoritativeTier; @@ -99,8 +90,6 @@ public void invalidateAllWithHash(long hash) throws StoreAccessException { StatisticsManager.associate(cachingTier).withParent(this); StatisticsManager.associate(authoritativeTier).withParent(this); - tieringStoreStatsSettings = new TieringStoreStatsSettings(cachingTier, authoritativeTier); - StatisticsManager.associate(tieringStoreStatsSettings).withParent(this); } @@ -154,15 +143,11 @@ public PutStatus put(final K key, final V value) throws StoreAccessException { @Override public ValueHolder putIfAbsent(K key, V value) throws StoreAccessException { - ValueHolder previous = null; try { - previous = authoritativeTier.putIfAbsent(key, value); + return authoritativeTier.putIfAbsent(key, value); } finally { - if (previous == null) { - cachingTier().invalidate(key); - } + cachingTier().invalidate(key); } - return previous; } @Override @@ -176,79 +161,55 @@ public boolean remove(K key) throws StoreAccessException { @Override public RemoveStatus remove(K key, V value) throws StoreAccessException { - RemoveStatus removed = null; try { - removed = authoritativeTier.remove(key, value); - return removed; + return authoritativeTier.remove(key, value); } finally { - if (removed != null && removed.equals(RemoveStatus.REMOVED)) { - cachingTier().invalidate(key); - } + cachingTier().invalidate(key); } } @Override public ValueHolder replace(K key, V value) throws StoreAccessException { - ValueHolder previous = null; - boolean exceptionThrown = true; try { - previous = authoritativeTier.replace(key, value); - exceptionThrown = false; + return 
authoritativeTier.replace(key, value); } finally { - if (exceptionThrown || previous != null) { - cachingTier().invalidate(key); - } + cachingTier().invalidate(key); } - return previous; } @Override public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessException { - ReplaceStatus replaced = null; try { - replaced = authoritativeTier.replace(key, oldValue, newValue); + return authoritativeTier.replace(key, oldValue, newValue); } finally { - if (replaced != null && replaced.equals(ReplaceStatus.HIT)) { - cachingTier().invalidate(key); - } + cachingTier().invalidate(key); } - return replaced; } @Override public void clear() throws StoreAccessException { - boolean interrupted = false; - while(!cachingTierRef.compareAndSet(realCachingTier, noopCachingTier)) { - synchronized (noopCachingTier) { - if(cachingTierRef.get() == noopCachingTier) { - try { - noopCachingTier.wait(); - } catch (InterruptedException e) { - interrupted = true; - } - } - } - } - if(interrupted) { - Thread.currentThread().interrupt(); - } + swapCachingTiers(); try { authoritativeTier.clear(); } finally { try { realCachingTier.clear(); } finally { - if(!cachingTierRef.compareAndSet(noopCachingTier, realCachingTier)) { - throw new AssertionError("Something bad happened"); - } - synchronized (noopCachingTier) { - noopCachingTier.notify(); - } + swapBackCachingTiers(); } } } private void invalidateAllInternal() throws StoreAccessException { + swapCachingTiers(); + try { + realCachingTier.invalidateAll(); + } finally { + swapBackCachingTiers(); + } + } + + private void swapCachingTiers() { boolean interrupted = false; while(!cachingTierRef.compareAndSet(realCachingTier, noopCachingTier)) { synchronized (noopCachingTier) { @@ -264,15 +225,14 @@ private void invalidateAllInternal() throws StoreAccessException { if(interrupted) { Thread.currentThread().interrupt(); } - try { - realCachingTier.invalidateAll(); - } finally { - if(!cachingTierRef.compareAndSet(noopCachingTier, 
realCachingTier)) { - throw new AssertionError("Something bad happened"); - } - synchronized (noopCachingTier) { - noopCachingTier.notify(); - } + } + + private void swapBackCachingTiers() { + if(!cachingTierRef.compareAndSet(noopCachingTier, realCachingTier)) { + throw new AssertionError("Something bad happened"); + } + synchronized (noopCachingTier) { + noopCachingTier.notify(); } } @@ -367,8 +327,7 @@ private CachingTier cachingTier() { return cachingTierRef.get(); } - @ServiceDependencies({CompoundCachingTier.Provider.class, - OnHeapStore.Provider.class, OffHeapStore.Provider.class, OffHeapDiskStore.Provider.class}) + @ServiceDependencies({CachingTier.Provider.class, AuthoritativeTier.Provider.class}) public static class Provider implements Store.Provider { private volatile ServiceProvider serviceProvider; @@ -517,17 +476,6 @@ public void stop() { } } - private static final class TieringStoreStatsSettings { - @ContextAttribute("tags") private final Set tags = new HashSet(Arrays.asList("store")); - @ContextAttribute("cachingTier") private final CachingTier cachingTier; - @ContextAttribute("authoritativeTier") private final AuthoritativeTier authoritativeTier; - - TieringStoreStatsSettings(CachingTier cachingTier, AuthoritativeTier authoritativeTier) { - this.cachingTier = cachingTier; - this.authoritativeTier = authoritativeTier; - } - } - private static class NoopCachingTier implements CachingTier { private final AuthoritativeTier authoritativeTier; diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java new file mode 100644 index 0000000000..04facbab5d --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java @@ -0,0 +1,289 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.persistence; + +import org.ehcache.CachePersistenceException; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.spi.service.FileBasedPersistenceContext; +import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.core.spi.service.LocalPersistenceService.SafeSpaceIdentifier; +import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.service.MaintainableService; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; + +/** + * Default implementation of the {@link DiskResourceService} which can be used explicitly when + * {@link org.ehcache.PersistentUserManagedCache persistent user managed caches} are desired. 
+ */ +@ServiceDependencies(LocalPersistenceService.class) +public class DefaultDiskResourceService implements DiskResourceService { + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDiskResourceService.class); + static final String PERSISTENCE_SPACE_OWNER = "file"; + + private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap(); + private volatile LocalPersistenceService persistenceService; + private volatile boolean isStarted; + + private boolean isStarted() { + return isStarted; + } + + /** + * {@inheritDoc} + */ + @Override + public void start(final ServiceProvider serviceProvider) { + innerStart(serviceProvider); + } + + /** + * {@inheritDoc} + */ + @Override + public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { + innerStart(serviceProvider); + } + + private void innerStart(ServiceProvider serviceProvider) { + persistenceService = serviceProvider.getService(LocalPersistenceService.class); + isStarted = true; + } + + /** + * {@inheritDoc} + */ + @Override + public void stop() { + isStarted = false; + persistenceService = null; + } + + /** + * {@inheritDoc} + */ + @Override + public boolean handlesResourceType(ResourceType resourceType) { + return persistenceService != null && ResourceType.Core.DISK.equals(resourceType); + } + + /** + * {@inheritDoc} + */ + @Override + public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { + if (persistenceService == null) { + return null; + } + boolean persistent = config.getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(); + while (true) { + PersistenceSpace persistenceSpace = knownPersistenceSpaces.get(name); + if (persistenceSpace != null) { + return persistenceSpace.identifier; + } + PersistenceSpace newSpace = createSpace(name, persistent); + if (newSpace != null) { + return newSpace.identifier; + } + } + } + + @Override + public 
void releasePersistenceSpaceIdentifier(PersistenceSpaceIdentifier identifier) throws CachePersistenceException { + String name = null; + for (Map.Entry entry : knownPersistenceSpaces.entrySet()) { + if (entry.getValue().identifier.equals(identifier)) { + name = entry.getKey(); + } + } + if (name == null) { + throw newCachePersistenceException(identifier); + } + PersistenceSpace persistenceSpace = knownPersistenceSpaces.remove(name); + if (persistenceSpace != null) { + for (FileBasedStateRepository stateRepository : persistenceSpace.stateRepositories.values()) { + try { + stateRepository.close(); + } catch (IOException e) { + LOGGER.warn("StateRepository close failed - destroying persistence space {} to prevent corruption", identifier, e); + persistenceService.destroySafeSpace(((DefaultPersistenceSpaceIdentifier)identifier).persistentSpaceId, true); + } + } + } + } + + private PersistenceSpace createSpace(String name, boolean persistent) throws CachePersistenceException { + DefaultPersistenceSpaceIdentifier persistenceSpaceIdentifier = + new DefaultPersistenceSpaceIdentifier(persistenceService.createSafeSpaceIdentifier(PERSISTENCE_SPACE_OWNER, name)); + PersistenceSpace persistenceSpace = new PersistenceSpace(persistenceSpaceIdentifier); + if (knownPersistenceSpaces.putIfAbsent(name, persistenceSpace) == null) { + boolean created = false; + try { + if (!persistent) { + persistenceService.destroySafeSpace(persistenceSpaceIdentifier.persistentSpaceId, true); + } + persistenceService.createSafeSpace(persistenceSpaceIdentifier.persistentSpaceId); + created = true; + } finally { + if (!created) { + // this happens only if an exception is thrown..clean up for any throwable.. 
+ knownPersistenceSpaces.remove(name, persistenceSpace); + } + } + return persistenceSpace; + } + return null; + } + + private void checkStarted() { + if(!isStarted()) { + throw new IllegalStateException(getClass().getName() + " should be started to call destroy"); + } + } + + /** + * {@inheritDoc} + */ + @Override + public void destroy(String name) throws CachePersistenceException { + checkStarted(); + + if(persistenceService == null) { + return; + } + + PersistenceSpace space = knownPersistenceSpaces.remove(name); + SafeSpaceIdentifier identifier = (space == null) ? + persistenceService.createSafeSpaceIdentifier(PERSISTENCE_SPACE_OWNER, name) : space.identifier.persistentSpaceId; + persistenceService.destroySafeSpace(identifier, true); + } + + /** + * {@inheritDoc} + */ + @Override + public void destroyAll() { + checkStarted(); + + if(persistenceService == null) { + return; + } + + persistenceService.destroyAll(PERSISTENCE_SPACE_OWNER); + } + + /** + * {@inheritDoc} + */ + @Override + public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier identifier, String name) + throws CachePersistenceException { + + PersistenceSpace persistenceSpace = getPersistenceSpace(identifier); + if(persistenceSpace == null) { + throw newCachePersistenceException(identifier); + } + + FileBasedStateRepository stateRepository = new FileBasedStateRepository( + FileUtils.createSubDirectory(persistenceSpace.identifier.persistentSpaceId.getRoot(), name)); + FileBasedStateRepository previous = persistenceSpace.stateRepositories.putIfAbsent(name, stateRepository); + if (previous != null) { + return previous; + } + return stateRepository; + } + + private CachePersistenceException newCachePersistenceException(PersistenceSpaceIdentifier identifier) throws CachePersistenceException { + return new CachePersistenceException("Unknown space: " + identifier); + } + + private PersistenceSpace getPersistenceSpace(PersistenceSpaceIdentifier identifier) { + for (PersistenceSpace 
persistenceSpace : knownPersistenceSpaces.values()) { + if (persistenceSpace.identifier.equals(identifier)) { + return persistenceSpace; + } + } + return null; + } + + /** + * {@inheritDoc} + */ + @Override + public FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) + throws CachePersistenceException { + if(getPersistenceSpace(identifier) == null) { + throw newCachePersistenceException(identifier); + } + return new DefaultFileBasedPersistenceContext( + FileUtils.createSubDirectory(((DefaultPersistenceSpaceIdentifier)identifier).persistentSpaceId.getRoot(), name)); + } + + private static class PersistenceSpace { + final DefaultPersistenceSpaceIdentifier identifier; + final ConcurrentMap stateRepositories = new ConcurrentHashMap(); + + private PersistenceSpace(DefaultPersistenceSpaceIdentifier identifier) { + this.identifier = identifier; + } + } + + private static class DefaultPersistenceSpaceIdentifier implements PersistenceSpaceIdentifier { + final SafeSpaceIdentifier persistentSpaceId; + + private DefaultPersistenceSpaceIdentifier(SafeSpaceIdentifier persistentSpaceId) { + this.persistentSpaceId = persistentSpaceId; + } + + @Override + public Class getServiceType() { + return DiskResourceService.class; + } + + @Override + public String toString() { + return persistentSpaceId.toString(); + } + + // no need to override equals and hashcode as references are private and created in a protected fashion + // within this class. So two space identifiers are equal iff their references are equal. 
+ } + + private static class DefaultFileBasedPersistenceContext implements FileBasedPersistenceContext { + private final File directory; + + private DefaultFileBasedPersistenceContext(File directory) { + this.directory = directory; + } + + @Override + public File getDirectory() { + return directory; + } + } +} diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java index 4f02e1f132..4a406f2068 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java @@ -16,15 +16,9 @@ package org.ehcache.impl.persistence; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.ehcache.CachePersistenceException; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.ResourceType; -import org.ehcache.core.spi.service.FileBasedPersistenceContext; import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceProvider; @@ -36,63 +30,28 @@ import java.io.IOException; import java.io.RandomAccessFile; import java.nio.channels.FileLock; -import java.nio.charset.Charset; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.ArrayDeque; -import java.util.Deque; -import java.util.HashSet; -import java.util.Locale; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentMap; -import static java.lang.Integer.toHexString; -import static java.nio.charset.Charset.forName; +import static 
org.ehcache.impl.persistence.FileUtils.createLocationIfRequiredAndVerify; +import static org.ehcache.impl.persistence.FileUtils.recursiveDeleteDirectoryContent; +import static org.ehcache.impl.persistence.FileUtils.safeIdentifier; +import static org.ehcache.impl.persistence.FileUtils.tryRecursiveDelete; +import static org.ehcache.impl.persistence.FileUtils.validateName; /** - * Default implementation of the {@link LocalPersistenceService} which can be used explicitly when - * {@link org.ehcache.PersistentUserManagedCache persistent user managed caches} are desired. + * Implements the local persistence service that provides individual sub-spaces for different + * services. */ public class DefaultLocalPersistenceService implements LocalPersistenceService { - private static final Charset UTF8 = forName("UTF8"); - private static final int DEL = 0x7F; - private static final char ESCAPE = '%'; - private static final Set ILLEGALS = new HashSet(); - static { - ILLEGALS.add('/'); - ILLEGALS.add('\\'); - ILLEGALS.add('<'); - ILLEGALS.add('>'); - ILLEGALS.add(':'); - ILLEGALS.add('"'); - ILLEGALS.add('|'); - ILLEGALS.add('?'); - ILLEGALS.add('*'); - ILLEGALS.add('.'); - } + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLocalPersistenceService.class); - private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap(); private final File rootDirectory; private final File lockFile; - private FileLock lock; + private FileLock lock; private RandomAccessFile rw; - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLocalPersistenceService.class); - private boolean started; - /** - * Tells if the service is currently started - * - * @return if the service is started - */ - public boolean isStarted() { - return started; - } - /** * Creates a new service instance using the provided configuration. 
* @@ -115,18 +74,12 @@ public synchronized void start(final ServiceProvider serviceProvider) { internalStart(); } - /** - * {@inheritDoc} - */ @Override - public synchronized void startForMaintenance(ServiceProvider serviceProvider) { + public synchronized void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { internalStart(); } - /** - * Default scope for testing - */ - void internalStart() { + private void internalStart() { if (!started) { createLocationIfRequiredAndVerify(rootDirectory); try { @@ -173,378 +126,116 @@ public synchronized void stop() { } } - private static void createLocationIfRequiredAndVerify(final File rootDirectory) { - if(!rootDirectory.exists()) { - if(!rootDirectory.mkdirs()) { - throw new IllegalArgumentException("Directory couldn't be created: " + rootDirectory.getAbsolutePath()); - } - } else if(!rootDirectory.isDirectory()) { - throw new IllegalArgumentException("Location is not a directory: " + rootDirectory.getAbsolutePath()); - } - - if(!rootDirectory.canWrite()) { - throw new IllegalArgumentException("Location isn't writable: " + rootDirectory.getAbsolutePath()); - } - } - - /** - * {@inheritDoc} - */ - @Override - public boolean handlesResourceType(ResourceType resourceType) { - return ResourceType.Core.DISK.equals(resourceType); + File getLockFile() { + return lockFile; } /** * {@inheritDoc} */ @Override - public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { - boolean persistent = config.getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(); - while (true) { - PersistenceSpace persistenceSpace = knownPersistenceSpaces.get(name); - if (persistenceSpace != null) { - return persistenceSpace.identifier; - } - PersistenceSpace newSpace = createSpace(name, persistent); - if (newSpace != null) { - return newSpace.identifier; - } - } - } - - @Override - public void 
releasePersistenceSpaceIdentifier(PersistenceSpaceIdentifier identifier) throws CachePersistenceException { - String name = null; - for (Map.Entry entry : knownPersistenceSpaces.entrySet()) { - if (entry.getValue().identifier.equals(identifier)) { - name = entry.getKey(); - } - } - if (name == null) { - throw new CachePersistenceException("Unknown space " + identifier); - } - PersistenceSpace persistenceSpace = knownPersistenceSpaces.remove(name); - if (persistenceSpace != null) { - for (FileBasedStateRepository stateRepository : persistenceSpace.stateRepositories.values()) { - try { - stateRepository.close(); - } catch (IOException e) { - LOGGER.warn("StateRepository close failed - destroying persistence space {} to prevent corruption", identifier, e); - destroy(name, (DefaultPersistenceSpaceIdentifier) identifier, true); - } - } - } - } + public SafeSpaceIdentifier createSafeSpaceIdentifier(String owner, String identifier) { + validateName(owner); + SafeSpace ss = createSafeSpaceLogical(owner, identifier); - private PersistenceSpace createSpace(String name, boolean persistent) throws CachePersistenceException { - DefaultPersistenceSpaceIdentifier persistenceSpaceIdentifier = new DefaultPersistenceSpaceIdentifier(getDirectoryFor(name)); - PersistenceSpace persistenceSpace = new PersistenceSpace(persistenceSpaceIdentifier); - if (knownPersistenceSpaces.putIfAbsent(name, persistenceSpace) == null) { - try { - if (!persistent) { - destroy(name, persistenceSpaceIdentifier, true); - } - create(persistenceSpaceIdentifier.getDirectory()); - } catch (IOException e) { - knownPersistenceSpaces.remove(name, persistenceSpace); - throw new CachePersistenceException("Unable to create persistence space for " + name, e); + for (File parent = ss.directory.getParentFile(); parent != null; parent = parent.getParentFile()) { + if (rootDirectory.equals(parent)) { + return new DefaultSafeSpaceIdentifier(ss); } - return persistenceSpace; - } else { - return null; - } - } - /** - * 
{@inheritDoc} - */ - @Override - public synchronized void destroy(String name) throws CachePersistenceException { - boolean wasStarted = false; - if (!started) { - internalStart(); - wasStarted = true; } - try { - PersistenceSpace space = knownPersistenceSpaces.remove(name); - if (space == null) { - destroy(name, new DefaultPersistenceSpaceIdentifier(getDirectoryFor(name)), true); - } else { - destroy(name, space.identifier, true); - } - } finally { - if (wasStarted) { - stop(); - } - } + throw new IllegalArgumentException("Attempted to access file outside the persistence path"); } /** * {@inheritDoc} */ @Override - public synchronized void destroyAll() { - if (!started) { - throw new IllegalStateException("Service must be started"); - } - if(recursiveDeleteDirectoryContent(rootDirectory)){ - LOGGER.debug("Destroyed all file based persistence contexts"); - } else { - LOGGER.warn("Could not delete all file based persistence contexts"); + public void createSafeSpace(SafeSpaceIdentifier safeSpaceId) throws CachePersistenceException { + if (safeSpaceId == null || !(safeSpaceId instanceof DefaultSafeSpaceIdentifier)) { + // this cannot happen..if identifier created before creating physical space.. + throw new AssertionError("Invalid safe space identifier. 
Identifier not created"); } + SafeSpace ss = ((DefaultSafeSpaceIdentifier) safeSpaceId).safeSpace; + FileUtils.create(ss.directory.getParentFile()); + FileUtils.create(ss.directory); } /** * {@inheritDoc} */ @Override - public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { - PersistenceSpace persistenceSpace = getPersistenceSpace(identifier); - if (persistenceSpace != null) { - validateName(name); - File directory = new File(((DefaultPersistenceSpaceIdentifier) identifier).getDirectory(), name); - if (!directory.mkdirs()) { - if (!directory.exists()) { - throw new CachePersistenceException("Unable to create directory " + directory); - } - } - FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); - FileBasedStateRepository previous = persistenceSpace.stateRepositories.putIfAbsent(name, stateRepository); - if (previous != null) { - return previous; - } else { - return stateRepository; - } - } - throw new CachePersistenceException("Unknown space " + identifier); - } - - private PersistenceSpace getPersistenceSpace(PersistenceSpaceIdentifier identifier) { - for (PersistenceSpace persistenceSpace : knownPersistenceSpaces.values()) { - if (persistenceSpace.identifier.equals(identifier)) { - return persistenceSpace; - } + public void destroySafeSpace(SafeSpaceIdentifier safeSpaceId, boolean verbose) { + if (safeSpaceId == null || !(safeSpaceId instanceof DefaultSafeSpaceIdentifier)) { + // this cannot happen..if identifier created before creating/destroying physical space.. + throw new AssertionError("Invalid safe space identifier. 
Identifier not created"); } - return null; + SafeSpace ss = ((DefaultSafeSpaceIdentifier) safeSpaceId).safeSpace; + destroy(ss, verbose); } /** * {@inheritDoc} */ - @Override - public FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { - if (containsSpace(identifier)) { - validateName(name); - File directory = new File(((DefaultPersistenceSpaceIdentifier) identifier).getDirectory(), name); - try { - create(directory); - } catch (IOException ex) { - throw new CachePersistenceException("Unable to create persistence context for " + name + " in " + identifier); - } - return new DefaultFileBasedPersistenceContext(directory); - } else { - throw new CachePersistenceException("Unknown space: " + identifier); - } - } - - private void validateName(String name) { - if (!name.matches("[a-zA-Z0-9\\-_]+")) { - throw new IllegalArgumentException("Name is invalid for persistence context: " + name); - } - } - - private boolean containsSpace(PersistenceSpaceIdentifier identifier) { - for (PersistenceSpace persistenceSpace : knownPersistenceSpaces.values()) { - if (persistenceSpace.identifier.equals(identifier)) { - return true; - } - } - return false; - } - - File getLockFile() { - return lockFile; - } - - private File getDirectoryFor(String identifier) { - File directory = new File(rootDirectory, safeIdentifier(identifier)); - - for (File parent = directory.getParentFile(); parent != null; parent = parent.getParentFile()) { - if (rootDirectory.equals(parent)) { - return directory; + public void destroyAll(String owner) { + File ownerDirectory = new File(rootDirectory, owner); + boolean cleared = true; + if (ownerDirectory.exists() && ownerDirectory.isDirectory()) { + cleared = false; + if (recursiveDeleteDirectoryContent(ownerDirectory)) { + LOGGER.debug("Destroyed all file based persistence contexts owned by {}", owner); + cleared = ownerDirectory.delete(); } } - - throw new 
IllegalArgumentException("Attempted to access file outside the persistence path"); - } - - private static void create(File directory) throws IOException, CachePersistenceException { - if (directory.isDirectory()) { - LOGGER.debug("Reusing {}", directory.getAbsolutePath()); - } else if (directory.mkdir()) { - LOGGER.debug("Created {}", directory.getAbsolutePath()); - } else { - throw new CachePersistenceException("Unable to create or reuse directory: " + directory.getAbsolutePath()); + if (!cleared) { + LOGGER.warn("Could not delete all file based persistence contexts owned by {}", owner); } } - private static void destroy(String identifier, DefaultPersistenceSpaceIdentifier fileBasedPersistenceContext, boolean verbose) { + private void destroy(SafeSpace ss, boolean verbose) { if (verbose) { - LOGGER.debug("Destroying file based persistence context for {}", identifier); + LOGGER.debug("Destroying file based persistence context for {}", ss.identifier); } - if (fileBasedPersistenceContext.getDirectory().exists() && !tryRecursiveDelete(fileBasedPersistenceContext.getDirectory())) { + if (ss.directory.exists() && !tryRecursiveDelete(ss.directory)) { if (verbose) { - LOGGER.warn("Could not delete directory for context {}", identifier); - } - } - } - - private static boolean recursiveDeleteDirectoryContent(File file) { - File[] contents = file.listFiles(); - if (contents == null) { - throw new IllegalArgumentException("File " + file.getAbsolutePath() + " is not a directory"); - } else { - boolean deleteSuccessful = true; - for (File f : contents) { - deleteSuccessful &= tryRecursiveDelete(f); - } - return deleteSuccessful; - } - } - - private static boolean recursiveDelete(File file) { - Deque toDelete = new ArrayDeque(); - toDelete.push(file); - while (!toDelete.isEmpty()) { - File target = toDelete.pop(); - File[] contents = target.listFiles(); - if (contents == null || contents.length == 0) { - if (target.exists() && !target.delete()) { - return false; - } - } else { - 
toDelete.push(target); - for (File f : contents) { - toDelete.push(f); - } - } - } - return true; - } - - @SuppressFBWarnings("DM_GC") - private static boolean tryRecursiveDelete(File file) { - boolean interrupted = false; - try { - for (int i = 0; i < 5; i++) { - if (recursiveDelete(file) || !isWindows()) { - return true; - } else { - System.gc(); - System.runFinalization(); - - try { - Thread.sleep(50); - } catch (InterruptedException e) { - interrupted = true; - } - } - } - } finally { - if (interrupted) { - Thread.currentThread().interrupt(); - } - } - return false; - } - - private static boolean isWindows() { - return System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows"); - } - - /** - * sanitize a name for valid file or directory name - * - * @param name the name to sanitize - * @return sanitized version of name - */ - private static String safeIdentifier(String name) { - return safeIdentifier(name, true); - } - - static String safeIdentifier(String name, boolean withSha1) { - int len = name.length(); - StringBuilder sb = new StringBuilder(len); - for (int i = 0; i < len; i++) { - char c = name.charAt(i); - if (c <= ' ' || c >= DEL || ILLEGALS.contains(c) || c == ESCAPE) { - sb.append(ESCAPE); - sb.append(String.format("%04x", (int) c)); - } else { - sb.append(c); + LOGGER.warn("Could not delete directory for context {}", ss.identifier); } } - if (withSha1) { - sb.append("_").append(sha1(name)); - } - return sb.toString(); } - private static String sha1(String input) { - StringBuilder sb = new StringBuilder(); - for (byte b : getSha1Digest().digest(input.getBytes(UTF8))) { - sb.append(toHexString((b & 0xf0) >>> 4)); - sb.append(toHexString((b & 0xf))); - } - return sb.toString(); - } - private static MessageDigest getSha1Digest() { - try { - return MessageDigest.getInstance("SHA-1"); - } catch (NoSuchAlgorithmException e) { - throw new AssertionError("All JDKs must have SHA-1"); - } + private SafeSpace createSafeSpaceLogical(String 
owner, String identifier) { + File ownerDirectory = new File(rootDirectory, owner); + File directory = new File(ownerDirectory, safeIdentifier(identifier)); + return new SafeSpace(identifier, directory); } - private static class PersistenceSpace { - final DefaultPersistenceSpaceIdentifier identifier; - final ConcurrentMap stateRepositories = new ConcurrentHashMap(); + private static final class SafeSpace { + private final String identifier; + private final File directory; - private PersistenceSpace(DefaultPersistenceSpaceIdentifier identifier) { + private SafeSpace(String identifier, File directory) { + this.directory = directory; this.identifier = identifier; } } - private static abstract class FileHolder { - final File directory; - FileHolder(File directory) { - this.directory = directory; - } - - public File getDirectory() { - return directory; - } - - } - private static class DefaultPersistenceSpaceIdentifier extends FileHolder implements PersistenceSpaceIdentifier { + private static final class DefaultSafeSpaceIdentifier implements SafeSpaceIdentifier { + private final SafeSpace safeSpace; - DefaultPersistenceSpaceIdentifier(File directory) { - super(directory); + private DefaultSafeSpaceIdentifier(SafeSpace safeSpace) { + this.safeSpace = safeSpace; } @Override - public Class getServiceType() { - return LocalPersistenceService.class; + public String toString() { + return safeSpace.identifier; } - } - private static class DefaultFileBasedPersistenceContext extends FileHolder implements FileBasedPersistenceContext { - - DefaultFileBasedPersistenceContext(File directory) { - super(directory); + @Override + public File getRoot() { + return safeSpace.directory; } } } diff --git a/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java b/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java index eca0aeee6d..e404724209 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java +++ 
b/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java @@ -20,6 +20,8 @@ import org.ehcache.CachePersistenceException; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.impl.serialization.TransientStateHolder; +import org.ehcache.spi.persistence.StateHolder; import org.ehcache.spi.persistence.StateRepository; import java.io.Closeable; @@ -35,17 +37,17 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; -import static org.ehcache.impl.persistence.DefaultLocalPersistenceService.safeIdentifier; +import static org.ehcache.impl.persistence.FileUtils.safeIdentifier; /** * FileBasedStateRepository */ class FileBasedStateRepository implements StateRepository, Closeable { - private static final String MAP_FILE_PREFIX = "map-"; - private static final String MAP_FILE_SUFFIX = ".bin"; + private static final String HOLDER_FILE_PREFIX = "holder-"; + private static final String HOLDER_FILE_SUFFIX = ".bin"; private final File dataDirectory; - private final ConcurrentMap knownMaps; + private final ConcurrentMap knownHolders; private final AtomicInteger nextIndex = new AtomicInteger(); FileBasedStateRepository(File directory) throws CachePersistenceException { @@ -56,7 +58,7 @@ class FileBasedStateRepository implements StateRepository, Closeable { throw new IllegalArgumentException(directory + " is not a directory"); } this.dataDirectory = directory; - knownMaps = new ConcurrentHashMap(); + knownHolders = new ConcurrentHashMap(); loadMaps(); } @@ -66,7 +68,7 @@ private void loadMaps() throws CachePersistenceException { for (File file : dataDirectory.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { - return name.endsWith(MAP_FILE_SUFFIX); + return name.endsWith(HOLDER_FILE_SUFFIX); } })) { FileInputStream fis = new FileInputStream(file); @@ -78,7 +80,7 @@ public boolean accept(File dir, String name) { if (nextIndex.get() <= tuple.index) { 
nextIndex.set(tuple.index + 1); } - knownMaps.put(name, tuple); + knownHolders.put(name, tuple); } finally { oin.close(); } @@ -87,13 +89,13 @@ public boolean accept(File dir, String name) { } } } catch (Exception e) { - knownMaps.clear(); + knownHolders.clear(); throw new CachePersistenceException("Failed to load existing StateRepository data", e); } } private void saveMaps() throws IOException { - for (Map.Entry entry : knownMaps.entrySet()) { + for (Map.Entry entry : knownHolders.entrySet()) { File outFile = new File(dataDirectory, createFileName(entry)); FileOutputStream fos = new FileOutputStream(outFile); try { @@ -110,20 +112,23 @@ private void saveMaps() throws IOException { } } - private String createFileName(Map.Entry entry) {return MAP_FILE_PREFIX + entry.getValue().index + "-" + safeIdentifier(entry.getKey(), false) + MAP_FILE_SUFFIX;} + private String createFileName(Map.Entry entry) {return HOLDER_FILE_PREFIX + entry.getValue().index + "-" + safeIdentifier(entry.getKey(), false) + HOLDER_FILE_SUFFIX;} @Override - public ConcurrentMap getPersistentConcurrentMap(String name, Class keyClass, Class valueClass) { - Tuple result = knownMaps.get(name); + public StateHolder getPersistentStateHolder(String name, Class keyClass, Class valueClass) { + Tuple result = knownHolders.get(name); if (result == null) { - ConcurrentHashMap newMap = new ConcurrentHashMap(); - result = knownMaps.putIfAbsent(name, new Tuple(nextIndex.getAndIncrement(), newMap)); + StateHolder holder = new TransientStateHolder(); + result = knownHolders.putIfAbsent(name, new Tuple(nextIndex.getAndIncrement(), holder)); if (result == null) { - return newMap; + return holder; } } - return (ConcurrentMap) result.map; + + @SuppressWarnings("unchecked") + StateHolder holder = (StateHolder) result.holder; + return holder; } @Override @@ -133,11 +138,11 @@ public void close() throws IOException { static class Tuple implements Serializable { final int index; - final ConcurrentMap map; + final 
StateHolder holder; - Tuple(int index, ConcurrentMap map) { + Tuple(int index, StateHolder holder) { this.index = index; - this.map = map; + this.holder = holder; } } } diff --git a/impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java b/impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java new file mode 100644 index 0000000000..cfad313088 --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java @@ -0,0 +1,209 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.persistence; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + +import org.ehcache.CachePersistenceException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.nio.charset.Charset; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.HashSet; +import java.util.Locale; +import java.util.Set; + +import static java.lang.Integer.toHexString; +import static java.nio.charset.Charset.forName; + +/** + * A bunch of utility functions, mainly used by {@link DefaultLocalPersistenceService} and + * {@link FileBasedStateRepository} within this class. 
+ */ +final class FileUtils { + private static final Logger LOGGER = LoggerFactory.getLogger(FileUtils.class); + private static final Charset UTF8 = forName("UTF8"); + private static final int DEL = 0x7F; + private static final char ESCAPE = '%'; + + private static final Set ILLEGALS = new HashSet(); + static { + ILLEGALS.add('/'); + ILLEGALS.add('\\'); + ILLEGALS.add('<'); + ILLEGALS.add('>'); + ILLEGALS.add(':'); + ILLEGALS.add('"'); + ILLEGALS.add('|'); + ILLEGALS.add('?'); + ILLEGALS.add('*'); + ILLEGALS.add('.'); + } + + static void createLocationIfRequiredAndVerify(final File rootDirectory) { + if(!rootDirectory.exists()) { + if(!rootDirectory.mkdirs()) { + throw new IllegalArgumentException("Directory couldn't be created: " + rootDirectory.getAbsolutePath()); + } + } else if(!rootDirectory.isDirectory()) { + throw new IllegalArgumentException("Location is not a directory: " + rootDirectory.getAbsolutePath()); + } + + if(!rootDirectory.canWrite()) { + throw new IllegalArgumentException("Location isn't writable: " + rootDirectory.getAbsolutePath()); + } + } + + static File createSubDirectory(File mainDirectory, String name) throws CachePersistenceException { + validateName(name); + File subDirectory = new File(mainDirectory, name); + create(subDirectory); + return subDirectory; + } + + static void validateName(String name) { + if (!name.matches("[a-zA-Z0-9\\-_]+")) { + throw new IllegalArgumentException("Name is invalid for persistence context: " + name); + } + } + + static void create(File directory) throws CachePersistenceException { + if (directory.isDirectory()) { + LOGGER.debug("Reusing {}", directory.getAbsolutePath()); + } else if (directory.mkdir()) { + LOGGER.debug("Created {}", directory.getAbsolutePath()); + } else if (directory.isDirectory()) { + // if create directory fails, check once more if it is due to concurrent creation. 
+ LOGGER.debug("Reusing {}", directory.getAbsolutePath()); + } else { + throw new CachePersistenceException("Unable to create or reuse directory: " + directory.getAbsolutePath()); + } + } + + static boolean recursiveDeleteDirectoryContent(File file) { + File[] contents = file.listFiles(); + if (contents == null) { + throw new IllegalArgumentException("File " + file.getAbsolutePath() + " is not a directory"); + } else { + boolean deleteSuccessful = true; + for (File f : contents) { + deleteSuccessful &= tryRecursiveDelete(f); + } + return deleteSuccessful; + } + } + + private static boolean recursiveDelete(File file) { + Deque toDelete = new ArrayDeque(); + toDelete.push(file); + while (!toDelete.isEmpty()) { + File target = toDelete.pop(); + File[] contents = target.listFiles(); + if (contents == null || contents.length == 0) { + if (target.exists() && !target.delete()) { + return false; + } + } else { + toDelete.push(target); + for (File f : contents) { + toDelete.push(f); + } + } + } + return true; + } + + @SuppressFBWarnings("DM_GC") + static boolean tryRecursiveDelete(File file) { + boolean interrupted = false; + try { + for (int i = 0; i < 5; i++) { + if (recursiveDelete(file) || !isWindows()) { + return true; + } else { + System.gc(); + System.runFinalization(); + + try { + Thread.sleep(50); + } catch (InterruptedException e) { + interrupted = true; + } + } + } + } finally { + if (interrupted) { + Thread.currentThread().interrupt(); + } + } + return false; + } + + private static boolean isWindows() { + return System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows"); + } + + /** + * sanitize a name for valid file or directory name + * + * @param name the name to sanitize + * @return sanitized version of name + */ + static String safeIdentifier(String name) { + return safeIdentifier(name, true); + } + + static String safeIdentifier(String name, boolean withSha1) { + int len = name.length(); + StringBuilder sb = new StringBuilder(len); + for 
(int i = 0; i < len; i++) { + char c = name.charAt(i); + if (c <= ' ' || c >= DEL || ILLEGALS.contains(c) || c == ESCAPE) { + sb.append(ESCAPE); + sb.append(String.format("%04x", (int) c)); + } else { + sb.append(c); + } + } + if (withSha1) { + sb.append("_").append(sha1(name)); + } + return sb.toString(); + } + + private static String sha1(String input) { + StringBuilder sb = new StringBuilder(); + for (byte b : getSha1Digest().digest(input.getBytes(UTF8))) { + sb.append(toHexString((b & 0xf0) >>> 4)); + sb.append(toHexString((b & 0xf))); + } + return sb.toString(); + } + + private static MessageDigest getSha1Digest() { + try { + return MessageDigest.getInstance("SHA-1"); + } catch (NoSuchAlgorithmException e) { + throw new AssertionError("All JDKs must have SHA-1"); + } + } +} \ No newline at end of file diff --git a/impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java index d350339ea6..8f9dbb1e6a 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.spi.serialization.Serializer; @@ -51,21 +50,6 @@ public ByteArraySerializer() { public ByteArraySerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@code byte[]} is a base java type and this implementation requires no state. - *

- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public ByteArraySerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java index cd05711d41..c72ba3b82f 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -46,21 +45,6 @@ public CharSerializer() { public CharSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link Character} is a base java type and this implementation requires no state. - *

- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public CharSerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java index de287192ba..475a32aeba 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java @@ -18,7 +18,6 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; @@ -28,22 +27,19 @@ import java.lang.ref.WeakReference; import java.nio.ByteBuffer; import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import org.ehcache.core.spi.function.NullaryFunction; +import org.ehcache.spi.persistence.StateHolder; import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.impl.internal.util.ByteBufferInputStream; import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; /** * A trivially compressed Java serialization based serializer. @@ -53,9 +49,9 @@ * {@code Class} and the integer representation are stored in a single on-heap * map. 
*/ -public class CompactJavaSerializer implements Serializer { +public class CompactJavaSerializer implements StatefulSerializer { - private final ConcurrentMap readLookup; + private volatile StateHolder readLookup; private final ConcurrentMap readLookupLocalCache = new ConcurrentHashMap(); private final ConcurrentMap writeLookup = new ConcurrentHashMap(); @@ -72,33 +68,18 @@ public class CompactJavaSerializer implements Serializer { * @see Serializer */ public CompactJavaSerializer(ClassLoader loader) { - this(loader, new TransientStateRepository()); - } - - public CompactJavaSerializer(ClassLoader loader, StateRepository stateRepository) { this.loader = loader; - this.readLookup = stateRepository.getPersistentConcurrentMap("CompactJavaSerializer-ObjectStreamClassIndex", Integer.class, ObjectStreamClass.class); - loadMappingsInWriteContext(readLookup.entrySet(), true); } - CompactJavaSerializer(ClassLoader loader, Map mappings) { - this(loader); - for (Entry e : mappings.entrySet()) { - Integer encoding = e.getKey(); - ObjectStreamClass disconnectedOsc = disconnect(e.getValue()); - readLookup.put(encoding, disconnectedOsc); - readLookupLocalCache.put(encoding, disconnectedOsc); - if (writeLookup.putIfAbsent(new SerializableDataKey(disconnectedOsc, true), encoding) != null) { - throw new AssertionError("Corrupted data " + mappings); - } - if (nextStreamIndex < encoding + 1) { - nextStreamIndex = encoding + 1; - } - } + @SuppressWarnings("unchecked") + public static Class> asTypedSerializer() { + return (Class) CompactJavaSerializer.class; } - Map getSerializationMappings() { - return Collections.unmodifiableMap(new HashMap(readLookup)); + @Override + public void init(final StateRepository stateRepository) { + this.readLookup = stateRepository.getPersistentStateHolder("CompactJavaSerializer-ObjectStreamClassIndex", Integer.class, ObjectStreamClass.class); + loadMappingsInWriteContext(readLookup.entrySet(), true); } /** @@ -128,7 +109,9 @@ public T read(ByteBuffer 
binary) throws ClassNotFoundException, SerializerExcept try { ObjectInputStream oin = getObjectInputStream(new ByteBufferInputStream(binary)); try { - return (T) oin.readObject(); + @SuppressWarnings("unchecked") + T value = (T) oin.readObject(); + return value; } finally { oin.close(); } diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializer.java deleted file mode 100644 index e0f383c513..0000000000 --- a/impl/src/main/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializer.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.serialization; - -import java.io.Closeable; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.ObjectStreamClass; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.util.Map; -import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.core.spi.service.FileBasedPersistenceContext; - -/** - * A trivially compressed Java serialization based serializer with persistent mappings. - *

- * Class descriptors in the resultant bytes are encoded as integers. Mappings - * between the integer representation and the {@link ObjectStreamClass}, and the - * {@code Class} and the integer representation are stored in a single on-heap - * map. - */ -public class CompactPersistentJavaSerializer implements Serializer, Closeable { - - private final File stateFile; - private final CompactJavaSerializer serializer; - - /** - * Constructor to enable this serializer as a persistent one. - * - * @param classLoader the classloader to use - * @param persistence the persistence context to use - * - * @see Serializer - */ - public CompactPersistentJavaSerializer(ClassLoader classLoader, FileBasedPersistenceContext persistence) throws IOException, ClassNotFoundException { - this.stateFile = new File(persistence.getDirectory(), "CompactPersistentJavaSerializer.state"); - if (stateFile.exists()) { - serializer = new CompactJavaSerializer(classLoader, readSerializationMappings(stateFile)); - } else { - serializer = new CompactJavaSerializer(classLoader); - } - } - - /** - * Closes this serializer instance, causing mappings to be persisted. - * - * @throws IOException in case mappings cannot be persisted. 
- */ - @Override - public final void close() throws IOException { - writeSerializationMappings(stateFile, serializer.getSerializationMappings()); - } - - private static Map readSerializationMappings(File stateFile) throws IOException, ClassNotFoundException { - FileInputStream fin = new FileInputStream(stateFile); - try { - ObjectInputStream oin = new ObjectInputStream(fin); - try { - return (Map) oin.readObject(); - } finally { - oin.close(); - } - } finally { - fin.close(); - } - } - - private static void writeSerializationMappings(File stateFile, Map mappings) throws IOException { - OutputStream fout = new FileOutputStream(stateFile); - try { - ObjectOutputStream oout = new ObjectOutputStream(fout); - try { - oout.writeObject(mappings); - } finally { - oout.close(); - } - } finally { - fout.close(); - } - } - - /** - * {@inheritDoc} - */ - @Override - public ByteBuffer serialize(T object) throws SerializerException { - return serializer.serialize(object); - } - - /** - * {@inheritDoc} - */ - @Override - public T read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { - return serializer.read(binary); - } - - /** - * {@inheritDoc} - */ - @Override - public boolean equals(T object, ByteBuffer binary) throws ClassNotFoundException, SerializerException { - return serializer.equals(object, binary); - } - -} diff --git a/impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java index e3e578d8be..9ded987569 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -46,21 +45,6 @@ public DoubleSerializer() { public DoubleSerializer(ClassLoader classLoader) { } - /** - * 
Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link Double} is a base java type and this implementation requires no state. - *

- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public DoubleSerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java index 73760d6cab..a15cf7382c 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -46,21 +45,6 @@ public FloatSerializer() { public FloatSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link Float} is a base java type and this implementation requires no state. - *

- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public FloatSerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java index 659c0ac8c3..f4efe01892 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -46,21 +45,6 @@ public IntegerSerializer() { public IntegerSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link Integer} is a base java type and this implementation requires no state. - *

- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public IntegerSerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java index 2c983daee9..ce7fd97477 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -46,21 +45,6 @@ public LongSerializer() { public LongSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link Long} is a base java type and this implementation requires no state. - *

- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public LongSerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java index a1c02e1ab8..b72abb4df2 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java @@ -17,7 +17,6 @@ package org.ehcache.impl.serialization; import org.ehcache.impl.internal.util.ByteBufferInputStream; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.SerializerException; @@ -43,10 +42,6 @@ public PlainJavaSerializer(ClassLoader classLoader) { this.classLoader = classLoader; } - public PlainJavaSerializer(ClassLoader classLoader, StateRepository stateRepository) throws IOException, ClassNotFoundException { - this(classLoader); - } - @Override public ByteBuffer serialize(T object) { ByteArrayOutputStream bout = new ByteArrayOutputStream(); diff --git a/impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java index add9eb458d..cc4f84e5a3 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.nio.ByteBuffer; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.SerializerException; @@ -49,20 +48,6 @@ public StringSerializer() { public StringSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a 
persistent one. - *

- * Parameters are ignored as {@link String} is a base java type and this implementation requires no state. - *

- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public StringSerializer(ClassLoader classLoader, StateRepository stateRepository) { - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java b/impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java new file mode 100644 index 0000000000..51f063ba5a --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java @@ -0,0 +1,61 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.impl.serialization; + +import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.spi.persistence.StateHolder; + +import java.io.Serializable; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +public class TransientStateHolder implements StateHolder, Serializable { + + private final ConcurrentMap map = new ConcurrentHashMap(); + + @Override + public V putIfAbsent(final K key, final V value) { + return map.putIfAbsent(key, value); + } + + @Override + public V get(final K key) { + return map.get(key); + } + + @Override + public Set> entrySet() { + return map.entrySet(); + } + + @Override + public boolean equals(final Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + final TransientStateHolder that = (TransientStateHolder)o; + + return map.equals(that.map); + + } + + @Override + public int hashCode() { + return map.hashCode(); + } +} diff --git a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java b/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java index 004f1cee73..07c07c448b 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java @@ -17,6 +17,7 @@ package org.ehcache.impl.serialization; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.spi.persistence.StateHolder; import org.ehcache.spi.persistence.StateRepository; import java.io.Serializable; @@ -25,23 +26,23 @@ /** * TransientStateRepository */ -class TransientStateRepository implements StateRepository { +public class TransientStateRepository implements StateRepository { - private ConcurrentMap> knownMaps = new ConcurrentHashMap>(); + private ConcurrentMap> knownHolders = new ConcurrentHashMap>(); @Override @SuppressWarnings("unchecked") - public 
ConcurrentMap getPersistentConcurrentMap(String name, Class keyClass, Class valueClass) { - ConcurrentMap concurrentMap = (ConcurrentMap) knownMaps.get(name); - if (concurrentMap != null) { - return concurrentMap; + public StateHolder getPersistentStateHolder(String name, Class keyClass, Class valueClass) { + StateHolder stateHolder = (StateHolder) knownHolders.get(name); + if (stateHolder != null) { + return stateHolder; } else { - ConcurrentHashMap newMap = new ConcurrentHashMap(); - concurrentMap = (ConcurrentMap) knownMaps.putIfAbsent(name, newMap); - if (concurrentMap == null) { - return newMap; + StateHolder newHolder = new TransientStateHolder(); + stateHolder = (StateHolder) knownHolders.putIfAbsent(name, newHolder); + if (stateHolder == null) { + return newHolder; } else { - return concurrentMap; + return stateHolder; } } } diff --git a/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory index 9020ac13a5..7a0738d79a 100644 --- a/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory +++ b/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory @@ -10,6 +10,7 @@ org.ehcache.impl.internal.spi.loaderwriter.DefaultCacheLoaderWriterProviderFacto org.ehcache.impl.internal.spi.event.DefaultCacheEventListenerProviderFactory org.ehcache.impl.internal.executor.DefaultExecutionServiceFactory org.ehcache.impl.internal.persistence.DefaultLocalPersistenceServiceFactory +org.ehcache.impl.internal.persistence.DefaultDiskResourceServiceFactory org.ehcache.impl.internal.loaderwriter.writebehind.WriteBehindProviderFactory org.ehcache.impl.internal.events.CacheEventNotificationListenerServiceProviderFactory org.ehcache.impl.internal.spi.copy.DefaultCopyProviderFactory diff --git a/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java 
b/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java index de6f839fdf..d379cee755 100644 --- a/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java @@ -16,14 +16,10 @@ package org.ehcache.config.builders; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourceType; -import org.ehcache.config.ResourceUnit; +import org.ehcache.config.*; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.expiry.Duration; import org.ehcache.expiry.Expirations; import org.ehcache.expiry.Expiry; @@ -32,23 +28,25 @@ import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; -import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.spi.service.ServiceConfiguration; import org.hamcrest.Matcher; import org.hamcrest.Matchers; +import org.hamcrest.core.IsSame; import org.junit.Test; import java.nio.ByteBuffer; import java.util.Map; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.sameInstance; -import static org.hamcrest.Matchers.notNullValue; -import static org.junit.Assert.assertThat; +import static 
org.hamcrest.Matchers.*; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThat; +import static org.mockito.Mockito.mock; public class CacheConfigurationBuilderTest { @@ -65,7 +63,10 @@ public boolean adviseAgainstEviction(Object key, Object value) { .withEvictionAdvisor(evictionAdvisor) .build(); - assertThat(evictionAdvisor, (Matcher)sameInstance(cacheConfiguration.getEvictionAdvisor())); + @SuppressWarnings("unchecked") + Matcher> evictionAdvisorMatcher = (Matcher) sameInstance(cacheConfiguration + .getEvictionAdvisor()); + assertThat(evictionAdvisor, evictionAdvisorMatcher); } @Test @@ -279,4 +280,33 @@ public void testSizeOf() { assertEquals(sizeOfEngineConfiguration.getMaxObjectGraphSize(), 1000); } + + @Test + public void testCopyingOfExistingConfiguration() { + Class keyClass = Integer.class; + Class valueClass = String.class; + ClassLoader loader = mock(ClassLoader.class); + @SuppressWarnings("unchecked") + EvictionAdvisor eviction = mock(EvictionAdvisor.class); + @SuppressWarnings("unchecked") + Expiry expiry = mock(Expiry.class); + ServiceConfiguration service = mock(ServiceConfiguration.class); + + CacheConfiguration configuration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class, heap(10)) + .withClassLoader(loader) + .withEvictionAdvisor(eviction) + .withExpiry(expiry) + .add(service) + .build(); + + CacheConfiguration copy = CacheConfigurationBuilder.newCacheConfigurationBuilder(configuration).build(); + + assertThat(copy.getKeyType(), equalTo(keyClass)); + assertThat(copy.getValueType(), equalTo(valueClass)); + assertThat(copy.getClassLoader(), equalTo(loader)); + + assertThat(copy.getEvictionAdvisor(), IsSame.>sameInstance(eviction)); + assertThat(copy.getExpiry(), IsSame.>sameInstance(expiry)); + assertThat(copy.getServiceConfigurations(), containsInAnyOrder(IsSame.>sameInstance(service))); + } } diff --git 
a/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java b/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java index 9db60743bb..e346337242 100644 --- a/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java @@ -23,6 +23,7 @@ import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.impl.serialization.CompactJavaSerializer; import org.ehcache.impl.serialization.JavaSerializer; +import org.ehcache.spi.serialization.Serializer; import org.junit.Test; import java.util.concurrent.atomic.AtomicInteger; @@ -56,16 +57,21 @@ public CacheManagerBuilder builder(final CacheManagerBui @Test public void testCanOverrideCopierInConfig() { + @SuppressWarnings("unchecked") CacheManagerBuilder managerBuilder = newCacheManagerBuilder() .withCopier(Long.class, (Class) IdentityCopier.class); - assertNotNull(managerBuilder.withCopier(Long.class, (Class) SerializingCopier.class)); + assertNotNull(managerBuilder.withCopier(Long.class, SerializingCopier.asCopierClass())); } @Test public void testCanOverrideSerializerConfig() { - CacheManagerBuilder managerBuilder = newCacheManagerBuilder() - .withSerializer(String.class, (Class) JavaSerializer.class); - assertNotNull(managerBuilder.withSerializer(String.class, (Class) CompactJavaSerializer.class)); + @SuppressWarnings("unchecked") + Class> serializer1 = (Class) JavaSerializer.class; + CacheManagerBuilder managerBuilder = newCacheManagerBuilder() + .withSerializer(String.class, serializer1); + @SuppressWarnings("unchecked") + Class> serializer2 = (Class) CompactJavaSerializer.class; + assertNotNull(managerBuilder.withSerializer(String.class, serializer2)); } @Test(expected = IllegalArgumentException.class) diff --git a/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java b/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java index 
9b16fff933..aa6326391e 100644 --- a/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java @@ -27,20 +27,24 @@ import org.junit.rules.TemporaryFolder; import java.io.File; -import java.io.FilenameFilter; import java.io.IOException; +import java.util.Arrays; -import static junit.framework.TestCase.assertNotNull; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; -import static org.junit.Assert.assertNull; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; /** * @author Alex Snaps */ public class PersistentCacheManagerTest { + private static final String TEST_CACHE_ALIAS = "test123"; + @Rule public ExpectedException thrown = ExpectedException.none(); @@ -52,7 +56,7 @@ public class PersistentCacheManagerTest { @Before public void setup() throws IOException { - rootDirectory = folder.newFolder("testInitializesLocalPersistenceService"); + rootDirectory = folder.newFolder("testInitializesDiskResourceService"); assertTrue(rootDirectory.delete()); builder = newCacheManagerBuilder().with(new CacheManagerPersistenceConfiguration(rootDirectory)); } @@ -61,6 +65,15 @@ public void setup() throws IOException { public void testInitializesLocalPersistenceService() throws IOException { builder.build(true); assertTrue(rootDirectory.isDirectory()); + assertThat(Arrays.asList(rootDirectory.list()), contains(".lock")); + } + + @Test + public void testInitializesLocalPersistenceServiceAndCreateCache() throws IOException { + buildCacheManagerWithCache(true); + + assertThat(rootDirectory, isLocked()); + 
assertThat(rootDirectory, containsCacheDirectory(TEST_CACHE_ALIAS)); } @Test @@ -74,47 +87,82 @@ public void testDestroyCache_NullAliasNotAllowed() throws CachePersistenceExcept @Test public void testDestroyCache_UnexistingCacheDoesNothing() throws CachePersistenceException { PersistentCacheManager manager = builder.build(true); - manager.destroyCache("test"); + manager.destroyCache(TEST_CACHE_ALIAS); } @Test public void testDestroyCache_Initialized_DestroyExistingCache() throws CachePersistenceException { - PersistentCacheManager manager = builder - .withCache("test", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true))) - .build(true); - assertNotNull(getCacheDirectory()); - manager.destroyCache("test"); - assertNull(getCacheDirectory()); + PersistentCacheManager manager = buildCacheManagerWithCache(true); + + manager.destroyCache(TEST_CACHE_ALIAS); + + assertThat(rootDirectory, isLocked()); + assertThat(rootDirectory, not(containsCacheDirectory(TEST_CACHE_ALIAS))); } + @Test public void testDestroyCache_Uninitialized_DestroyExistingCache() throws CachePersistenceException { - PersistentCacheManager manager = builder - .withCache("test", - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true))) - .build(true); - assertNotNull(getCacheDirectory()); + PersistentCacheManager manager = buildCacheManagerWithCache(true); + + manager.close(); + manager.destroyCache(TEST_CACHE_ALIAS); + + assertThat(rootDirectory, not(isLocked())); + assertThat(rootDirectory, not(containsCacheDirectory(TEST_CACHE_ALIAS))); + } + + @Test + public void testDestroyCache_CacheManagerUninitialized() throws CachePersistenceException { + PersistentCacheManager manager = buildCacheManagerWithCache(false); + + manager.destroyCache(TEST_CACHE_ALIAS); + + assertThat(rootDirectory, 
not(isLocked())); + assertThat(rootDirectory, not(containsCacheDirectory(TEST_CACHE_ALIAS))); + } + + @Test + public void testClose_DiskCacheLockReleased() throws CachePersistenceException { + PersistentCacheManager manager = buildCacheManagerWithCache(true); + + // Should lock the file when the CacheManager is opened + assertThat(rootDirectory, isLocked()); + manager.close(); // pass it to uninitialized - manager.destroyCache("test"); - assertNull(getCacheDirectory()); + + // Should unlock the file when the CacheManager is closed + assertThat(rootDirectory, not(isLocked())); } - private File getCacheDirectory() { - File[] files = rootDirectory.listFiles(new FilenameFilter() { - @Override - public boolean accept(final File dir, final String name) { - return name.startsWith("test"); - } - }); - if(files == null || files.length == 0) { - return null; + @Test + public void testCloseAndThenOpenOnTheSameFile() throws CachePersistenceException { + // Open a CacheManager that will create a cache, close it and put it out of scope + { + PersistentCacheManager manager = buildCacheManagerWithCache(true); + manager.close(); } - if(files.length > 1) { - fail("Too many cache directories"); + // Create a new CacheManager that will have the same cache. 
The cache should be there but the cache manager unlocked since the CacheManager isn't started + { + PersistentCacheManager manager = builder.build(false); + assertThat(rootDirectory, not(isLocked())); + assertThat(rootDirectory, containsCacheDirectory(TEST_CACHE_ALIAS)); } - return files[0]; + } + + public static class A { + + public A() throws IOException { + throw new IOException(".."); + } + + } + + private PersistentCacheManager buildCacheManagerWithCache(boolean init) { + return builder + .withCache(TEST_CACHE_ALIAS, + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true))) + .build(init); } } diff --git a/impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java b/impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java index 02a2c68591..f7f97e8319 100644 --- a/impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java @@ -50,7 +50,7 @@ public void testIsExtensible() { public UserManagedCacheBuilder> builder(final UserManagedCacheBuilder> builder) { return new UserManagedCacheBuilder>(String.class, Object.class) { @Override - TestUserManagedCache build(final ServiceLocator serviceProvider) { + TestUserManagedCache build(final ServiceLocator.DependencySet dependencySet) { return new TestUserManagedCache(); } }; diff --git a/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java b/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java index ae9260bbf6..339dce04f6 100644 --- a/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java +++ b/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java @@ -17,15 +17,19 @@ package org.ehcache.core.spi; import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.service.DiskResourceService; import 
org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.OffHeapStore; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.core.spi.store.tiering.CachingTier; +import org.hamcrest.core.IsCollectionContaining; import org.hamcrest.core.IsSame; import org.junit.Test; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.junit.Assert.assertThat; +import static org.mockito.Mockito.mock; /** * @@ -35,23 +39,25 @@ public class ServiceProviderTest { @Test public void testSupportsMultipleAuthoritativeTierProviders() throws Exception { - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator.DependencySet dependencySet = dependencySet(); OnHeapStore.Provider cachingTierProvider = new OnHeapStore.Provider(); OffHeapStore.Provider authoritativeTierProvider = new OffHeapStore.Provider(); OffHeapDiskStore.Provider diskStoreProvider = new OffHeapDiskStore.Provider(); - serviceLocator.addService(cachingTierProvider); - serviceLocator.addService(authoritativeTierProvider); - serviceLocator.addService(diskStoreProvider); + dependencySet.with(cachingTierProvider); + dependencySet.with(authoritativeTierProvider); + dependencySet.with(diskStoreProvider); + dependencySet.with(mock(DiskResourceService.class)); + ServiceLocator serviceLocator = dependencySet.build(); serviceLocator.startAllServices(); - assertThat(serviceLocator.getServicesOfType(CachingTier.Provider.class).iterator().next(), - IsSame.sameInstance(cachingTierProvider)); - assertThat(serviceLocator.getServicesOfType(AuthoritativeTier.Provider.class).iterator().next(), - IsSame.sameInstance(authoritativeTierProvider)); - assertThat(serviceLocator.getServicesOfType(diskStoreProvider.getClass()).iterator().next(), - IsSame.sameInstance(diskStoreProvider)); + assertThat(serviceLocator.getServicesOfType(CachingTier.Provider.class), + 
IsCollectionContaining.hasItem(IsSame.sameInstance(cachingTierProvider))); + assertThat(serviceLocator.getServicesOfType(AuthoritativeTier.Provider.class), + IsCollectionContaining.hasItem(IsSame.sameInstance(authoritativeTierProvider))); + assertThat(serviceLocator.getServicesOfType(OffHeapDiskStore.Provider.class), + IsCollectionContaining.hasItem(IsSame.sameInstance(diskStoreProvider))); } } diff --git a/impl/src/test/java/org/ehcache/docs/GettingStarted.java b/impl/src/test/java/org/ehcache/docs/GettingStarted.java index 012afc2325..4109e7d317 100644 --- a/impl/src/test/java/org/ehcache/docs/GettingStarted.java +++ b/impl/src/test/java/org/ehcache/docs/GettingStarted.java @@ -44,11 +44,16 @@ import org.ehcache.event.EventType; import org.ehcache.impl.copy.ReadWriteCopier; import org.junit.Test; +import org.terracotta.context.ContextElement; +import org.terracotta.context.TreeNode; +import org.terracotta.statistics.StatisticsManager; import java.io.File; import java.io.Serializable; import java.net.URISyntaxException; import java.util.EnumSet; +import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; import static java.util.Collections.singletonMap; @@ -120,6 +125,8 @@ public void offheapCacheManager() { ) .build(true); + Cache tieredCache = cacheManager.getCache("tieredCache", Long.class, String.class); + cacheManager.close(); // end::offheapCacheManager[] } @@ -138,6 +145,9 @@ public void threeTiersCacheManager() throws Exception { ) ).build(true); + Cache threeTieredCache = persistentCacheManager.getCache("threeTieredCache", Long.class, String.class); + + persistentCacheManager.close(); // end::threeTiersCacheManager[] } diff --git a/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java b/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java index 03ba3fce80..02c7193b39 100644 --- a/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java +++ b/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java @@ -21,20 +21,14 @@ 
import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.builders.UserManagedCacheBuilder; -import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; -import org.ehcache.event.EventType; -import org.ehcache.impl.config.persistence.UserManagedPersistenceContext; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.docs.plugs.ListenerObject; -import org.ehcache.docs.plugs.LongCopier; -import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.docs.plugs.OddKeysEvictionAdvisor; -import org.ehcache.docs.plugs.SampleLoaderWriter; -import org.ehcache.docs.plugs.StringCopier; -import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.event.EventType; import org.ehcache.impl.persistence.DefaultLocalPersistenceService; -import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.impl.config.persistence.UserManagedPersistenceContext; import org.junit.Test; import java.io.File; diff --git a/impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java b/impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java index 4e11805814..87b088be32 100644 --- a/impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java +++ b/impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java @@ -28,7 +28,7 @@ public class ListenerObject implements CacheEventListener { private int evicted; @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { Logger logger = LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "GettingStarted"); logger.info(event.getType().toString()); if(event.getType() == EventType.EVICTED){ diff --git 
a/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java b/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java index afbe1f2517..46655b214c 100644 --- a/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java +++ b/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java @@ -25,15 +25,21 @@ public class StringCopier implements Copier { private static final Logger LOG = LoggerFactory.getLogger(StringCopier.class); + private static final Copier STRING_COPIER = new StringCopier(); + + public static Copier copier() { + return STRING_COPIER; + } + @Override public String copyForRead(String obj) { - LOG.info("Copying for read {}", obj); + LOG.debug("Copying for read {}", obj); return obj; } @Override public String copyForWrite(String obj) { - LOG.info("Copying for write {}", obj); + LOG.debug("Copying for write {}", obj); return obj; } } diff --git a/impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java b/impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java index d34f18a4e0..c2eb674696 100644 --- a/impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java +++ b/impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java @@ -39,9 +39,10 @@ public void testFailsToConstructWithEmptyEventSetAndInstance() { @Test(expected = IllegalArgumentException.class) public void testFailsToConstructWithEmptyEventSetAndClass() { Set fireOn = emptySet(); - new DefaultCacheEventListenerConfiguration(fireOn, (Class)TestCacheEventListener.class); + Class eventListenerClass = TestCacheEventListener.class; + new DefaultCacheEventListenerConfiguration(fireOn, eventListenerClass); } - abstract static class TestCacheEventListener implements CacheEventListener { + abstract static class TestCacheEventListener implements CacheEventListener { } } diff --git 
a/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java b/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java index 3421a2cf35..c4cbb3ae98 100644 --- a/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java +++ b/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java @@ -16,56 +16,75 @@ package org.ehcache.impl.config.serializer; +import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; +import org.ehcache.spi.serialization.StatefulSerializer; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.ExpectedException; import java.nio.ByteBuffer; import static org.junit.Assert.*; -/** - * Created by alsu on 30/09/15. 
- */ public class DefaultSerializationProviderConfigurationTest { + @Rule + public ExpectedException expectedException = ExpectedException.none(); + @Test - public void testAddSerializerForTransient() throws Exception { + public void testAddSerializerFor() throws Exception { DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, TransientSerializer.class); + config.addSerializerFor(Long.class, MinimalSerializer.class); - assertTrue(config.getPersistentSerializers().isEmpty()); - assertSame(TransientSerializer.class, config.getTransientSerializers().get(Long.class)); + assertSame(MinimalSerializer.class, config.getDefaultSerializers().get(Long.class)); } @Test - public void testAddSerializerForPersistent() throws Exception { + public void testAddSerializerForDuplicateThrows() throws Exception { DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, PersistentSerializer.class); + config.addSerializerFor(Long.class, MinimalSerializer.class); + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("Duplicate serializer for class"); + config.addSerializerFor(Long.class, MinimalSerializer.class); + } - assertTrue(config.getTransientSerializers().isEmpty()); - assertSame(PersistentSerializer.class, config.getPersistentSerializers().get(Long.class)); + @Test + public void testAddSerializerForConstructorless() throws Exception { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); + DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); + config.addSerializerFor(Long.class, UnusableSerializer.class); } @Test - public void testAddSerializerForTransientPersistentCombo() throws Exception { + public void 
testAddSerializerForStatefulSerializer() throws Exception { DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, ComboSerializer.class); + config.addSerializerFor(Long.class, MinimalStatefulSerializer.class); + assertSame(MinimalStatefulSerializer.class, config.getDefaultSerializers().get(Long.class)); + } - assertSame(ComboSerializer.class, config.getPersistentSerializers().get(Long.class)); - assertSame(ComboSerializer.class, config.getTransientSerializers().get(Long.class)); + @Test + public void testAddSerializerForStatefulConstructorless() throws Exception { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); + DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); + config.addSerializerFor(Long.class, UnusableStatefulSerializer.class); } - @Test(expected = IllegalArgumentException.class) - public void testAddSerializerForUnusable() throws Exception { + @Test + public void testAddSerializerForLegacySerializer() throws Exception { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, UnusableSerializer.class); + config.addSerializerFor(Long.class, LegacySerializer.class); } - private static class TransientSerializer implements Serializer { + private static class MinimalSerializer implements Serializer { - public TransientSerializer(ClassLoader loader) { + public MinimalSerializer(ClassLoader loader) { } @Override @@ -84,10 +103,28 @@ public boolean equals(final Long object, final ByteBuffer binary) throws ClassNo } } - private static class PersistentSerializer implements Serializer { + private 
static class LegacySerializer implements Serializer { + + public LegacySerializer(ClassLoader loader, FileBasedPersistenceContext context) { + } + + @Override + public ByteBuffer serialize(final Long object) throws SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } - public PersistentSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + @Override + public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); } + } + + private static class UnusableSerializer implements Serializer { @Override public ByteBuffer serialize(final Long object) throws SerializerException { @@ -105,12 +142,14 @@ public boolean equals(final Long object, final ByteBuffer binary) throws ClassNo } } - private static class ComboSerializer implements Serializer { + private static class MinimalStatefulSerializer implements StatefulSerializer { - public ComboSerializer(ClassLoader loader) { + public MinimalStatefulSerializer(ClassLoader loader) { } - public ComboSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + @Override + public void init(final StateRepository stateRepository) { + throw new UnsupportedOperationException("Implement me!"); } @Override @@ -129,7 +168,12 @@ public boolean equals(final Long object, final ByteBuffer binary) throws ClassNo } } - private static class UnusableSerializer implements Serializer { + private static class UnusableStatefulSerializer implements StatefulSerializer { + + @Override + public void init(final StateRepository stateRepository) { + throw new UnsupportedOperationException("Implement me!"); + } @Override public ByteBuffer serialize(final Long object) throws SerializerException { @@ -146,4 +190,4 
@@ public boolean equals(final Long object, final ByteBuffer binary) throws ClassNo throw new UnsupportedOperationException("Implement me!"); } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java b/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java index 7c19f07aa2..0f1cc1b92c 100644 --- a/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java +++ b/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java @@ -58,6 +58,7 @@ public class SerializerCountingTest { public TemporaryFolder folder = new TemporaryFolder(); @Before + @SuppressWarnings("unchecked") public void setUp() { cacheManager = newCacheManagerBuilder() .using(new DefaultSerializationProviderConfiguration().addSerializerFor(Serializable.class, (Class) CountingSerializer.class) @@ -79,8 +80,8 @@ public void tearDown() { public void testOnHeapPutGet() { Cache cache = cacheManager.createCache("onHeap", newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .build()); cache.put(42L, "TheAnswer!"); @@ -121,8 +122,8 @@ public void testOffHeapPutGet() { public void testOffHeapOnHeapCopyPutGet() { Cache cache = cacheManager.createCache("offHeap", newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new 
DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .build() ); @@ -145,8 +146,8 @@ public void testOffHeapOnHeapCopyPutGet() { public void testDiskOffHeapOnHeapCopyPutGet() { Cache cache = cacheManager.createCache("offHeap", newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(2, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB).disk(100, MemoryUnit.MB)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .build() ); diff --git a/impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java b/impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java index f102d03379..7a3d6d7a49 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java @@ -20,9 +20,11 @@ import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; import org.junit.Test; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.CoreMatchers.sameInstance; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; @@ -30,13 +32,12 @@ /** 
* DefaultTimeSourceServiceTest */ -@ServiceDependencies(TimeSourceService.class) public class DefaultTimeSourceServiceTest { @Test public void testResolvesDefaultTimeSource() { - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.loadDependenciesOf(this.getClass()); + ServiceLocator.DependencySet dependencySet = dependencySet().with(TimeSourceService.class); + ServiceLocator serviceLocator = dependencySet.build(); assertThat(serviceLocator.getService(TimeSourceService.class).getTimeSource(), sameInstance(SystemTimeSource.INSTANCE)); } @@ -44,9 +45,9 @@ public void testResolvesDefaultTimeSource() { @Test public void testCanConfigureAlternateTimeSource() { TimeSource timeSource = mock(TimeSource.class); - ServiceLocator serviceLocator = new ServiceLocator(); - TimeSourceService timeSourceService = serviceLocator.getOrCreateServiceFor(new TimeSourceConfiguration(timeSource)); + ServiceLocator serviceLocator = dependencySet().with(new TimeSourceConfiguration(timeSource)).build(); + TimeSourceService timeSourceService = serviceLocator.getService(TimeSourceService.class); assertThat(timeSourceService.getTimeSource(), sameInstance(timeSource)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java index 707b159ea8..69ca762d45 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java @@ -39,9 +39,12 @@ */ public class ClassInstanceProviderTest { + @SuppressWarnings("unchecked") + private Class> configClass = (Class)ClassInstanceConfiguration.class; + @Test public void testNewInstanceUsingAliasAndNoArgs() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + 
ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); classInstanceProvider.preconfigured.put("test stuff", new ClassInstanceConfiguration(TestService.class)); TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); @@ -51,7 +54,7 @@ public void testNewInstanceUsingAliasAndNoArgs() throws Exception { @Test public void testNewInstanceUsingAliasAndArg() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); classInstanceProvider.preconfigured.put("test stuff", new ClassInstanceConfiguration(TestService.class, "test string")); TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); @@ -61,7 +64,7 @@ public void testNewInstanceUsingAliasAndArg() throws Exception { @Test public void testNewInstanceUsingServiceConfig() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); TestServiceConfiguration config = new TestServiceConfiguration(); TestService obj = classInstanceProvider.newInstance("test stuff", config); @@ -74,7 +77,7 @@ public void testNewInstanceUsingServiceConfigFactory() throws Exception { TestServiceProviderConfiguration factoryConfig = new TestServiceProviderConfiguration(); factoryConfig.getDefaults().put("test stuff", new ClassInstanceConfiguration(TestService.class)); - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(factoryConfig, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(factoryConfig, configClass); classInstanceProvider.start(null); TestService obj = 
classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); @@ -121,7 +124,7 @@ public void testReleaseCloseableInstanceThrows() throws Exception { @Test public void testNewInstanceWithActualInstanceInServiceConfig() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); TestService service = new TestService(); TestServiceConfiguration config = new TestServiceConfiguration(service); @@ -133,7 +136,7 @@ public void testNewInstanceWithActualInstanceInServiceConfig() throws Exception @Test public void testSameInstanceRetrievedMultipleTimesUpdatesTheProvidedCount() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); TestService service = new TestService(); TestServiceConfiguration config = new TestServiceConfiguration(service); @@ -148,7 +151,9 @@ public void testSameInstanceRetrievedMultipleTimesUpdatesTheProvidedCount() thro @Test public void testInstancesNotCreatedByProviderDoesNotClose() throws IOException { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + @SuppressWarnings("unchecked") + Class> configClass = (Class) ClassInstanceConfiguration.class; + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); TestCloaseableService service = mock(TestCloaseableService.class); TestCloaseableServiceConfig config = new TestCloaseableServiceConfig(service); diff --git a/impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java b/impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java index 8764e6a3ec..90857b5dd6 100644 --- 
a/impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java @@ -25,6 +25,7 @@ import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; +import static org.ehcache.config.Eviction.noAdvice; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsNull.nullValue; @@ -117,21 +118,21 @@ public int compareTo(BadHashKey o) { @Test public void testRandomSampleOnEmptyMap() { ConcurrentHashMap map = new ConcurrentHashMap(); - assertThat(map.getEvictionCandidate(new Random(), 1, null, Eviction.noAdvice()), nullValue()); + assertThat(map.getEvictionCandidate(new Random(), 1, null, noAdvice()), nullValue()); } @Test public void testEmptyRandomSample() { ConcurrentHashMap map = new ConcurrentHashMap(); map.put("foo", "bar"); - assertThat(map.getEvictionCandidate(new Random(), 0, null, Eviction.noAdvice()), nullValue()); + assertThat(map.getEvictionCandidate(new Random(), 0, null, noAdvice()), nullValue()); } @Test public void testOversizedRandomSample() { ConcurrentHashMap map = new ConcurrentHashMap(); map.put("foo", "bar"); - Entry candidate = map.getEvictionCandidate(new Random(), 2, null, Eviction.noAdvice()); + Entry candidate = map.getEvictionCandidate(new Random(), 2, null, noAdvice()); assertThat(candidate.getKey(), is("foo")); assertThat(candidate.getValue(), is("bar")); } @@ -147,7 +148,7 @@ public void testUndersizedRandomSample() { public int compare(String t, String t1) { return 0; } - }, Eviction.noAdvice()); + }, noAdvice()); assertThat(candidate, notNullValue()); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java b/impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java index 361736d26c..f55a8446f5 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java +++ 
b/impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java @@ -33,6 +33,7 @@ public class SerializingCopierTest { @Test public void testCopy() throws Exception { + @SuppressWarnings("unchecked") Serializer serializer = mock(Serializer.class); String in = new String("foo"); ByteBuffer buff = mock(ByteBuffer.class); diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java b/impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java index 12526e6e01..1148d5e6b7 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java @@ -31,7 +31,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; -import static org.mockito.Matchers.anyObject; +import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -45,28 +45,34 @@ public class CacheEventDispatcherFactoryImplTest { @Test public void testConfigurationOfThreadPoolAlias() { + @SuppressWarnings("unchecked") ServiceProvider serviceProvider = mock(ServiceProvider.class); when(serviceProvider.getService(ExecutionService.class)).thenReturn(mock(ExecutionService.class)); CacheEventDispatcherFactoryImpl factory = new CacheEventDispatcherFactoryImpl(); factory.start(serviceProvider); DefaultCacheEventDispatcherConfiguration config = spy(new DefaultCacheEventDispatcherConfiguration("aName")); - factory.createCacheEventDispatcher(mock(Store.class), config); + @SuppressWarnings("unchecked") + Store store = mock(Store.class); + factory.createCacheEventDispatcher(store, config); verify(config).getThreadPoolAlias(); } @Test + @SuppressWarnings("unchecked") public void 
testCreateCacheEventDispatcherReturnsDisabledDispatcherWhenNoThreadPool() throws Exception { ServiceProvider serviceProvider = mock(ServiceProvider.class); ExecutionService executionService = mock(ExecutionService.class); when(serviceProvider.getService(ExecutionService.class)).thenReturn(executionService); - when(executionService.getOrderedExecutor(eq("myAlias"), (BlockingQueue) anyObject())).thenThrow(IllegalArgumentException.class); - when(executionService.getUnorderedExecutor(eq("myAlias"), (BlockingQueue) anyObject())).thenThrow(IllegalArgumentException.class); + when(executionService.getOrderedExecutor(eq("myAlias"), any(BlockingQueue.class))).thenThrow(IllegalArgumentException.class); + when(executionService.getUnorderedExecutor(eq("myAlias"), any(BlockingQueue.class))).thenThrow(IllegalArgumentException.class); CacheEventDispatcherFactoryImpl cacheEventDispatcherFactory = new CacheEventDispatcherFactoryImpl(); cacheEventDispatcherFactory.start(serviceProvider); + @SuppressWarnings("unchecked") + Store store = mock(Store.class); try { - cacheEventDispatcherFactory.createCacheEventDispatcher(mock(Store.class), new DefaultCacheEventDispatcherConfiguration("myAlias")); + cacheEventDispatcherFactory.createCacheEventDispatcher(store, new DefaultCacheEventDispatcherConfiguration("myAlias")); fail("expected IllegalArgumentException"); } catch (IllegalArgumentException iae) { // expected @@ -74,17 +80,19 @@ public void testCreateCacheEventDispatcherReturnsDisabledDispatcherWhenNoThreadP } @Test + @SuppressWarnings("unchecked") public void testCreateCacheEventReturnsDisabledDispatcherWhenThreadPoolFound() throws Exception { ServiceProvider serviceProvider = mock(ServiceProvider.class); ExecutionService executionService = mock(ExecutionService.class); when(serviceProvider.getService(ExecutionService.class)).thenReturn(executionService); - when(executionService.getOrderedExecutor(eq("myAlias"), (BlockingQueue) anyObject())).thenReturn(mock(ExecutorService.class)); - 
when(executionService.getUnorderedExecutor(eq("myAlias"), (BlockingQueue) anyObject())).thenReturn(mock(ExecutorService.class)); + when(executionService.getOrderedExecutor(eq("myAlias"), any(BlockingQueue.class))).thenReturn(mock(ExecutorService.class)); + when(executionService.getUnorderedExecutor(eq("myAlias"), any(BlockingQueue.class))).thenReturn(mock(ExecutorService.class)); CacheEventDispatcherFactoryImpl cacheEventDispatcherFactory = new CacheEventDispatcherFactoryImpl(); cacheEventDispatcherFactory.start(serviceProvider); - CacheEventDispatcher dispatcher = cacheEventDispatcherFactory.createCacheEventDispatcher(mock(Store.class), new DefaultCacheEventDispatcherConfiguration("myAlias")); + Store store = mock(Store.class); + CacheEventDispatcher dispatcher = cacheEventDispatcherFactory.createCacheEventDispatcher(store, new DefaultCacheEventDispatcherConfiguration("myAlias")); assertThat(dispatcher, instanceOf(CacheEventDispatcherImpl.class)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java b/impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java index 8a4e7dd845..49b6f36812 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java @@ -16,9 +16,11 @@ package org.ehcache.impl.internal.events; +import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.event.EventType; import org.ehcache.core.spi.store.events.StoreEventFilter; import org.ehcache.core.spi.store.events.StoreEventListener; +import org.hamcrest.Matcher; import org.junit.Before; import org.junit.Test; import org.mockito.InOrder; @@ -40,10 +42,13 @@ */ public class FudgingInvocationScopedEventSinkTest { - private StoreEventListener listener; + private StoreEventListener listener; private 
FudgingInvocationScopedEventSink eventSink; + private Matcher> createdMatcher = eventType(EventType.CREATED); + private Matcher> evictedMatcher = eventType(EventType.EVICTED); @Before + @SuppressWarnings("unchecked") public void setUp() { HashSet> storeEventListeners = new HashSet>(); listener = mock(StoreEventListener.class); @@ -60,8 +65,8 @@ public void testEvictedDifferentKeyNoImpact() { eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.EVICTED))); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); verifyNoMoreInteractions(listener); } @@ -72,8 +77,8 @@ public void testEvictedSameKeyAfterUpdateReplacesWithEvictCreate() { eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.EVICTED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); verifyNoMoreInteractions(listener); } @@ -85,8 +90,8 @@ public void testEvictedSameKeyAfterCreateFudgesExpiryToo() { eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.EVICTED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); verifyNoMoreInteractions(listener); } @@ -99,8 +104,8 @@ public void testEvictedSameKeyAfterUpdateReplacesWithEvictCreateEvenWithMultiple eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener, times(3)).onEvent(argThat(eventType(EventType.EVICTED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); + 
inOrder.verify(listener, times(3)).onEvent(argThat(evictedMatcher)); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); verifyNoMoreInteractions(listener); } @@ -114,8 +119,8 @@ public void testEvictedSameKeyAfterCreateFudgesExpiryTooEvenWithMultipleEvictsIn eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener, times(3)).onEvent(argThat(eventType(EventType.EVICTED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); + inOrder.verify(listener, times(3)).onEvent(argThat(evictedMatcher)); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); verifyNoMoreInteractions(listener); } @@ -127,9 +132,10 @@ public void testEvictedKeyDoesNotFudgeOlderEvents() { eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.UPDATED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.EVICTED))); + Matcher> updatedMatcher = eventType(EventType.UPDATED); + inOrder.verify(listener).onEvent(argThat(updatedMatcher)); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); verifyNoMoreInteractions(listener); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java b/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java index 3e081b5cd7..77c8a16c1d 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java @@ -16,9 +16,11 @@ package org.ehcache.impl.internal.events; +import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.core.spi.store.events.StoreEventFilter; import org.ehcache.core.spi.store.events.StoreEventListener; import 
org.ehcache.event.EventType; +import org.hamcrest.Matcher; import org.junit.Before; import org.junit.Test; import org.mockito.InOrder; @@ -39,10 +41,11 @@ */ public class InvocationScopedEventSinkTest { - private StoreEventListener listener; + private StoreEventListener listener; private InvocationScopedEventSink eventSink; @Before + @SuppressWarnings("unchecked") public void setUp() { HashSet> storeEventListeners = new HashSet>(); listener = mock(StoreEventListener.class); @@ -63,10 +66,13 @@ public void testReset() { eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.UPDATED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.EVICTED))); + Matcher> createdMatcher = eventType(EventType.CREATED); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); + Matcher> updatedMatcher = eventType(EventType.UPDATED); + inOrder.verify(listener).onEvent(argThat(updatedMatcher)); + Matcher> evictedMatcher = eventType(EventType.EVICTED); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); verifyNoMoreInteractions(listener); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java b/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java index 7f68a8c764..8a18af5c67 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java @@ -25,6 +25,7 @@ import org.ehcache.core.spi.store.events.StoreEventListener; import org.hamcrest.Matcher; import org.junit.Test; +import org.mockito.Matchers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -63,8 +64,10 @@ public void testRegistersOrderingChange() { } @Test + @SuppressWarnings("unchecked") public void 
testListenerNotifiedUnordered() { ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher(1); + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); dispatcher.addEventListener(listener); @@ -76,8 +79,10 @@ public void testListenerNotifiedUnordered() { } @Test + @SuppressWarnings("unchecked") public void testListenerNotifiedOrdered() { ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher(1); + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); dispatcher.addEventListener(listener); dispatcher.setEventOrdering(true); @@ -92,9 +97,11 @@ public void testListenerNotifiedOrdered() { @Test public void testEventFiltering() { ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher(1); + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); dispatcher.addEventListener(listener); + @SuppressWarnings("unchecked") StoreEventFilter filter = mock(StoreEventFilter.class); when(filter.acceptEvent(eq(EventType.CREATED), anyString(), anyString(), anyString())).thenReturn(true); when(filter.acceptEvent(eq(EventType.REMOVED), anyString(), anyString(), anyString())).thenReturn(false); @@ -188,4 +195,4 @@ public Long apply(Long key, Long value) { assertThat(resultMap, is(map)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java b/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java index 1430705e90..cc51d8ac86 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java @@ -297,7 +297,7 @@ public void testJobsAreExecutedInOrder() throws InterruptedException, ExecutionE List> tasks = new ArrayList>(); for (int i = 0; i < 100; i++) { final int index = i; - 
tasks.add(executor.submit(new Callable() { + tasks.add(executor.submit(new Callable() { @Override public Object call() throws Exception { diff --git a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java b/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java index 6e8f49d821..831a613c79 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java +++ b/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java @@ -71,8 +71,7 @@ public abstract class AbstractWriteBehindTestBase { @Test public void testWriteOrdering() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -104,8 +103,7 @@ public void testWriteOrdering() throws Exception { @Test public void testWrites() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -133,8 +131,7 @@ public void testWrites() throws Exception { @Test public void 
testBulkWrites() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -181,8 +178,7 @@ public void testBulkWrites() throws Exception { @Test public void testThatAllGetsReturnLatestData() throws BulkCacheWritingException, Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); @@ -230,8 +226,7 @@ public void testThatAllGetsReturnLatestData() throws BulkCacheWritingException, @Test public void testAllGetsReturnLatestDataWithKeyCollision() { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -260,10 +255,10 @@ public void 
testAllGetsReturnLatestDataWithKeyCollision() { @Test public void testBatchedDeletedKeyReturnsNull() throws Exception { + @SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); when(loaderWriter.load("key")).thenReturn("value"); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -285,6 +280,7 @@ public void testBatchedDeletedKeyReturnsNull() throws Exception { public void testUnBatchedDeletedKeyReturnsNull() throws Exception { final Semaphore semaphore = new Semaphore(0); + @SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); when(loaderWriter.load("key")).thenReturn("value"); doAnswer(new Answer() { @@ -294,8 +290,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { return null; } }).when(loaderWriter).delete("key"); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -316,10 +311,10 @@ public Object answer(InvocationOnMock invocation) throws Throwable { @Test public void testBatchedOverwrittenKeyReturnsNewValue() throws Exception { + @SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); when(loaderWriter.load("key")).thenReturn("value"); - 
CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -341,6 +336,7 @@ public void testBatchedOverwrittenKeyReturnsNewValue() throws Exception { public void testUnBatchedOverwrittenKeyReturnsNewValue() throws Exception { final Semaphore semaphore = new Semaphore(0); + @SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); when(loaderWriter.load("key")).thenReturn("value"); doAnswer(new Answer() { @@ -350,8 +346,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { return null; } }).when(loaderWriter).delete("key"); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -373,8 +368,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { @Test public void testCoaslecedWritesAreNotSeen() throws InterruptedException { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = 
getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -401,8 +395,7 @@ public void testCoaslecedWritesAreNotSeen() throws InterruptedException { @Test public void testUnBatchedWriteBehindStopWaitsForEmptyQueue() { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -420,8 +413,7 @@ public void testUnBatchedWriteBehindStopWaitsForEmptyQueue() { @Test public void testBatchedWriteBehindStopWaitsForEmptyQueue() { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -439,6 +431,7 @@ public void testBatchedWriteBehindStopWaitsForEmptyQueue() { @Test public void testUnBatchedWriteBehindBlocksWhenFull() throws Exception { final Semaphore gate = new Semaphore(0); + @SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); doAnswer(new Answer() { @@ -449,8 +442,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { } }).when(loaderWriter).write(anyString(), anyString()); - 
CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -489,6 +481,7 @@ public void run() { } @Test + @SuppressWarnings("unchecked") public void testBatchedWriteBehindBlocksWhenFull() throws Exception { final Semaphore gate = new Semaphore(0); CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); @@ -501,8 +494,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { } }).when(loaderWriter).writeAll(any(Iterable.class)); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -543,8 +535,7 @@ public void run() { @Test public void testFilledBatchedIsWritten() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -572,8 +563,7 @@ public void testFilledBatchedIsWritten() throws 
Exception { @Test public void testAgedBatchedIsWritten() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -604,6 +594,7 @@ class TestWriteBehindProvider extends WriteBehindProviderFactory.Provider { private WriteBehind writeBehind = null; @Override + @SuppressWarnings("unchecked") public WriteBehind createWriteBehindLoaderWriter(final CacheLoaderWriter cacheLoaderWriter, final WriteBehindConfiguration configuration) { this.writeBehind = super.createWriteBehindLoaderWriter(cacheLoaderWriter, configuration); return writeBehind; @@ -632,4 +623,12 @@ public WriteBehind getWriteBehind() { cacheManager.close(); } } + + @SuppressWarnings("unchecked") + protected CacheLoaderWriterProvider getMockedCacheLoaderWriterProvider(CacheLoaderWriter loaderWriter) { + CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); + when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn(loaderWriter); + return cacheLoaderWriterProvider; + } + } diff --git a/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java b/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java index 9964a74a2e..cfe907e3d8 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java +++ 
b/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java @@ -16,29 +16,32 @@ package org.ehcache.impl.internal.persistence; import org.ehcache.Cache; +import org.ehcache.CachePersistenceException; import org.ehcache.PersistentCacheManager; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.CachePersistenceException; +import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.junit.Test; import java.io.File; import java.net.URISyntaxException; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.hamcrest.core.Is.is; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; /** - * @author rism + * */ public class CacheManagerDestroyRemovesPersistenceTest { - PersistentCacheManager persistentCacheManager; + public static final String PERSISTENT_CACHE = "persistent-cache"; + private PersistentCacheManager persistentCacheManager; @Test public void testDestroyRemovesPersistenceData () throws URISyntaxException, CachePersistenceException { @@ -50,7 +53,7 @@ public void testDestroyRemovesPersistenceData () throws URISyntaxException, Cach persistentCacheManager.close(); persistentCacheManager.destroy(); - assertThat(file.list().length, is(0)); + assertThat(file, not(isLocked())); } @Test @@ -58,9 +61,9 @@ public void testDestroyCacheDestroysPersistenceContext() throws URISyntaxExcepti File file = 
new File(getStoragePath(), "testDestroy"); initCacheManager(file); - persistentCacheManager.destroyCache("persistent-cache"); + persistentCacheManager.destroyCache(PERSISTENT_CACHE); - assertThat(file.list().length, is(1)); + assertThat(file, not(containsCacheDirectory(PERSISTENT_CACHE))); } @Test @@ -68,15 +71,15 @@ public void testCreateCacheWithSameAliasAfterDestroy() throws URISyntaxException File file = new File(getStoragePath(), "testDestroy"); initCacheManager(file); - persistentCacheManager.destroyCache("persistent-cache"); + persistentCacheManager.destroyCache(PERSISTENT_CACHE); - persistentCacheManager.createCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + persistentCacheManager.createCache(PERSISTENT_CACHE, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .disk(10L, MemoryUnit.MB, true)) .build()); - assertNotNull(persistentCacheManager.getCache("persistent-cache", Long.class, String.class)); + assertNotNull(persistentCacheManager.getCache(PERSISTENT_CACHE, Long.class, String.class)); persistentCacheManager.close(); } @@ -86,7 +89,7 @@ public void testDestroyCacheWithUnknownAlias() throws URISyntaxException, CacheP File file = new File(getStoragePath(), "testDestroyUnknownAlias"); initCacheManager(file); - Cache cache = persistentCacheManager.getCache("persistent-cache", Long.class, String.class); + Cache cache = persistentCacheManager.getCache(PERSISTENT_CACHE, Long.class, String.class); cache.put(1L, "One"); @@ -95,17 +98,15 @@ public void testDestroyCacheWithUnknownAlias() throws URISyntaxException, CacheP PersistentCacheManager anotherPersistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() .with(new CacheManagerPersistenceConfiguration(file)).build(true); - anotherPersistentCacheManager.destroyCache("persistent-cache"); - - assertThat(file.list().length, is(1)); + 
anotherPersistentCacheManager.destroyCache(PERSISTENT_CACHE); + assertThat(file, not(containsCacheDirectory(PERSISTENT_CACHE))); } - - public void initCacheManager(File file) throws URISyntaxException { + private void initCacheManager(File file) throws URISyntaxException { persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() .with(new CacheManagerPersistenceConfiguration(file)) - .withCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + .withCache(PERSISTENT_CACHE, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .disk(10L, MemoryUnit.MB, true)) @@ -115,11 +116,12 @@ public void initCacheManager(File file) throws URISyntaxException { private void putValuesInCacheAndCloseCacheManager() { Cache preConfigured = - persistentCacheManager.getCache("persistent-cache", Long.class, String.class); - preConfigured.put(1l, "foo"); + persistentCacheManager.getCache(PERSISTENT_CACHE, Long.class, String.class); + preConfigured.put(1L, "foo"); persistentCacheManager.close(); } + @SuppressWarnings("ConstantConditions") private String getStoragePath() throws URISyntaxException { return getClass().getClassLoader().getResource(".").toURI().getPath(); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestLocalPersistenceService.java b/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java similarity index 62% rename from impl/src/test/java/org/ehcache/impl/internal/persistence/TestLocalPersistenceService.java rename to impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java index 156d2a8bb2..2d4be23fb0 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestLocalPersistenceService.java +++ b/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java @@ -16,58 +16,68 @@ package 
org.ehcache.impl.internal.persistence; -import java.io.File; -import java.util.Collection; +import org.ehcache.CachePersistenceException; import org.ehcache.config.CacheConfiguration; - -import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.spi.service.FileBasedPersistenceContext; +import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; -import org.ehcache.CachePersistenceException; +import org.ehcache.impl.persistence.DefaultDiskResourceService; import org.ehcache.impl.persistence.DefaultLocalPersistenceService; -import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.persistence.StateRepository; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceProvider; import org.junit.rules.ExternalResource; import org.junit.rules.TemporaryFolder; +import org.mockito.Mockito; + +import java.io.File; + +import static org.mockito.Mockito.mock; /** * * @author cdennis */ -public class TestLocalPersistenceService extends ExternalResource implements LocalPersistenceService { +public class TestDiskResourceService extends ExternalResource implements DiskResourceService { private final TemporaryFolder folder; - private LocalPersistenceService persistenceService; + private LocalPersistenceService fileService; + private DiskResourceService diskResourceService; - public TestLocalPersistenceService(File folder) { + public TestDiskResourceService(File folder) { this.folder = new TemporaryFolder(folder); } - public TestLocalPersistenceService() { + public 
TestDiskResourceService() { this.folder = new TemporaryFolder(); } @Override protected void before() throws Throwable { folder.create(); - persistenceService = new DefaultLocalPersistenceService(new CacheManagerPersistenceConfiguration(folder.newFolder())); - persistenceService.start(null); + fileService = new DefaultLocalPersistenceService(new CacheManagerPersistenceConfiguration(folder.newFolder())); + fileService.start(null); + diskResourceService = new DefaultDiskResourceService(); + @SuppressWarnings("unchecked") + ServiceProvider sp = mock(ServiceProvider.class); + Mockito.when(sp.getService(LocalPersistenceService.class)).thenReturn(fileService); + diskResourceService.start(sp); } @Override protected void after() { - LocalPersistenceService ps = persistenceService; - persistenceService = null; + DiskResourceService ps = diskResourceService; + LocalPersistenceService ls = fileService; + diskResourceService = null; + fileService = null; try { ps.stop(); + ls.stop(); } finally { folder.delete(); } @@ -75,37 +85,37 @@ protected void after() { @Override public boolean handlesResourceType(ResourceType resourceType) { - return persistenceService.handlesResourceType(resourceType); + return diskResourceService.handlesResourceType(resourceType); } @Override public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { - return persistenceService.getPersistenceSpaceIdentifier(name, config); + return diskResourceService.getPersistenceSpaceIdentifier(name, config); } @Override public void releasePersistenceSpaceIdentifier(PersistenceSpaceIdentifier identifier) throws CachePersistenceException { - persistenceService.releasePersistenceSpaceIdentifier(identifier); + diskResourceService.releasePersistenceSpaceIdentifier(identifier); } @Override public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { - return 
persistenceService.getStateRepositoryWithin(identifier, name); + return diskResourceService.getStateRepositoryWithin(identifier, name); } @Override public void destroy(String name) throws CachePersistenceException { - persistenceService.destroy(name); + diskResourceService.destroy(name); } @Override public FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { - return persistenceService.createPersistenceContextWithin(identifier, name); + return diskResourceService.createPersistenceContextWithin(identifier, name); } @Override public void destroyAll() throws CachePersistenceException { - persistenceService.destroyAll(); + diskResourceService.destroyAll(); } @Override @@ -114,7 +124,7 @@ public void start(ServiceProvider serviceProvider) { } @Override - public void startForMaintenance(ServiceProvider serviceProvider) { + public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { //ignore } diff --git a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java b/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java index 6aa58a31d7..b9aae5f351 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java @@ -37,7 +37,10 @@ public class DefaultSizeOfEngineTest { public void testMaxObjectGraphSizeExceededException() { SizeOfEngine sizeOfEngine = new DefaultSizeOfEngine(3, Long.MAX_VALUE); try { - sizeOfEngine.sizeof(new MaxDepthGreaterThanThree(), new CopiedOnHeapValueHolder(new MaxDepthGreaterThanThree(), 0l, true, new IdentityCopier())); + @SuppressWarnings("unchecked") + IdentityCopier valueCopier = new IdentityCopier(); + sizeOfEngine.sizeof(new MaxDepthGreaterThanThree(), + new CopiedOnHeapValueHolder(new MaxDepthGreaterThanThree(), 0L, true, valueCopier)); fail(); } 
catch (Exception limitExceededException) { assertThat(limitExceededException, instanceOf(LimitExceededException.class)); @@ -49,7 +52,9 @@ public void testMaxObjectSizeExceededException() { SizeOfEngine sizeOfEngine = new DefaultSizeOfEngine(Long.MAX_VALUE, 1000); try { String overSized = new String(new byte[1000]); - sizeOfEngine.sizeof(overSized, new CopiedOnHeapValueHolder("test", 0l, true, new IdentityCopier())); + @SuppressWarnings("unchecked") + IdentityCopier valueCopier = new IdentityCopier(); + sizeOfEngine.sizeof(overSized, new CopiedOnHeapValueHolder("test", 0L, true, valueCopier)); fail(); } catch (Exception limitExceededException) { assertThat(limitExceededException, instanceOf(LimitExceededException.class)); diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java index b9cda2869a..2e6e39aecc 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java @@ -42,6 +42,7 @@ public class DefaultCopyProviderTest { public void testCreateKeyCopierWithCustomCopierConfig() { DefaultCopyProvider provider = new DefaultCopyProvider(null); + @SuppressWarnings("unchecked") DefaultCopierConfiguration config = new DefaultCopierConfiguration( (Class)TestCopier.class, DefaultCopierConfiguration.Type.KEY); @@ -59,15 +60,18 @@ public void testCreateKeyCopierWithoutConfig() { public void testCreateKeyCopierWithSerializer() { DefaultCopyProvider copyProvider = new DefaultCopyProvider(null); DefaultCopierConfiguration config = new DefaultCopierConfiguration( - (Class)SerializingCopier.class, DefaultCopierConfiguration.Type.KEY); + SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY); - assertThat(copyProvider.createKeyCopier(Long.class, mock(Serializer.class), config), instanceOf(SerializingCopier.class)); + 
@SuppressWarnings("unchecked") + Serializer serializer = mock(Serializer.class); + assertThat(copyProvider.createKeyCopier(Long.class, serializer, config), instanceOf(SerializingCopier.class)); } @Test public void testCreateValueCopierWithCustomCopierConfig() { DefaultCopyProvider provider = new DefaultCopyProvider(null); + @SuppressWarnings("unchecked") DefaultCopierConfiguration config = new DefaultCopierConfiguration( (Class)TestCopier.class, DefaultCopierConfiguration.Type.VALUE); @@ -85,9 +89,11 @@ public void testCreateValueCopierWithoutConfig() { public void testCreateValueCopierWithSerializer() { DefaultCopyProvider copyProvider = new DefaultCopyProvider(null); DefaultCopierConfiguration config = new DefaultCopierConfiguration( - (Class)SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE); + SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE); - assertThat(copyProvider.createValueCopier(Long.class, mock(Serializer.class), config), instanceOf(SerializingCopier.class)); + @SuppressWarnings("unchecked") + Serializer serializer = mock(Serializer.class); + assertThat(copyProvider.createValueCopier(Long.class, serializer, config), instanceOf(SerializingCopier.class)); } @Test @@ -96,7 +102,9 @@ public void testUserProvidedCloseableCopierInstanceDoesNotCloseOnRelease() throw TestCloseableCopier testCloseableCopier = new TestCloseableCopier(); DefaultCopierConfiguration config = new DefaultCopierConfiguration(testCloseableCopier, DefaultCopierConfiguration.Type.KEY); - assertThat(copyProvider.createKeyCopier(Long.class, mock(Serializer.class), config), sameInstance((Copier)testCloseableCopier)); + @SuppressWarnings("unchecked") + Serializer serializer = mock(Serializer.class); + assertThat(copyProvider.createKeyCopier(Long.class, serializer, config), sameInstance((Copier)testCloseableCopier)); copyProvider.releaseCopier(testCloseableCopier); @@ -130,4 +138,4 @@ public T copy(final T obj) { return obj; } } -} \ No newline at end of 
file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java index 7b99a844ad..5eff70c206 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java @@ -86,8 +86,8 @@ public String toString() { }; @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { //noop } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java index 43f477e500..386b9dd704 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java @@ -24,6 +24,7 @@ import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterProviderConfiguration; +import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.service.ServiceConfiguration; @@ -108,13 +109,17 @@ public void testCreationConfigurationPreservedAfterStopStart() { configuration.addLoaderFor("cache", MyLoader.class); DefaultCacheLoaderWriterProvider loaderWriterProvider = new DefaultCacheLoaderWriterProvider(configuration); - loaderWriterProvider.start(mock(ServiceProvider.class)); - assertThat(loaderWriterProvider.createCacheLoaderWriter("cache", mock(CacheConfiguration.class)), 
CoreMatchers.instanceOf(MyLoader.class)); + @SuppressWarnings("unchecked") + ServiceProvider serviceProvider = mock(ServiceProvider.class); + loaderWriterProvider.start(serviceProvider); + @SuppressWarnings("unchecked") + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + assertThat(loaderWriterProvider.createCacheLoaderWriter("cache", cacheConfiguration), CoreMatchers.instanceOf(MyLoader.class)); loaderWriterProvider.stop(); - loaderWriterProvider.start(mock(ServiceProvider.class)); + loaderWriterProvider.start(serviceProvider); - assertThat(loaderWriterProvider.createCacheLoaderWriter("cache", mock(CacheConfiguration.class)), CoreMatchers.instanceOf(MyLoader.class)); + assertThat(loaderWriterProvider.createCacheLoaderWriter("cache", cacheConfiguration), CoreMatchers.instanceOf(MyLoader.class)); } public static class MyLoader implements CacheLoaderWriter { @@ -140,7 +145,7 @@ public Map loadAll(final Iterable keys) throws Exception { @Override public void write(final Object key, final Object value) throws Exception { - this.lastWritten = value; + lastWritten = value; } @Override @@ -177,8 +182,8 @@ public Object load(final Object key) throws Exception { @Override public void write(final Object key, final Object value) throws Exception { - this.lastWritten = value; + lastWritten = value; } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java index da0ed8b45d..f168ff2ef8 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java @@ -16,30 +16,30 @@ package org.ehcache.impl.internal.spi.serialization; import org.ehcache.CachePersistenceException; +import 
org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.ehcache.impl.serialization.ByteArraySerializer; import org.ehcache.impl.serialization.CharSerializer; import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.impl.serialization.CompactPersistentJavaSerializer; import org.ehcache.impl.serialization.DoubleSerializer; import org.ehcache.impl.serialization.FloatSerializer; import org.ehcache.impl.serialization.IntegerSerializer; import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.impl.serialization.PlainJavaSerializer; import org.ehcache.impl.serialization.StringSerializer; import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.serialization.UnsupportedTypeException; +import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceProvider; import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; +import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import java.io.Closeable; @@ -61,8 +61,6 @@ import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** @@ -73,6 +71,9 @@ public class DefaultSerializationProviderTest { @Rule public TemporaryFolder tempFolder = new 
TemporaryFolder(); + @Rule + public ExpectedException expectedException = ExpectedException.none(); + @Test public void testCreateSerializerNoConfig() throws Exception { DefaultSerializationProviderConfiguration dspfConfig = new DefaultSerializationProviderConfiguration(); @@ -94,7 +95,8 @@ public void testCreateSerializerWithConfig() throws Exception { DefaultSerializationProvider dsp = new DefaultSerializationProvider(dspfConfig); dsp.start(providerContaining()); - DefaultSerializerConfiguration dspConfig = new DefaultSerializerConfiguration((Class) TestSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + DefaultSerializerConfiguration dspConfig = new DefaultSerializerConfiguration(getSerializerClass(), DefaultSerializerConfiguration.Type.VALUE); assertThat(dsp.createValueSerializer(String.class, ClassLoader.getSystemClassLoader(), dspConfig), instanceOf(TestSerializer.class)); assertThat(dsp.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), dspConfig), instanceOf(TestSerializer.class)); @@ -103,7 +105,8 @@ public void testCreateSerializerWithConfig() throws Exception { @Test public void testCreateSerializerWithFactoryConfig() throws Exception { DefaultSerializationProviderConfiguration dspfConfig = new DefaultSerializationProviderConfiguration(); - dspfConfig.addSerializerFor(Long.class, (Class) TestSerializer.class); + Class> serializerClass = getSerializerClass(); + dspfConfig.addSerializerFor(Long.class, serializerClass); DefaultSerializationProvider dsp = new DefaultSerializationProvider(dspfConfig); dsp.start(providerContaining()); @@ -111,10 +114,16 @@ public void testCreateSerializerWithFactoryConfig() throws Exception { assertThat(dsp.createValueSerializer(HashMap.class, ClassLoader.getSystemClassLoader()), instanceOf(CompactJavaSerializer.class)); } + @SuppressWarnings("unchecked") + private Class> getSerializerClass() { + return (Class) TestSerializer.class; + } + @Test public void 
testCreateTransientSerializers() throws Exception { DefaultSerializationProviderConfiguration dspfConfig = new DefaultSerializationProviderConfiguration(); - dspfConfig.addSerializerFor(String.class, (Class) TestSerializer.class); + Class> serializerClass = getSerializerClass(); + dspfConfig.addSerializerFor(String.class, serializerClass); DefaultSerializationProvider dsp = new DefaultSerializationProvider(dspfConfig); dsp.start(providerContaining()); @@ -127,7 +136,8 @@ public void testCreateTransientSerializers() throws Exception { @Test public void tesCreateTransientSerializersWithOverriddenSerializableType() throws Exception { DefaultSerializationProviderConfiguration dspfConfig = new DefaultSerializationProviderConfiguration(); - dspfConfig.addSerializerFor(Serializable.class, (Class) TestSerializer.class); + Class> serializerClass = getSerializerClass(); + dspfConfig.addSerializerFor(Serializable.class, serializerClass); DefaultSerializationProvider dsp = new DefaultSerializationProvider(dspfConfig); dsp.start(providerContaining()); @@ -140,12 +150,15 @@ public void tesCreateTransientSerializersWithOverriddenSerializableType() throws @Test public void testRemembersCreationConfigurationAfterStopStart() throws UnsupportedTypeException { DefaultSerializationProviderConfiguration configuration = new DefaultSerializationProviderConfiguration(); - configuration.addSerializerFor(String.class, (Class) TestSerializer.class); + Class> serializerClass = getSerializerClass(); + configuration.addSerializerFor(String.class, serializerClass); DefaultSerializationProvider serializationProvider = new DefaultSerializationProvider(configuration); - serializationProvider.start(mock(ServiceProvider.class)); + @SuppressWarnings("unchecked") + ServiceProvider serviceProvider = mock(ServiceProvider.class); + serializationProvider.start(serviceProvider); assertThat(serializationProvider.createKeySerializer(String.class, getSystemClassLoader()), instanceOf(TestSerializer.class)); 
serializationProvider.stop(); - serializationProvider.start(mock(ServiceProvider.class)); + serializationProvider.start(serviceProvider); assertThat(serializationProvider.createKeySerializer(String.class, getSystemClassLoader()), instanceOf(TestSerializer.class)); } @@ -161,7 +174,9 @@ public void testReleaseSerializerWithProvidedCloseableSerializerDoesNotClose() t @Test public void testReleaseSerializerWithInstantiatedCloseableSerializerDoesClose() throws Exception { - DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(CloseableSerializer.class, DefaultSerializerConfiguration.Type.KEY); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) CloseableSerializer.class; + DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.KEY); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); Serializer serializer = provider.createKeySerializer(String.class, getSystemClassLoader(), config); @@ -190,8 +205,9 @@ public void testReleaseSameInstanceMultipleTimesThrows() throws Exception { @Test public void testCreateKeySerializerWithActualInstanceInServiceConfig() throws Exception { DefaultSerializationProvider provider = new DefaultSerializationProvider(null); - TestSerializer serializer = mock(TestSerializer.class); - DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(serializer, DefaultSerializerConfiguration.Type.KEY); + @SuppressWarnings("unchecked") + TestSerializer serializer = mock(TestSerializer.class); + DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(serializer, DefaultSerializerConfiguration.Type.KEY); Serializer created = provider.createKeySerializer(TestSerializer.class, getSystemClassLoader(), config); assertSame(serializer, created); } @@ -199,8 +215,9 @@ public void testCreateKeySerializerWithActualInstanceInServiceConfig() throws Ex @Test public void 
testSameInstanceRetrievedMultipleTimesUpdatesTheProvidedCount() throws Exception { DefaultSerializationProvider provider = new DefaultSerializationProvider(null); - TestSerializer serializer = mock(TestSerializer.class); - DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(serializer, DefaultSerializerConfiguration.Type.KEY); + @SuppressWarnings("unchecked") + TestSerializer serializer = mock(TestSerializer.class); + DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(serializer, DefaultSerializerConfiguration.Type.KEY); Serializer created = provider.createKeySerializer(TestSerializer.class, getSystemClassLoader(), config); assertSame(serializer, created); @@ -291,21 +308,252 @@ public void testDefaultByteArraySerializer() throws Exception { assertThat(keySerializer, instanceOf(ByteArraySerializer.class)); } + @Test + public void testCreateTransientSerializerWithoutConstructor() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) BaseSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + } + + @Test + public void testCreatePersistentSerializerWithoutConstructor() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) 
BaseSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + } + + @Test + public void testCreateTransientStatefulSerializerWithoutConstructor() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulBaseSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + } + + @Test + public void testCreatePersistentStatefulSerializerWithoutConstructor() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulBaseSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + } + + @Test + public void testCreateTransientMinimalSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + 
MinimalSerializer.baseConstructorInvoked = false; + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) MinimalSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + assertThat(valueSerializer, instanceOf(MinimalSerializer.class)); + assertThat(MinimalSerializer.baseConstructorInvoked, is(true)); + } + + @Test + public void testCreatePersistentMinimalSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + MinimalSerializer.baseConstructorInvoked = false; + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) MinimalSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + assertThat(valueSerializer, instanceOf(MinimalSerializer.class)); + assertThat(MinimalSerializer.baseConstructorInvoked, is(true)); + } + + @Test + public void testTransientMinimalStatefulSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + MinimalStatefulSerializer.baseConstructorInvoked = false; + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) MinimalStatefulSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), 
configuration); + assertThat(valueSerializer, instanceOf(MinimalStatefulSerializer.class)); + assertThat(MinimalStatefulSerializer.baseConstructorInvoked, is(true)); + } + + @Test + public void testPersistentMinimalStatefulSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + MinimalStatefulSerializer.baseConstructorInvoked = false; + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) MinimalStatefulSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + assertThat(valueSerializer, instanceOf(MinimalStatefulSerializer.class)); + assertThat(MinimalStatefulSerializer.baseConstructorInvoked, is(true)); + } + + @Test + public void testTransientLegacySerializer() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) LegacySerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + } + + @Test + public void testPersistentLegacySerializer() throws Exception { + DefaultSerializationProvider provider = getStartedProvider(); + + LegacySerializer.legacyConstructorInvoked = false; + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) LegacySerializer.class; + 
DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + assertThat(valueSerializer, instanceOf(LegacySerializer.class)); + assertThat(LegacySerializer.legacyConstructorInvoked, is(true)); + } + + @Test + public void testTransientLegacyComboSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + LegacyComboSerializer.baseConstructorInvoked = false; + LegacyComboSerializer.legacyConstructorInvoked = false; + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) LegacyComboSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + assertThat(valueSerializer, instanceOf(LegacyComboSerializer.class)); + assertThat(LegacyComboSerializer.baseConstructorInvoked, is(true)); + assertThat(LegacyComboSerializer.legacyConstructorInvoked, is(false)); + } + + @Test + public void testPersistentLegacyComboSerializer() throws Exception { + DefaultSerializationProvider provider = getStartedProvider(); + + LegacyComboSerializer.baseConstructorInvoked = false; + LegacyComboSerializer.legacyConstructorInvoked = false; + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) LegacyComboSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, 
DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + assertThat(valueSerializer, instanceOf(LegacyComboSerializer.class)); + assertThat(LegacyComboSerializer.baseConstructorInvoked, is(true)); + assertThat(LegacyComboSerializer.legacyConstructorInvoked, is(false)); + } + + @Test + public void testCreateTransientStatefulLegacySerializer() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulLegacySerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + } + + @Test + public void testCreatePersistentStatefulLegacySerializer() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulLegacySerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + } + + @Test + public void testTransientStatefulLegacyComboSerializer() throws Exception { + 
DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + StatefulLegacyComboSerializer.baseConstructorInvoked = false; + StatefulLegacyComboSerializer.legacyConstructorInvoked = false; + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulLegacyComboSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + assertThat(valueSerializer, instanceOf(StatefulLegacyComboSerializer.class)); + assertThat(StatefulLegacyComboSerializer.baseConstructorInvoked, is(true)); + assertThat(StatefulLegacyComboSerializer.legacyConstructorInvoked, is(false)); + } + + @Test + public void testPersistentStatefulLegacyComboSerializer() throws Exception { + DefaultSerializationProvider provider = getStartedProvider(); + + StatefulLegacyComboSerializer.baseConstructorInvoked = false; + StatefulLegacyComboSerializer.legacyConstructorInvoked = false; + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulLegacyComboSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + assertThat(valueSerializer, instanceOf(StatefulLegacyComboSerializer.class)); + assertThat(StatefulLegacyComboSerializer.baseConstructorInvoked, is(true)); + assertThat(StatefulLegacyComboSerializer.legacyConstructorInvoked, is(false)); + } + private PersistableResourceService.PersistenceSpaceIdentifier getPersistenceSpaceIdentifierMock() { - PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = 
mock(LocalPersistenceService.PersistenceSpaceIdentifier.class); - when(spaceIdentifier.getServiceType()).thenReturn(LocalPersistenceService.class); + PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = mock(DiskResourceService.PersistenceSpaceIdentifier.class); + when(spaceIdentifier.getServiceType()).thenReturn(DiskResourceService.class); return spaceIdentifier; } private DefaultSerializationProvider getStartedProvider() throws CachePersistenceException { DefaultSerializationProvider defaultProvider = new DefaultSerializationProvider(null); - ServiceProvider serviceProvider = mock(ServiceProvider.class); - LocalPersistenceService persistenceService = mock(LocalPersistenceService.class); - StateRepository stateRepository = mock(StateRepository.class); - when(stateRepository.getPersistentConcurrentMap(any(String.class), any(Class.class), any(Class.class))).thenReturn(new ConcurrentHashMap()); - when(persistenceService.getStateRepositoryWithin(any(PersistableResourceService.PersistenceSpaceIdentifier.class), any(String.class))).thenReturn(stateRepository); - when(persistenceService.createPersistenceContextWithin(any(PersistableResourceService.PersistenceSpaceIdentifier.class), anyString())) + @SuppressWarnings("unchecked") + ServiceProvider serviceProvider = mock(ServiceProvider.class); + DiskResourceService diskResourceService = mock(DiskResourceService.class); + when(diskResourceService.createPersistenceContextWithin(any(PersistableResourceService.PersistenceSpaceIdentifier.class), anyString())) .thenReturn(new FileBasedPersistenceContext() { @Override public File getDirectory() { @@ -317,7 +565,7 @@ public File getDirectory() { } } }); - when(serviceProvider.getService(LocalPersistenceService.class)).thenReturn(persistenceService); + when(serviceProvider.getService(DiskResourceService.class)).thenReturn(diskResourceService); defaultProvider.start(serviceProvider); return defaultProvider; } @@ -339,7 +587,7 @@ public boolean equals(T object, 
ByteBuffer binary) { } } - public static class CloseableSerializer implements Serializer, Closeable { + public static class CloseableSerializer implements Serializer, Closeable { boolean closed = false; @@ -362,7 +610,7 @@ public ByteBuffer serialize(Object object) throws SerializerException { } @Override - public Object read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { + public T read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { return null; } @@ -371,4 +619,103 @@ public boolean equals(Object object, ByteBuffer binary) throws ClassNotFoundExce return false; } } + + public static class BaseSerializer implements Serializer { + + @Override + public ByteBuffer serialize(final T object) throws SerializerException { + return null; + } + + @Override + public T read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return null; + } + + @Override + public boolean equals(final T object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return false; + } + } + + public static class MinimalSerializer extends BaseSerializer { + + private static boolean baseConstructorInvoked = false; + + public MinimalSerializer(ClassLoader loader) { + baseConstructorInvoked = true; + } + + } + + //Stateful but no constructor + public static class StatefulBaseSerializer extends BaseSerializer implements StatefulSerializer { + + @Override + public void init(final StateRepository stateRepository) { + } + } + + public static class MinimalStatefulSerializer extends BaseSerializer implements StatefulSerializer { + + private static boolean baseConstructorInvoked = false; + + public MinimalStatefulSerializer(ClassLoader loader) { + baseConstructorInvoked = true; + } + + @Override + public void init(final StateRepository stateRepository) { + } + } + + public static class LegacySerializer extends BaseSerializer { + + private static boolean legacyConstructorInvoked = false; + + public 
LegacySerializer(ClassLoader loader, FileBasedPersistenceContext context) { + legacyConstructorInvoked = true; + } + } + + public static class LegacyComboSerializer extends BaseSerializer { + + private static boolean baseConstructorInvoked = false; + private static boolean legacyConstructorInvoked = false; + + public LegacyComboSerializer(ClassLoader loader) { + baseConstructorInvoked = true; + } + + public LegacyComboSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + legacyConstructorInvoked = true; + } + } + + public static class StatefulLegacySerializer extends StatefulBaseSerializer { + + private static boolean legacyConstructorInvoked = false; + + public StatefulLegacySerializer(ClassLoader loader, FileBasedPersistenceContext context) { + legacyConstructorInvoked = true; + } + } + + public static class StatefulLegacyComboSerializer extends BaseSerializer implements StatefulSerializer { + + private static boolean baseConstructorInvoked = false; + private static boolean legacyConstructorInvoked = false; + + public StatefulLegacyComboSerializer(final ClassLoader loader) { + baseConstructorInvoked = true; + } + + public StatefulLegacyComboSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + legacyConstructorInvoked = true; + } + + @Override + public void init(final StateRepository stateRepository) { + } + } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java index e3c848ca8c..9fd728fb5f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java @@ -38,6 +38,7 @@ import java.io.IOException; +import static org.ehcache.config.Eviction.noAdvice; import static 
org.ehcache.impl.internal.store.disk.OffHeapDiskStore.persistent; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; import static org.mockito.Mockito.mock; @@ -49,11 +50,13 @@ public class EhcachePersistentConcurrentOffHeapClockCacheTest extends AbstractEh public final TemporaryFolder folder = new TemporaryFolder(); @Override + @SuppressWarnings("unchecked") protected EhcachePersistentConcurrentOffHeapClockCache createTestSegment() throws IOException { - return createTestSegment(Eviction.noAdvice(), mock(EvictionListener.class)); + return createTestSegment(noAdvice(), mock(EvictionListener.class)); } @Override + @SuppressWarnings("unchecked") protected EhcacheOffHeapBackingMap createTestSegment(EvictionAdvisor evictionPredicate) throws IOException { return createTestSegment(evictionPredicate, mock(EvictionListener.class)); } @@ -114,4 +117,4 @@ protected boolean isPinned(String key, EhcacheOffHeapBackingMap protected int getMetadata(String key, int mask, EhcacheOffHeapBackingMap segment) { return ((EhcachePersistentConcurrentOffHeapClockCache) segment).getMetadata(key, mask); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java index 6c8af6efe3..6aba43f141 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java @@ -25,13 +25,14 @@ import org.ehcache.config.SizedResourcePool; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; import org.ehcache.expiry.Expirations; import org.ehcache.expiry.Expiry; import 
org.ehcache.impl.internal.DefaultTimeSourceService; import org.ehcache.impl.serialization.LongSerializer; import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.junit.Test; @@ -45,7 +46,9 @@ import java.util.Set; import static java.util.Collections.singleton; -import static org.hamcrest.Matchers.is; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; import static org.terracotta.context.query.Matchers.attributes; @@ -57,36 +60,38 @@ * OffHeapStoreProviderTest */ public class OffHeapDiskStoreProviderTest { + @Test public void testStatisticsAssociations() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(mock(SerializationProvider.class), new DefaultTimeSourceService(null), mock(LocalPersistenceService.class)); - + ServiceLocator serviceLocator = dependencySet().with(mock(SerializationProvider.class)) + .with(new DefaultTimeSourceService(null)).with(mock(DiskResourceService.class)).build(); provider.start(serviceLocator); - OffHeapDiskStore store = provider.createStore(getStoreConfig()); + OffHeapDiskStore store = provider.createStore(getStoreConfig(), mock(PersistableResourceService.PersistenceSpaceIdentifier.class)); - Query storeQuery = queryBuilder() - .children() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.containsAll(singleton("store")); - } - }))))) - .build(); + @SuppressWarnings("unchecked") + Query storeQuery = queryBuilder() + .children() + 
.filter(context(attributes(Matchers.>allOf( + hasAttribute("tags", new Matcher>() { + @Override + protected boolean matchesSafely(Set object) { + return object.containsAll(singleton("Disk")); + } + }))))) + .build(); Set nodes = singleton(ContextManager.nodeFor(store)); Set storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(false)); + assertThat(storeResult, not(empty())); provider.releaseStore(store); storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(true)); + assertThat(storeResult, empty()); } private Store.Configuration getStoreConfig() { @@ -120,8 +125,9 @@ public ClassLoader getClassLoader() { public ResourcePools getResourcePools() { return new ResourcePools() { @Override - public ResourcePool getPoolForResource(ResourceType resourceType) { - return new SizedResourcePool() { + @SuppressWarnings("unchecked") + public

P getPoolForResource(ResourceType

resourceType) { + return (P) new SizedResourcePool() { @Override public ResourceType getType() { return ResourceType.Core.DISK; @@ -150,6 +156,7 @@ public void validateUpdate(ResourcePool newPool) { } @Override + @SuppressWarnings("unchecked") public Set> getResourceTypeSet() { return (Set) singleton(ResourceType.Core.OFFHEAP); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java index a2269a9b17..2d9736fc82 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java @@ -21,7 +21,6 @@ import org.ehcache.config.ResourcePools; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.config.SizedResourcePool; -import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; import org.ehcache.CachePersistenceException; import org.ehcache.expiry.Expirations; @@ -29,7 +28,7 @@ import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.executor.OnDemandExecutionService; -import org.ehcache.impl.internal.persistence.TestLocalPersistenceService; +import org.ehcache.impl.internal.persistence.TestDiskResourceService; import org.ehcache.impl.internal.store.offheap.BasicOffHeapValueHolder; import org.ehcache.impl.internal.store.offheap.OffHeapValueHolder; import org.ehcache.core.spi.time.SystemTimeSource; @@ -56,6 +55,7 @@ import static org.ehcache.config.ResourceType.Core.DISK; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -71,7 +71,7 @@ public class 
OffHeapDiskStoreSPITest extends AuthoritativeTierSPITest newStore(Long capacity, EvictionAdviso CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); String spaceName = "OffheapDiskStore-" + index.getAndIncrement(); - PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); ResourcePools resourcePools = getDiskResourcePool(capacity); SizedResourcePool diskPool = resourcePools.getPoolForResource(DISK); MemoryUnit unit = (MemoryUnit)diskPool.getUnit(); @@ -116,7 +116,7 @@ private AuthoritativeTier newStore(Long capacity, EvictionAdviso Store.Configuration config = new StoreConfigurationImpl(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, resourcePools, 0, keySerializer, valueSerializer); OffHeapDiskStore store = new OffHeapDiskStore( - persistenceService.createPersistenceContextWithin(space, "store"), + diskResourceService.createPersistenceContextWithin(space, "store"), new OnDemandExecutionService(), null, 1, config, timeSource, new TestStoreEventDispatcher(), @@ -157,7 +157,7 @@ public ServiceConfiguration[] getServiceConfigurations() { CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); String spaceName = "OffheapDiskStore-" + index.getAndIncrement(); - PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); return new ServiceConfiguration[] {space}; } catch (CachePersistenceException e) { throw new 
RuntimeException(e); @@ -166,7 +166,7 @@ public ServiceConfiguration[] getServiceConfigurations() { @Override public ServiceLocator getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator serviceLocator = dependencySet().build(); try { serviceLocator.startAllServices(); } catch (Exception e) { @@ -191,12 +191,12 @@ public String createValue(long seed) { public void close(final Store store) { String spaceName = createdStores.get(store); try { - OffHeapDiskStore.Provider.close((OffHeapDiskStore)store); + OffHeapDiskStore.Provider.close((OffHeapDiskStore)store); } catch (IOException ex) { throw new RuntimeException(ex); } try { - persistenceService.destroy(spaceName); + diskResourceService.destroy(spaceName); } catch (CachePersistenceException ex) { throw new AssertionError(ex); } finally { @@ -210,11 +210,11 @@ public void close(final Store store) { public void tearDown() throws CachePersistenceException, IOException { try { for (Map.Entry, String> entry : createdStores.entrySet()) { - OffHeapDiskStore.Provider.close((OffHeapDiskStore) entry.getKey()); - persistenceService.destroy(entry.getValue()); + OffHeapDiskStore.Provider.close((OffHeapDiskStore) entry.getKey()); + diskResourceService.destroy(entry.getValue()); } } finally { - persistenceService.stop(); + diskResourceService.stop(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java index dc5aedcab6..20d6d08f77 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java @@ -31,7 +31,7 @@ import org.ehcache.expiry.Expiry; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.executor.OnDemandExecutionService; -import 
org.ehcache.impl.internal.persistence.TestLocalPersistenceService; +import org.ehcache.impl.internal.persistence.TestDiskResourceService; import org.ehcache.impl.internal.store.offheap.AbstractOffHeapStore; import org.ehcache.impl.internal.store.offheap.AbstractOffHeapStoreTest; import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; @@ -78,6 +78,7 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.MemoryUnit.MB; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.ehcache.expiry.Expirations.noExpiration; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; import static org.hamcrest.CoreMatchers.containsString; @@ -98,7 +99,7 @@ public class OffHeapDiskStoreTest extends AbstractOffHeapStoreTest { public final TemporaryFolder temporaryFolder = new TemporaryFolder(); @Rule - public final TestLocalPersistenceService persistenceService = new TestLocalPersistenceService(); + public final TestDiskResourceService diskResourceService = new TestDiskResourceService(); @Test public void testRecovery() throws StoreAccessException, IOException { @@ -119,16 +120,15 @@ public void testRecovery() throws StoreAccessException, IOException { @Test public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(persistenceService); - serviceLocator.addService(provider); + ServiceLocator serviceLocator = dependencySet().with(diskResourceService).with(provider).build(); serviceLocator.startAllServices(); CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, 
MemoryUnit.MB, false).build()); - PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); { + @SuppressWarnings("unchecked") Store.Configuration storeConfig1 = mock(Store.Configuration.class); when(storeConfig1.getKeyType()).thenReturn(Long.class); when(storeConfig1.getValueType()).thenReturn(String.class); @@ -144,6 +144,7 @@ public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws } { + @SuppressWarnings("unchecked") Store.Configuration storeConfig2 = mock(Store.Configuration.class); when(storeConfig2.getKeyType()).thenReturn(Long.class); when(storeConfig2.getValueType()).thenReturn(Serializable.class); @@ -169,16 +170,15 @@ public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws @Test public void testRecoveryWithArrayType() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(persistenceService); - serviceLocator.addService(provider); + ServiceLocator serviceLocator = dependencySet().with(diskResourceService).with(provider).build(); serviceLocator.startAllServices(); CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); - PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); { + @SuppressWarnings("unchecked") Store.Configuration storeConfig1 = mock(Store.Configuration.class); when(storeConfig1.getKeyType()).thenReturn(Long.class); when(storeConfig1.getValueType()).thenReturn(Object[].class); @@ -194,6 +194,7 @@ 
public void testRecoveryWithArrayType() throws Exception { } { + @SuppressWarnings("unchecked") Store.Configuration storeConfig2 = mock(Store.Configuration.class); when(storeConfig2.getKeyType()).thenReturn(Long.class); when(storeConfig2.getValueType()).thenReturn(Object[].class); @@ -215,7 +216,7 @@ public void testRecoveryWithArrayType() throws Exception { protected OffHeapDiskStore createAndInitStore(final TimeSource timeSource, final Expiry expiry) { try { SerializationProvider serializationProvider = new DefaultSerializationProvider(null); - serializationProvider.start(providerContaining(persistenceService)); + serializationProvider.start(providerContaining(diskResourceService)); ClassLoader classLoader = getClass().getClassLoader(); Serializer keySerializer = serializationProvider.createKeySerializer(String.class, classLoader); Serializer valueSerializer = serializationProvider.createValueSerializer(String.class, classLoader); @@ -238,7 +239,7 @@ protected OffHeapDiskStore createAndInitStore(final TimeSource t protected OffHeapDiskStore createAndInitStore(TimeSource timeSource, Expiry expiry, EvictionAdvisor evictionAdvisor) { try { SerializationProvider serializationProvider = new DefaultSerializationProvider(null); - serializationProvider.start(providerContaining(persistenceService)); + serializationProvider.start(providerContaining(diskResourceService)); ClassLoader classLoader = getClass().getClassLoader(); Serializer keySerializer = serializationProvider.createKeySerializer(String.class, classLoader); Serializer valueSerializer = serializationProvider.createValueSerializer(byte[].class, classLoader); @@ -269,25 +270,17 @@ protected void destroyStore(AbstractOffHeapStore store) { @Test public void testStoreInitFailsWithoutLocalPersistenceService() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(provider); - 
serviceLocator.startAllServices(); - Store.Configuration storeConfig = mock(Store.Configuration.class); - when(storeConfig.getKeyType()).thenReturn(String.class); - when(storeConfig.getValueType()).thenReturn(String.class); - when(storeConfig.getResourcePools()).thenReturn(ResourcePoolsBuilder.newResourcePoolsBuilder() - .disk(10, MB) - .build()); - when(storeConfig.getDispatcherConcurrency()).thenReturn(1); try { - provider.createStore(storeConfig); + ServiceLocator serviceLocator = dependencySet().with(provider).build(); fail("IllegalStateException expected"); } catch (IllegalStateException e) { - assertThat(e.getMessage(), containsString("No LocalPersistenceService could be found - did you configure it at the CacheManager level?")); + assertThat(e.getMessage(), containsString("Failed to find provider with satisfied dependency set for interface" + + " org.ehcache.core.spi.service.DiskResourceService")); } } @Test + @SuppressWarnings("unchecked") public void testAuthoritativeRank() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); assertThat(provider.rankAuthority(ResourceType.Core.DISK, EMPTY_LIST), is(1)); @@ -321,8 +314,8 @@ private FileBasedPersistenceContext getPersistenceContext() { try { CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MB, false).build()); - PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); - return persistenceService.createPersistenceContextWithin(space, "store"); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); + return diskResourceService.createPersistenceContextWithin(space, "store"); } catch (CachePersistenceException e) { throw new AssertionError(e); } @@ -419,10 +412,11 @@ public Void call() { Query invalidateAllQuery = 
QueryBuilder.queryBuilder().descendants().filter(context(attributes(hasAttribute("tags", new Matcher>() { @Override protected boolean matchesSafely(Set object) { - return object.contains("local-offheap"); + return object.contains("OffHeap"); } })))).filter(context(attributes(hasAttribute("name", "invalidateAll")))).ensureUnique().build(); + @SuppressWarnings("unchecked") OperationStatistic invalidateAll = (OperationStatistic) invalidateAllQuery.execute(singleton(nodeFor(cache))).iterator().next().getContext().attributes().get("this"); assertThat(invalidateAll.sum(), is(0L)); @@ -454,4 +448,4 @@ public boolean equals(Object o) { } } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java index d260fefc7e..8b27c44388 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java @@ -16,7 +16,6 @@ package org.ehcache.impl.internal.store.disk.factories; -import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.impl.internal.store.disk.factories.EhcachePersistentSegmentFactory.EhcachePersistentSegment; import org.ehcache.impl.internal.store.offheap.SwitchableEvictionAdvisor; @@ -38,6 +37,7 @@ import java.io.IOException; +import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.impl.internal.store.disk.OffHeapDiskStore.persistent; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; import static org.hamcrest.CoreMatchers.is; @@ -51,19 +51,21 @@ public class EhcachePersistentSegmentTest { @Rule public final TemporaryFolder folder = new TemporaryFolder(); + @SuppressWarnings("unchecked") private 
EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment() throws IOException { - return createTestSegment(Eviction.noAdvice(), mock(EvictionListener.class)); + return createTestSegment(noAdvice(), mock(EvictionListener.class)); } + @SuppressWarnings("unchecked") private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(EvictionAdvisor evictionPredicate) throws IOException { return createTestSegment(evictionPredicate, mock(EvictionListener.class)); } private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(EvictionListener evictionListener) throws IOException { - return createTestSegment(Eviction.noAdvice(), evictionListener); + return createTestSegment(noAdvice(), evictionListener); } - private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(final EvictionAdvisor evictionPredicate, EvictionListener evictionListener) throws IOException { + private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(final EvictionAdvisor evictionPredicate, EvictionListener evictionListener) throws IOException { try { HeuristicConfiguration configuration = new HeuristicConfiguration(1024 * 1024); SerializationProvider serializationProvider = new DefaultSerializationProvider(null); @@ -144,6 +146,7 @@ public void testAdviceAgainstEvictionPreventsEviction() throws IOException { @Test public void testEvictionFiresEvent() throws IOException { + @SuppressWarnings("unchecked") EvictionListener evictionListener = mock(EvictionListener.class); EhcachePersistentSegment segment = createTestSegment(evictionListener); try { @@ -154,4 +157,4 @@ public void testEvictionFiresEvent() throws IOException { segment.destroy(); } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java index fc7464e22d..1b5afe003e 100644 --- 
a/impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java @@ -119,6 +119,7 @@ private String buildThreadDump() { }; @Before + @SuppressWarnings("unchecked") public void setUp() { eventDispatcher = mock(StoreEventDispatcher.class); eventSink = mock(StoreEventSink.class); @@ -128,19 +129,21 @@ public void setUp() { @Test public void testEvictEmptyStoreDoesNothing() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); assertThat(store.evict(eventSink), is(false)); - verify(eventSink, never()).evicted(anyString(), any(ValueSupplier.class)); + verify(eventSink, never()).evicted(anyString(), anyValueSupplier()); } @Test public void testEvictWithNoEvictionAdvisorDoesEvict() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); for (int i = 0; i < 100; i++) { store.put(Integer.toString(i), Integer.toString(i)); } assertThat(store.evict(eventSink), is(true)); assertThat(storeSize(store), is(99)); - verify(eventSink, times(1)).evicted(anyString(), any(ValueSupplier.class)); + verify(eventSink, times(1)).evicted(anyString(), anyValueSupplier()); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); } @@ -152,12 +155,13 @@ public boolean adviseAgainstEviction(String key, String value) { return true; } }); + StoreEventSink eventSink = getStoreEventSink(); for (int i = 0; i < 100; i++) { store.put(Integer.toString(i), Integer.toString(i)); } assertThat(store.evict(eventSink), is(true)); assertThat(storeSize(store), is(99)); - verify(eventSink, times(1)).evicted(anyString(), any(ValueSupplier.class)); + verify(eventSink, times(1)).evicted(anyString(), anyValueSupplier()); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); } @@ -169,12 +173,13 @@ public boolean 
adviseAgainstEviction(String key, String value) { throw new UnsupportedOperationException("Broken advisor!"); } }); + StoreEventSink eventSink = getStoreEventSink(); for (int i = 0; i < 100; i++) { store.put(Integer.toString(i), Integer.toString(i)); } assertThat(store.evict(eventSink), is(true)); assertThat(storeSize(store), is(99)); - verify(eventSink, times(1)).evicted(anyString(), any(ValueSupplier.class)); + verify(eventSink, times(1)).evicted(anyString(), anyValueSupplier()); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); } @@ -196,6 +201,8 @@ public void testGetNoPut() throws Exception { @Test public void testGetExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); store.put("key", "value"); @@ -214,10 +221,11 @@ public void testGetNoExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(2, TimeUnit.MILLISECONDS))); + StoreEventSink eventSink = getStoreEventSink(); store.put("key", "value"); timeSource.advanceTime(1); assertThat(store.get("key").value(), equalTo("value")); - verify(eventSink, never()).expired(anyString(), any(ValueSupplier.class)); + verify(eventSink, never()).expired(anyString(), anyValueSupplier()); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.GetOutcome.HIT)); } @@ -251,6 +259,8 @@ public void testNotContainsKey() throws Exception { @Test public void testContainsKeyExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); OnHeapStore store = 
newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); @@ -266,6 +276,9 @@ public void testContainsKeyExpired() throws Exception { @Test public void testPut() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); + store.put("key", "value"); verify(eventSink).created(eq("key"), eq("value")); verifyListenerReleaseEventsInOrder(eventDispatcher); @@ -276,6 +289,9 @@ public void testPut() throws Exception { @Test public void testPutOverwrite() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); + store.put("key", "value"); store.put("key", "value2"); @@ -306,6 +322,8 @@ public void testInvalidate() throws Exception { @Test public void testPutIfAbsentNoValue() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); ValueHolder prev = store.putIfAbsent("key", "value"); @@ -347,13 +365,15 @@ public void testPutIfAbsentExpired() throws Exception { ValueHolder prev = store.putIfAbsent("key", "value2"); assertThat(prev, nullValue()); assertThat(store.get("key").value(), equalTo("value2")); - checkExpiryEvent(eventSink, "key", "value"); + checkExpiryEvent(getStoreEventSink(), "key", "value"); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ExpirationOutcome.SUCCESS)); } @Test public void testRemove() throws StoreAccessException { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); store.remove("key"); @@ -365,6 +385,8 @@ public void testRemove() throws StoreAccessException { @Test public void testRemoveTwoArgMatch() throws Exception { OnHeapStore 
store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); @@ -389,8 +411,10 @@ public void testRemoveTwoArgNoMatch() throws Exception { @Test public void testRemoveTwoArgExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + store.put("key", "value"); assertThat(store.get("key").value(), equalTo("value")); timeSource.advanceTime(1); @@ -403,6 +427,8 @@ public void testRemoveTwoArgExpired() throws Exception { @Test public void testReplaceTwoArgPresent() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); @@ -427,6 +453,7 @@ public void testReplaceTwoArgAbsent() throws Exception { @Test public void testReplaceTwoArgExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); @@ -442,6 +469,8 @@ public void testReplaceTwoArgExpired() throws Exception { @Test public void testReplaceThreeArgMatch() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); @@ -471,6 +500,7 @@ public void testReplaceThreeArgNoMatch() throws Exception { @Test public void testReplaceThreeArgExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); @@ -548,6 
+578,8 @@ public void testIteratorDoesNotUpdateAccessTime() throws Exception { public void testComputeReplaceTrue() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, Expirations.noExpiration()); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); ValueHolder installedHolder = store.get("key"); @@ -607,6 +639,8 @@ public Boolean apply() { @Test public void testCompute() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); ValueHolder newValue = store.compute("key", new BiFunction() { @Override @@ -627,6 +661,8 @@ public String apply(String mappedKey, String mappedValue) { @Test public void testComputeNull() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); ValueHolder newValue = store.compute("key", new BiFunction() { @Override @@ -677,6 +713,8 @@ public String apply(String mappedKey, String mappedValue) { @Test public void testComputeExistingValue() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); @@ -699,6 +737,7 @@ public String apply(String mappedKey, String mappedValue) { @Test public void testComputeExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); store.put("key", "value"); @@ -828,6 +867,8 @@ public Boolean apply() { @Test public void testComputeIfAbsent() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = 
getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); ValueHolder newValue = store.computeIfAbsent("key", new Function() { @Override @@ -1012,7 +1053,8 @@ public void testGetOrComputeIfAbsentExpiresOnHit() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); - CachingTier.InvalidationListener invalidationListener = mock(CachingTier.InvalidationListener.class); + @SuppressWarnings("unchecked") + CachingTier.InvalidationListener invalidationListener = mock(CachingTier.InvalidationListener.class); store.setInvalidationListener(invalidationListener); store.put("key", "value"); @@ -1185,6 +1227,7 @@ public ValueHolder apply(String key) { } @Test + @SuppressWarnings("unchecked") public void testConcurrentFaultingAndInvalidate() throws Exception { final OnHeapStore store = newStore(); CachingTier.InvalidationListener invalidationListener = mock(CachingTier.InvalidationListener.class); @@ -1382,7 +1425,14 @@ public void onInvalidation(String key, ValueHolder valueHolder) { store.iterator(); } + @SuppressWarnings("unchecked") + private ValueSupplier anyValueSupplier() { + return any(ValueSupplier.class); + } + private void verifyListenerReleaseEventsInOrder(StoreEventDispatcher listener) { + StoreEventSink eventSink = getStoreEventSink(); + InOrder inOrder = inOrder(listener); inOrder.verify(listener).eventSink(); inOrder.verify(listener).releaseEventSink(eventSink); @@ -1403,6 +1453,7 @@ private StoreEventListener addListener(OnHeapStore store) { eventDispatcher = mock(StoreEventDispatcher.class); eventSink = mock(StoreEventSink.class); when(eventDispatcher.eventSink()).thenReturn(eventSink); + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); return listener; } @@ -1452,6 +1503,16 @@ private void advanceTime(long delta) { } } + @SuppressWarnings("unchecked") + 
protected StoreEventSink getStoreEventSink() { + return eventSink; + } + + @SuppressWarnings("unchecked") + protected StoreEventDispatcher getStoreEventDispatcher() { + return eventDispatcher; + } + protected OnHeapStore newStore() { return newStore(SystemTimeSource.INSTANCE, Expirations.noExpiration(), Eviction.noAdvice()); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java index 13de6b7c7f..fd5866176c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java @@ -37,6 +37,7 @@ import org.junit.Before; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; public class ByteSizedOnHeapStoreByRefSPITest extends StoreSPITest { @@ -74,6 +75,7 @@ public Store newStoreWithEvictionAdvisor(EvictionAdvisor newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { ResourcePools resourcePools = buildResourcePools(capacity); Store.Configuration config = new StoreConfigurationImpl(getKeyType(), getValueType(), @@ -83,6 +85,7 @@ private Store newStore(Long capacity, EvictionAdvisor newValueHolder(final String value) { return new CopiedOnHeapValueHolder(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, DEFAULT_COPIER); } @@ -127,7 +130,7 @@ public void close(final Store store) { @Override public ServiceLocator getServiceProvider() { - ServiceLocator locator = new ServiceLocator(); + ServiceLocator locator = dependencySet().build(); try { locator.startAllServices(); } catch (Exception e) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java 
b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java index 73a22f70b8..fd1ae624de 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java @@ -40,6 +40,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; public class ByteSizedOnHeapStoreByValueSPITest extends StoreSPITest { @@ -133,7 +134,7 @@ public void close(final Store store) { @Override public ServiceLocator getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator serviceLocator = dependencySet().build(); try { serviceLocator.startAllServices(); } catch (Exception e) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java index 53423c9cca..d58618eb4e 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java @@ -44,6 +44,7 @@ protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { } @Override + @SuppressWarnings("unchecked") protected OnHeapStore newStore(final TimeSource timeSource, final Expiry expiry, final EvictionAdvisor evictionAdvisor, final int capacity) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java index 8452067a5e..ff726e142a 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java +++ 
b/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java @@ -21,6 +21,7 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.units.EntryUnit; +import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.expiry.Expiry; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.core.spi.time.TimeSource; @@ -47,6 +48,7 @@ protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { protected OnHeapStore newStore(final TimeSource timeSource, final Expiry expiry, final EvictionAdvisor evictionAdvisor, final Copier keyCopier, final Copier valueCopier, final int capacity) { + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); return new OnHeapStore(new Store.Configuration() { @SuppressWarnings("unchecked") diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java index 1688e58f51..464eb3fb9c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java @@ -62,14 +62,15 @@ protected Store.Configuration mockStoreConfig() { return config; } + @SuppressWarnings("unchecked") protected OnHeapStore newStore() { Store.Configuration configuration = mockStoreConfig(); return new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); } - @SuppressWarnings("unchecked") @Test + @SuppressWarnings("unchecked") public void testBulkComputeFunctionGetsValuesOfEntries() throws Exception { @SuppressWarnings("rawtypes") Store.Configuration config = mock(Store.Configuration.class); @@ -164,6 +165,7 @@ public void testBulkComputeHappyPath() throws Exception { public void 
testBulkComputeStoreRemovesValueWhenFunctionReturnsNullMappings() throws Exception { Store.Configuration configuration = mockStoreConfig(); + @SuppressWarnings("unchecked") OnHeapStore store = new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); store.put(1, "one"); store.put(2, "two"); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java index 35c1e12790..398d8dd1f8 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java @@ -37,6 +37,7 @@ import org.junit.Before; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * Test the {@link org.ehcache.internal.store.heap.OnHeapStore} compliance to the @@ -80,6 +81,7 @@ public Store newStoreWithEvictionAdvisor(EvictionAdvisor newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { ResourcePools resourcePools = buildResourcePools(capacity); Store.Configuration config = new StoreConfigurationImpl(getKeyType(), getValueType(), @@ -88,6 +90,7 @@ private Store newStore(Long capacity, EvictionAdvisor newValueHolder(final String value) { return new CopiedOnHeapValueHolder(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, DEFAULT_COPIER); } @@ -132,7 +135,7 @@ public void close(final Store store) { @Override public ServiceLocator getServiceProvider() { - ServiceLocator locator = new ServiceLocator(); + ServiceLocator locator = dependencySet().build(); try { locator.startAllServices(); } catch (Exception e) { diff --git 
a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java index 590f70f55a..53ea429bdb 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java @@ -40,6 +40,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * Test the {@link OnHeapStore} compliance to the @@ -138,7 +139,7 @@ public void close(final Store store) { @Override public ServiceLocator getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator serviceLocator = dependencySet().build(); try { serviceLocator.startAllServices(); } catch (Exception e) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java index d1b2c2b8ae..d72ecb6723 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java @@ -28,7 +28,6 @@ import org.ehcache.expiry.Expiry; import org.ehcache.core.spi.function.Function; import org.ehcache.impl.copy.SerializingCopier; -import org.ehcache.impl.serialization.CompactJavaSerializer; import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.impl.internal.store.AbstractValueHolder; @@ -73,7 +72,7 @@ public Long copyForWrite(Long obj) { public void testKeyCopierCalledOnGetOrComputeIfAbsent() throws Exception { LongCopier keyCopier = new LongCopier(); OnHeapStore store = newStore(SystemTimeSource.INSTANCE, 
Expirations.noExpiration(), Eviction.noAdvice(), - keyCopier, new SerializingCopier(new CompactJavaSerializer(ClassLoader.getSystemClassLoader())), 100); + keyCopier, new SerializingCopier(new JavaSerializer(ClassLoader.getSystemClassLoader())), 100); ValueHolder computed = store.getOrComputeIfAbsent(1L, new Function>() { @Override @@ -161,8 +160,8 @@ public void testStoreByValue() { CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(false); cacheManager.init(); - DefaultCopierConfiguration copierConfiguration = new DefaultCopierConfiguration( - SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE); + DefaultCopierConfiguration copierConfiguration = new DefaultCopierConfiguration( + SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE); final Cache cache1 = cacheManager.createCache("cache1", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(1)) .build()); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java index dc0e2ab845..6e997c2d79 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java @@ -27,7 +27,6 @@ import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.internal.tier.CachingTierFactory; import org.ehcache.internal.tier.CachingTierSPITest; -import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.CachingTier; @@ -37,6 +36,7 @@ import org.junit.Before; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** 
* This factory instantiates a CachingTier @@ -53,6 +53,7 @@ protected CachingTierFactory getCachingTierFactory() { } @Before + @SuppressWarnings("unchecked") public void setUp() { cachingTierFactory = new CachingTierFactory() { @@ -124,7 +125,7 @@ public void disposeOf(CachingTier tier) { @Override public ServiceProvider getServiceProvider() { - return new ServiceLocator(); + return dependencySet().build(); } }; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java index a4953e076c..b885d7c965 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java @@ -40,6 +40,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * This factory instantiates a CachingTier @@ -88,7 +89,7 @@ public Store.ValueHolder newValueHolder(final String value) { @Override public Store.Provider newProvider() { Store.Provider service = new OnHeapStore.Provider(); - service.start(new ServiceLocator()); + service.start(dependencySet().build()); return service; } @@ -131,7 +132,7 @@ public void disposeOf(CachingTier tier) { @Override public ServiceProvider getServiceProvider() { - return new ServiceLocator(); + return dependencySet().build(); } }; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java index 89c8f6137e..96f6b6ffbe 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java +++ 
b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java @@ -49,6 +49,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.Semaphore; +import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.MatcherAssert.assertThat; @@ -95,7 +96,7 @@ public String apply(String mappedKey) { public void testFaultsDoNotGetToEvictionAdvisor() throws StoreAccessException { final Semaphore semaphore = new Semaphore(0); - final OnHeapStoreForTests store = newStore(SystemTimeSource.INSTANCE, Eviction.noAdvice()); + final OnHeapStoreForTests store = newStore(SystemTimeSource.INSTANCE, noAdvice()); ExecutorService executor = Executors.newCachedThreadPool(); try { @@ -129,7 +130,7 @@ public String value() { public void testEvictionCandidateLimits() throws Exception { TestTimeSource timeSource = new TestTimeSource(); StoreConfigurationImpl configuration = new StoreConfigurationImpl( - String.class, String.class, Eviction.noAdvice(), + String.class, String.class, noAdvice(), getClass().getClassLoader(), Expirations.noExpiration(), heap(1).build(), 1, null, null); TestStoreEventDispatcher eventDispatcher = new TestStoreEventDispatcher(); final String firstKey = "daFirst"; @@ -205,10 +206,12 @@ public static class OnHeapStoreForTests extends OnHeapStore { private static final Copier DEFAULT_COPIER = new IdentityCopier(); + @SuppressWarnings("unchecked") public OnHeapStoreForTests(final Configuration config, final TimeSource timeSource) { super(config, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); } + @SuppressWarnings("unchecked") public OnHeapStoreForTests(final Configuration config, final TimeSource timeSource, final SizeOfEngine engine) { super(config, timeSource, DEFAULT_COPIER, 
DEFAULT_COPIER, engine, NullStoreEventDispatcher.nullStoreEventDispatcher()); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java index bc53dba961..83b0d189c2 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java @@ -93,6 +93,8 @@ public void setUp() { when(configuration.getKeyType()).thenReturn(Key.class); when(configuration.getValueType()).thenReturn(String.class); when(configuration.getExpiry()).thenReturn(Expirations.noExpiration()); + @SuppressWarnings("unchecked") + Store.Configuration config = configuration; Copier keyCopier = new Copier() { @Override @@ -112,7 +114,7 @@ public Key copyForWrite(Key obj) { } }; - store = new OnHeapStore(configuration, SystemTimeSource.INSTANCE, keyCopier, new IdentityCopier(), new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); + store = new OnHeapStore(config, SystemTimeSource.INSTANCE, keyCopier, new IdentityCopier(), new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); } @Test diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java index d8bc9be01c..33274f4598 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java @@ -30,7 +30,6 @@ import static java.util.Collections.EMPTY_LIST; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.*; /** * Basic tests for {@link org.ehcache.impl.internal.store.heap.OnHeapStore.Provider}. 
@@ -73,6 +72,7 @@ public int getTierHeight() { } @Test + @SuppressWarnings("unchecked") public void testRankCachingTier() throws Exception { OnHeapStore.Provider provider = new OnHeapStore.Provider(); @@ -86,4 +86,4 @@ private void assertRank(final Store.Provider provider, final int expectedRank, f Collections.>emptyList()), is(expectedRank)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java index 24579869bc..b236d40ce1 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java @@ -92,6 +92,8 @@ public void setUp() { when(configuration.getKeyType()).thenReturn(Long.class); when(configuration.getValueType()).thenReturn(Value.class); when(configuration.getExpiry()).thenReturn(Expirations.noExpiration()); + @SuppressWarnings("unchecked") + Store.Configuration config = configuration; Copier valueCopier = new Copier() { @Override @@ -111,7 +113,7 @@ public Value copyForWrite(Value obj) { } }; - store = new OnHeapStore(configuration, SystemTimeSource.INSTANCE, new IdentityCopier(), valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); + store = new OnHeapStore(config, SystemTimeSource.INSTANCE, new IdentityCopier(), valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); } @Test diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java index 89c511c1f6..cd86261d1f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java +++ 
b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java @@ -579,7 +579,7 @@ public String apply(String a, String b) { } }); - assertThat(store.getCurrentUsageInBytes(), is(0l)); + assertThat(store.getCurrentUsageInBytes(), is(0L)); } @Test @@ -780,7 +780,8 @@ public void testExpiry() throws StoreAccessException { @Test public void testEviction() throws StoreAccessException { OnHeapStoreForTests store = newStore(1); - StoreEventListener listener = mock(StoreEventListener.class); + @SuppressWarnings("unchecked") + StoreEventListener listener = mock(StoreEventListener.class); store.getStoreEventSource().addEventListener(listener); store.put(KEY, VALUE); @@ -794,7 +795,7 @@ public void testEviction() throws StoreAccessException { long requiredSize = getSize(key1, value1); store.put(key1, value1); - Matcher> matcher = eventType(EventType.EVICTED); + Matcher> matcher = eventType(EventType.EVICTED); verify(listener, times(1)).onEvent(argThat(matcher)); if (store.get(key1) != null) { assertThat(store.getCurrentUsageInBytes(), is(requiredSize)); @@ -805,7 +806,8 @@ public void testEviction() throws StoreAccessException { } static long getSize(String key, String value) { - CopiedOnHeapValueHolder valueHolder = new CopiedOnHeapValueHolder(value, 0l, 0l, true, DEFAULT_COPIER); + @SuppressWarnings("unchecked") + CopiedOnHeapValueHolder valueHolder = new CopiedOnHeapValueHolder(value, 0L, 0L, true, DEFAULT_COPIER); long size = 0L; try { size = SIZE_OF_ENGINE.sizeof(key, valueHolder); @@ -819,6 +821,7 @@ static class OnHeapStoreForTests extends OnHeapStore { private static final Copier DEFAULT_COPIER = new IdentityCopier(); + @SuppressWarnings("unchecked") OnHeapStoreForTests(final Configuration config, final TimeSource timeSource, final SizeOfEngine engine, StoreEventDispatcher eventDispatcher) { super(config, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, engine, eventDispatcher); diff --git 
a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java index 9bc20543aa..1c102303ad 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java @@ -48,6 +48,7 @@ protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { } @Override + @SuppressWarnings("unchecked") protected OnHeapStore newStore(final TimeSource timeSource, final Expiry expiry, final EvictionAdvisor evictionAdvisor, final int capacity) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java index 6e24c971c2..0605201b3f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java @@ -21,6 +21,7 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.expiry.Expiry; import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; import org.ehcache.impl.internal.store.heap.OnHeapStore; @@ -52,6 +53,7 @@ protected OnHeapStore newStore(final TimeSource timeSource, final Expiry expiry, final EvictionAdvisor evictionAdvisor, final Copier keyCopier, final Copier valueCopier, final int capacity) { + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); return new OnHeapStore(new Store.Configuration() { @SuppressWarnings("unchecked") diff --git 
a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java index 8b6e9589f3..a72213bb1d 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java @@ -54,6 +54,7 @@ protected Store.Configuration mockStoreConfig() { return config; } + @SuppressWarnings("unchecked") protected OnHeapStore newStore() { Store.Configuration configuration = mockStoreConfig(); return new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java index 4010fa228d..425b4b4f85 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java @@ -40,6 +40,7 @@ import java.util.Arrays; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; public class OnHeapStoreCachingTierByRefSPITest extends CachingTierSPITest { @@ -51,6 +52,7 @@ protected CachingTierFactory getCachingTierFactory() { } @Before + @SuppressWarnings("unchecked") public void setUp() { cachingTierFactory = new CachingTierFactory() { @@ -124,7 +126,7 @@ public void disposeOf(CachingTier tier) { @Override public ServiceProvider getServiceProvider() { - return new ServiceLocator(); + return dependencySet().build(); } }; diff --git 
a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java index b2567b3b1c..6801d09857 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java @@ -43,6 +43,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; public class OnHeapStoreCachingTierByValueSPITest extends CachingTierSPITest { @@ -86,7 +87,7 @@ public Store.ValueHolder newValueHolder(final String value) { @Override public Store.Provider newProvider() { Store.Provider service = new OnHeapStore.Provider(); - service.start(new ServiceLocator()); + service.start(dependencySet().build()); return service; } @@ -130,7 +131,7 @@ public void disposeOf(CachingTier tier) { @Override public ServiceProvider getServiceProvider() { - return new ServiceLocator(); + return dependencySet().build(); } }; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java index b8abc3b4f4..141c930c1c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java @@ -52,7 +52,6 @@ import java.util.List; import java.util.Random; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; @@ -322,7 +321,7 @@ public 
void testFlushUpdatesHits() throws StoreAccessException { ((AbstractValueHolder)valueHolder).accessed(timeSource.getTimeMillis(), new Duration(1L, TimeUnit.MILLISECONDS)); assertThat(store.flush(key, new DelegatingValueHolder(valueHolder)), is(true)); } - assertThat(store.getAndFault(key).hits(), is(5l)); + assertThat(store.getAndFault(key).hits(), is(5L)); } @Test @@ -702,6 +701,7 @@ public void testIteratorOnEmptyStore() throws Exception { private void performEvictionTest(TestTimeSource timeSource, Expiry expiry, EvictionAdvisor evictionAdvisor) throws StoreAccessException {AbstractOffHeapStore offHeapStore = createAndInitStore(timeSource, expiry, evictionAdvisor); try { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); offHeapStore.getStoreEventSource().addEventListener(listener); @@ -734,6 +734,7 @@ public void describeTo(Description description) { }; } + @SuppressWarnings("unchecked") private OperationStatistic getExpirationStatistic(Store store) { StatisticsManager statisticsManager = new StatisticsManager(); statisticsManager.root(store); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java index 695142afb3..bce531f1ba 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java @@ -42,11 +42,13 @@ public class EhcacheConcurrentOffHeapClockCacheTest extends AbstractEhcacheOffHeapBackingMapTest { @Override + @SuppressWarnings("unchecked") protected EhcacheConcurrentOffHeapClockCache createTestSegment() { return createTestSegment(Eviction.noAdvice(), mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); } @Override + @SuppressWarnings("unchecked") protected 
EhcacheConcurrentOffHeapClockCache createTestSegment(EvictionAdvisor evictionPredicate) { return createTestSegment(evictionPredicate, mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderTest.java deleted file mode 100644 index 3e305e3841..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderTest.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.ehcache.impl.internal.store.offheap; - -import org.ehcache.config.Eviction; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePool; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.ResourceType; -import org.ehcache.config.ResourceUnit; -import org.ehcache.config.SizedResourcePool; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.Store; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.internal.DefaultTimeSourceService; -import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.impl.serialization.StringSerializer; -import org.ehcache.spi.serialization.SerializationProvider; -import org.ehcache.spi.serialization.Serializer; -import org.junit.Test; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.context.query.Matcher; -import org.terracotta.context.query.Matchers; -import org.terracotta.context.query.Query; - -import java.util.Map; -import java.util.Set; - -import static java.util.Collections.singleton; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; -import static org.terracotta.context.query.Matchers.attributes; -import static org.terracotta.context.query.Matchers.context; -import static org.terracotta.context.query.Matchers.hasAttribute; -import static org.terracotta.context.query.QueryBuilder.queryBuilder; - -/** - * OffHeapStoreProviderTest - */ -public class OffHeapStoreProviderTest { - @Test - public void testStatisticsAssociations() throws Exception { - OffHeapStore.Provider provider = new OffHeapStore.Provider(); - - ServiceLocator serviceLocator = new ServiceLocator(mock(SerializationProvider.class), new DefaultTimeSourceService(null)); - - provider.start(serviceLocator); - - OffHeapStore store = 
provider.createStore(getStoreConfig()); - - Query storeQuery = queryBuilder() - .children() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.containsAll(singleton("store")); - } - }))))) - .build(); - - Set nodes = singleton(ContextManager.nodeFor(store)); - - Set storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(false)); - - provider.releaseStore(store); - - storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(true)); - } - - private Store.Configuration getStoreConfig() { - return new Store.Configuration() { - @Override - public Class getKeyType() { - return Long.class; - } - - @Override - public Class getValueType() { - return String.class; - } - - @Override - public EvictionAdvisor getEvictionAdvisor() { - return Eviction.noAdvice(); - } - - @Override - public ClassLoader getClassLoader() { - return getClass().getClassLoader(); - } - - @Override - public Expiry getExpiry() { - return Expirations.noExpiration(); - } - - @Override - public ResourcePools getResourcePools() { - return new ResourcePools() { - @Override - public ResourcePool getPoolForResource(ResourceType resourceType) { - return new SizedResourcePool() { - @Override - public ResourceType getType() { - return ResourceType.Core.OFFHEAP; - } - - @Override - public long getSize() { - return 1; - } - - @Override - public ResourceUnit getUnit() { - return MemoryUnit.MB; - } - - @Override - public boolean isPersistent() { - return false; - } - - @Override - public void validateUpdate(ResourcePool newPool) { - throw new UnsupportedOperationException("TODO Implement me!"); - } - }; - } - - @Override - public Set> getResourceTypeSet() { - return (Set) singleton(ResourceType.Core.OFFHEAP); - } - - @Override - public ResourcePools validateAndMerge(ResourcePools toBeUpdated) throws IllegalArgumentException, UnsupportedOperationException { - 
throw new UnsupportedOperationException("TODO Implement me!"); - } - }; - } - - @Override - public Serializer getKeySerializer() { - return new LongSerializer(); - } - - @Override - public Serializer getValueSerializer() { - return new StringSerializer(); - } - - @Override - public int getDispatcherConcurrency() { - return 1; - } - - }; - }} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java index 87f2c466b9..f4f6b5f0c9 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java @@ -41,6 +41,7 @@ import java.util.Arrays; import static org.ehcache.config.ResourceType.Core.OFFHEAP; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * OffHeapStoreSPITest @@ -117,7 +118,7 @@ public ServiceConfiguration[] getServiceConfigurations() { @Override public ServiceLocator getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator serviceLocator = dependencySet().build(); try { serviceLocator.startAllServices(); } catch (Exception e) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java index e16a7ee2f6..7dad593401 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java @@ -17,7 +17,6 @@ package org.ehcache.impl.internal.store.offheap; import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.config.units.MemoryUnit; @@ -81,6 +80,7 @@ protected OffHeapStore 
createAndInitStore(TimeSource timeSource, } @Test + @SuppressWarnings("unchecked") public void testRankAuthority() throws Exception { OffHeapStore.Provider provider = new OffHeapStore.Provider(); @@ -115,4 +115,4 @@ private void assertRank(final Store.Provider provider, final int expectedRank, f protected void destroyStore(AbstractOffHeapStore store) { OffHeapStore.Provider.close((OffHeapStore) store); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java index fee5301370..7e922b169f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java @@ -42,10 +42,12 @@ public class EhcacheSegmentTest { + @SuppressWarnings("unchecked") private EhcacheSegmentFactory.EhcacheSegment createTestSegment() { return createTestSegment(Eviction.noAdvice(), mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); } + @SuppressWarnings("unchecked") private EhcacheSegmentFactory.EhcacheSegment createTestSegment(EvictionAdvisor evictionPredicate) { return createTestSegment(evictionPredicate, mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); } @@ -135,6 +137,7 @@ public void testAdviceAgainstEvictionPreventsEviction() { @Test public void testEvictionFiresEvent() { + @SuppressWarnings("unchecked") EhcacheSegmentFactory.EhcacheSegment.EvictionListener evictionListener = mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class); EhcacheSegmentFactory.EhcacheSegment segment = createTestSegment(evictionListener); try { @@ -145,4 +148,4 @@ public void testEvictionFiresEvent() { segment.destroy(); } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java 
b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java index 4ebb16df63..8775583452 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java @@ -45,6 +45,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * This factory instantiates a CachingTier @@ -140,7 +141,7 @@ public void disposeOf(CachingTier tier) { @Override public ServiceProvider getServiceProvider() { - return new ServiceLocator(); + return dependencySet().build(); } }; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java index df5d547323..f57bda05e4 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java @@ -52,6 +52,7 @@ public class CompoundCachingTierTest { @Test + @SuppressWarnings("unchecked") public void testGetOrComputeIfAbsentComputesWhenBothTiersEmpty() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -83,6 +84,7 @@ public Store.ValueHolder apply(String s) { } @Test + @SuppressWarnings("unchecked") public void testGetOrComputeIfAbsentDoesNotComputesWhenHigherTierContainsValue() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -107,6 +109,7 @@ public Store.ValueHolder apply(String s) { } @Test + @SuppressWarnings("unchecked") public void 
testGetOrComputeIfAbsentDoesNotComputesWhenLowerTierContainsValue() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -138,6 +141,7 @@ public Store.ValueHolder apply(String s) { } @Test + @SuppressWarnings("unchecked") public void testGetOrComputeIfAbsentComputesWhenLowerTierExpires() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); final LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -186,6 +190,7 @@ public Store.ValueHolder apply(String s) { } @Test + @SuppressWarnings("unchecked") public void testInvalidateNoArg() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -198,6 +203,7 @@ public void testInvalidateNoArg() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testInvalidateWhenNoValueDoesNotFireListener() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -218,6 +224,7 @@ public void onInvalidation(String key, Store.ValueHolder valueHolder) { } @Test + @SuppressWarnings("unchecked") public void testInvalidateWhenValueInLowerTierFiresListener() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -275,6 +282,7 @@ public void onInvalidation(String key, Store.ValueHolder valueHolder) { } @Test + @SuppressWarnings("unchecked") public void testInvalidateWhenValueInHigherTierFiresListener() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -333,6 +341,7 @@ public void onInvalidation(String key, Store.ValueHolder valueHolder) { } @Test + @SuppressWarnings("unchecked") public void testInvalidateAllCoversBothTiers() throws Exception { 
HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -346,6 +355,7 @@ public void testInvalidateAllCoversBothTiers() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testRankCachingTier() throws Exception { CompoundCachingTier.Provider provider = new CompoundCachingTier.Provider(); HashSet> resourceTypes = new HashSet>(); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java index ad1f2dafcc..576cc4d290 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java @@ -19,17 +19,18 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; -import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.expiry.Expirations; import org.ehcache.expiry.Expiry; -import org.ehcache.impl.persistence.DefaultLocalPersistenceService; +import org.ehcache.impl.persistence.DefaultDiskResourceService; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.core.spi.store.Store; +import org.ehcache.impl.persistence.DefaultLocalPersistenceService; import org.ehcache.impl.serialization.JavaSerializer; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; @@ 
-42,6 +43,7 @@ import java.io.Serializable; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; @@ -59,17 +61,17 @@ public void testTieredStoreReleaseFlushesEntries() throws Exception { Store.Configuration configuration = new Store.Configuration() { @Override - public Class getKeyType() { + public Class getKeyType() { return Number.class; } @Override - public Class getValueType() { - return Serializable.class; + public Class getValueType() { + return String.class; } @Override - public EvictionAdvisor getEvictionAdvisor() { + public EvictionAdvisor getEvictionAdvisor() { return null; } @@ -79,7 +81,7 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { + public Expiry getExpiry() { return Expirations.noExpiration(); } @@ -113,9 +115,9 @@ public int getDispatcherConcurrency() { CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, true).build()); - LocalPersistenceService persistenceService = serviceLocator.getService(LocalPersistenceService.class); - PersistenceSpaceIdentifier persistenceSpace = persistenceService.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); - Store tieredStore = tieredStoreProvider.createStore(configuration, new ServiceConfiguration[] {persistenceSpace}); + DiskResourceService diskResourceService = serviceLocator.getService(DiskResourceService.class); + PersistenceSpaceIdentifier persistenceSpace = diskResourceService.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); + Store tieredStore = tieredStoreProvider.createStore(configuration, persistenceSpace); 
tieredStoreProvider.initStore(tieredStore); for (int i = 0; i < 100; i++) { tieredStore.put(i, "hello"); @@ -136,23 +138,25 @@ public int getDispatcherConcurrency() { serviceLocator1.startAllServices(); tieredStoreProvider.start(serviceLocator1); - LocalPersistenceService persistenceService1 = serviceLocator1.getService(LocalPersistenceService.class); - PersistenceSpaceIdentifier persistenceSpace1 = persistenceService1.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); - tieredStore = tieredStoreProvider.createStore(configuration, new ServiceConfiguration[] {persistenceSpace1}); + DiskResourceService diskResourceService1 = serviceLocator1.getService(DiskResourceService.class); + PersistenceSpaceIdentifier persistenceSpace1 = diskResourceService1.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); + tieredStore = tieredStoreProvider.createStore(configuration, persistenceSpace1); tieredStoreProvider.initStore(tieredStore); for(int i = 0; i < 20; i++) { - assertThat(tieredStore.get(i).hits(), is(21l)); + assertThat(tieredStore.get(i).hits(), is(21L)); } } private ServiceLocator getServiceLocator(File location) throws Exception { DefaultPersistenceConfiguration persistenceConfiguration = new DefaultPersistenceConfiguration(location); - DefaultLocalPersistenceService persistenceService = new DefaultLocalPersistenceService(persistenceConfiguration); - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(persistenceService); - serviceLocator.addService(new OnHeapStore.Provider()); - serviceLocator.addService(new OffHeapDiskStore.Provider()); - return serviceLocator; + DefaultLocalPersistenceService fileService = new DefaultLocalPersistenceService(persistenceConfiguration); + DefaultDiskResourceService diskResourceService = new DefaultDiskResourceService(); + ServiceLocator.DependencySet dependencySet = dependencySet(); + dependencySet.with(fileService); + 
dependencySet.with(diskResourceService); + dependencySet.with(new OnHeapStore.Provider()); + dependencySet.with(new OffHeapDiskStore.Provider()); + return dependencySet.build(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java new file mode 100644 index 0000000000..b7baf15198 --- /dev/null +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java @@ -0,0 +1,459 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.impl.internal.store.tiering; + +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.spi.store.tiering.AuthoritativeTier; +import org.ehcache.core.spi.store.tiering.CachingTier; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.docs.plugs.StringCopier; +import org.ehcache.expiry.Expirations; +import org.ehcache.impl.internal.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; +import org.ehcache.impl.internal.store.basic.NopStore; +import org.ehcache.impl.internal.store.heap.OnHeapStore; +import org.ehcache.impl.internal.store.offheap.BasicOffHeapValueHolder; +import org.ehcache.spi.test.After; +import org.junit.Before; +import org.junit.Test; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.junit.Assert.assertThat; + +/** + * Tests for {@link TieredStore}. These tests are mainly to validate that + * ehcache3#1522 is correctly fixed. + *

+ * Only putIfAbsent is tested due to the time it takes to create each test. All methods that conditionally + * modify the authoritative tier and then invalidate the caching tier are impacted. + *

    + *
  • putIfAbsent
  • + *
  • remove(key, value): If the remove does nothing because the value is different, it will return KEY_PRESENT but the get will return null
  • + *
  • replace(key, value): A value must be present. That value has been removed but not yet invalidated. Then another thread attempts a replace, fails and does a get. It will see the old value instead of null
  • + *
  • replace(key,old,new): If the replace does nothing because the old value does not match, a subsequent get can still return a stale value instead of the current one
  • + *
+ * They should invalidate even if they have not modified the authoritative tier, to prevent inconsistencies. + *

+ * Note: In the tests below, it fails by a deadlock we are creating on purpose. In real life, we would get() + * inconsistent values instead + */ +public class TieredStoreMutatorTest { + + private static final String KEY = "KEY"; + private static final String VALUE = "VALUE"; + private static final String OTHER_VALUE = "OTHER_VALUE"; + + private class AuthoritativeTierMock extends NopStore { + + private final AtomicBoolean get = new AtomicBoolean(false); + + private final ConcurrentMap map = new ConcurrentHashMap(); + + @Override + public PutStatus put(String key, String value) throws StoreAccessException { + String oldValue = map.put(key, value); + + try { + progressLatch.countDown(); + thread3Latch.await(); + } catch (InterruptedException e) { + // ignore + } + + if(oldValue == null) { + return PutStatus.PUT; + } + if(oldValue.equals(value)) { + return PutStatus.NOOP; + } + return PutStatus.UPDATE; + } + + @Override + public boolean remove(String key) throws StoreAccessException { + boolean result = map.remove(key) != null; + try { + progressLatch.countDown(); + thread3Latch.await(); + } catch (InterruptedException e) { + // ignore + } + return result; + } + + @Override + public ValueHolder getAndFault(String key) throws StoreAccessException { + // First, called by Thread 1, blocks + // Then, called by test thread, returns a value holder of null + if (get.compareAndSet(false, true)) { + try { + progressLatch.countDown(); + thread1Latch.await(); + } catch (InterruptedException e) { + // ignore + } + } + return createValueHolder(map.get(key)); + } + + @Override + public ValueHolder putIfAbsent(String key, String value) throws StoreAccessException { + return createValueHolder(map.putIfAbsent(key, value)); + } + + @Override + public RemoveStatus remove(String key, String value) throws StoreAccessException { + String oldValue = map.get(key); + if(oldValue == null) { + return RemoveStatus.KEY_MISSING; + } + if(value.equals(oldValue)) { + map.remove(key); + return 
RemoveStatus.REMOVED; + } + return RemoveStatus.KEY_PRESENT; + } + + @Override + public ValueHolder replace(String key, String value) throws StoreAccessException { + return createValueHolder(map.replace(key, value)); + } + + @Override + public ReplaceStatus replace(String key, String oldValue, String newValue) throws StoreAccessException { + String currentValue = map.get(key); + if(currentValue == null) { + return ReplaceStatus.MISS_NOT_PRESENT; + } + if(currentValue.equals(oldValue)) { + map.replace(key, newValue); + return ReplaceStatus.HIT; + } + return ReplaceStatus.MISS_PRESENT; + } + } + + private final AuthoritativeTier authoritativeTier = new AuthoritativeTierMock(); + + private TieredStore tieredStore; + + private Thread thread3 = null; + private volatile boolean failed = false; + + private final CountDownLatch progressLatch = new CountDownLatch(2); + private final CountDownLatch thread1Latch = new CountDownLatch(1); + private final CountDownLatch thread3Latch = new CountDownLatch(1); + + @Before + public void setUp() throws Exception { + // Not relevant to the test, just used to instantiate the OnHeapStore + ResourcePools resourcePools = ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(1, MemoryUnit.MB) + .disk(1, MemoryUnit.GB, false) + .build(); + + // Not relevant to the test, just used to instantiate the OnHeapStore + Store.Configuration config = new StoreConfigurationImpl(String.class, String.class, + null, getClass().getClassLoader(), Expirations.noExpiration(), resourcePools, 0, null, null); + + // Here again, all parameters are useless, we only care about the beforeCompletingTheFault implementation + CachingTier cachingTier = new OnHeapStore(config, SystemTimeSource.INSTANCE, + StringCopier.copier(), StringCopier.copier(), new NoopSizeOfEngine(), NullStoreEventDispatcher. 
+ nullStoreEventDispatcher()); + + tieredStore = new TieredStore(cachingTier, authoritativeTier); + } + + @After + public void after() { + releaseThreads(); + } + + @Test + public void testPutIfAbsent() throws Exception { + + // 1. Thread 1 gets the key but found null in the on-heap backend + // 2. Thread 1 creates a Fault and then block + // a. Thread 1 -> Fault.get() + // b. Thread 1 -> AuthoritativeTierMock.getAndFault - BLOCK + launchThread(new Runnable() { + @Override + public void run() { + getFromTieredStore(); + } + }); + + // 3. Thread 2 does a put. But it hasn't invalided the on-heap yet (it blocks instead) + // a. Thread 2 -> TieredStore.put + // b. Thread 2 -> AuthoritativeTierMock.put - BLOCK + launchThread(new Runnable() { + @Override + public void run() { + putToTieredStore(); + } + }); + + // At this point we have a fault with null in the caching tier and a value in the authority + // However the fault has not yet been invalidated following the authority update + progressLatch.await(); + + // 6. Thread 3 - unblock Faults after X ms to make sure it happens after the test thread gets the fault + launchThread3(); + + // 4. Test Thread receives a value from putIfAbsent. We would expect the get to receive the same value right after + // a. Test Thread -> TieredStore.putIfAbsent + // b. Test Thread -> AuthoritativeTierMock.putIfAbsent - returns VALUE + assertThat(putIfAbsentToTieredStore().value(), is(VALUE)); + + // 5. Test Thread -> TieredStore.get() + // If Test Thread bugged -> Fault.get() - synchronized - blocked on the fault because thread 2 already locks the fault + // Else Test Thread fixed -> new Fault ... correct value + Store.ValueHolder value = getFromTieredStore(); + + // These assertions will in fact work most of the time even if a failure occurred. 
Because as soon as the latches are + // released by thread 3, the thread 2 will invalidate the fault + assertThat(value, notNullValue()); + assertThat(value.value(), is(VALUE)); + + // If the Test thread was blocked, Thread 3 will eventually flag the failure + assertThat(failed, is(false)); + } + + @Test + public void testRemoveKeyValue() throws Exception { + // Follows the same pattern as testPutIfAbsent except that at the end, if remove returns KEY_PRESENT, we expect + // the get to return VALUE afterwards + + launchThread(new Runnable() { + @Override + public void run() { + getFromTieredStore(); + } + }); + + launchThread(new Runnable() { + @Override + public void run() { + putToTieredStore(); + } + }); + + progressLatch.await(); + + launchThread3(); + + // 4. Test Thread receives KEY_PRESENT from remove. We would expect the get to receive a value right afterwards + // a. Test Thread -> TieredStore.remove + // b. Test Thread -> AuthoritativeTierMock.remove - returns KEY_PRESENT + assertThat(removeKeyValueFromTieredStore(OTHER_VALUE), is(Store.RemoveStatus.KEY_PRESENT)); + + // 5. Test Thread -> TieredStore.get() + // If Test Thread bugged -> Fault.get() - synchronized - blocked + // Else Test Thread fixed -> new Fault ... correct value + Store.ValueHolder value = getFromTieredStore(); + assertThat(value, notNullValue()); + assertThat(value.value(), is(VALUE)); + + assertThat(failed, is(false)); + } + + @Test + public void testReplaceKeyValue() throws Exception { + // Follows the same pattern as testPutIfAbsent except that at the end, if remove returns null, we expect + // the get to return null afterwards + + // 1. Put a value. The value is now in the authoritative tier + putIfAbsentToTieredStore(); // using putIfAbsent instead of put here because our mock won't block on a putIfAbsent + + // 2. Thread 1 gets the key but found null in the on-heap backend + // 3. Thread 1 creates a Fault and then block + // a. Thread 1 -> Fault.get() + // b. 
Thread 1 -> AuthoritativeTierMock.getAndFault - BLOCK + launchThread(new Runnable() { + @Override + public void run() { + getFromTieredStore(); + } + }); + + // 3. Thread 3 does a remove. But it hasn't invalided the on-heap yet (it blocks instead) + // a. Thread 2 -> TieredStore.remove + // b. Thread 2 -> AuthoritativeTierMock.remove - BLOCK + launchThread(new Runnable() { + @Override + public void run() { + removeKeyFromTieredStore(); + } + }); + + progressLatch.await(); + + launchThread3(); + + // 4. Test Thread receives null from replace. We would expect the get to receive the same null afterwards + // a. Test Thread -> TieredStore.replace + // b. Test Thread -> AuthoritativeTierMock.replace - returns null + assertThat(replaceFromTieredStore(VALUE), nullValue()); + + // 5. Test Thread -> TieredStore.get() + // If Test Thread bugged -> Fault.get() - synchronized - blocked + // Else Test Thread fixed -> new Fault ... correct value + Store.ValueHolder value = getFromTieredStore(); + assertThat(value, nullValue()); + + assertThat(failed, is(false)); + } + + @Test + public void testReplaceKeyOldNewValue() throws Exception { + // Follows the same pattern as testReplaceKey + + putIfAbsentToTieredStore(); // using putIfAbsent instead of put here because our mock won't block on a putIfAbsent + + launchThread(new Runnable() { + @Override + public void run() { + getFromTieredStore(); + } + }); + + launchThread(new Runnable() { + @Override + public void run() { + removeKeyFromTieredStore(); + } + }); + + progressLatch.await(); + + launchThread3(); + + assertThat(replaceFromTieredStore(VALUE, OTHER_VALUE), is(Store.ReplaceStatus.MISS_NOT_PRESENT)); + + // 5. Test Thread -> TieredStore.get() + // If Test Thread bugged -> Fault.get() - synchronized - blocked + // Else Test Thread fixed -> new Fault ... 
correct value + Store.ValueHolder value = getFromTieredStore(); + assertThat(value, nullValue()); + + assertThat(failed, is(false)); + } + + private Store.ValueHolder createValueHolder(String value) { + if(value == null) { + return null; + } + return new BasicOffHeapValueHolder(1, value, Long.MAX_VALUE, System.currentTimeMillis() - 1); + } + + private Store.PutStatus putToTieredStore() { + try { + return tieredStore.put(KEY, VALUE); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private boolean removeKeyFromTieredStore() { + try { + return tieredStore.remove(KEY); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private Store.ValueHolder putIfAbsentToTieredStore() { + try { + return tieredStore.putIfAbsent(KEY, VALUE); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private Store.RemoveStatus removeKeyValueFromTieredStore(String value) { + try { + return tieredStore.remove(KEY, value); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private Store.ValueHolder replaceFromTieredStore(String value) { + try { + return tieredStore.replace(KEY, value); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private Store.ReplaceStatus replaceFromTieredStore(String oldValue, String newValue) { + try { + return tieredStore.replace(KEY, oldValue, newValue); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private Store.ValueHolder getFromTieredStore() { + try { + return tieredStore.get(KEY); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private void launchThread3() { + thread3 = launchThread(new Runnable() { + @Override + public void run() { + try { + // Give time to test thread to reach blocked fault + Thread.sleep(1000); + } catch (InterruptedException e) { + // ignore + } + failed = true; + thread1Latch.countDown(); + thread3Latch.countDown(); + 
} + }); + } + + private Thread launchThread(Runnable runnable) { + Thread thread = new Thread(runnable); + thread.setDaemon(true); + thread.start(); + return thread; + } + + private void releaseThreads() { + if(thread3 != null) { + thread3.interrupt(); + } + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java index 4ed2f05e94..750c880d8c 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java @@ -22,7 +22,7 @@ import org.ehcache.config.ResourceType; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.config.SizedResourcePool; -import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.CachePersistenceException; @@ -33,7 +33,7 @@ import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.executor.OnDemandExecutionService; -import org.ehcache.impl.persistence.DefaultLocalPersistenceService; +import org.ehcache.impl.internal.persistence.TestDiskResourceService; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.disk.OffHeapDiskStoreSPITest; @@ -52,7 +52,6 @@ import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.After; @@ 
-70,6 +69,7 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.MemoryUnit.MB; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -83,11 +83,13 @@ public class TieredStoreSPITest extends StoreSPITest { private StoreFactory storeFactory; private final TieredStore.Provider provider = new TieredStore.Provider(); private final Map, String> createdStores = new ConcurrentHashMap, String>(); - private LocalPersistenceService persistenceService; @Rule public final TemporaryFolder folder = new TemporaryFolder(); + @Rule + public TestDiskResourceService diskResourceService = new TestDiskResourceService(); + @Override protected StoreFactory getStoreFactory() { return storeFactory; @@ -95,7 +97,6 @@ protected StoreFactory getStoreFactory() { @Before public void setUp() throws IOException { - persistenceService = new DefaultLocalPersistenceService(new CacheManagerPersistenceConfiguration(folder.newFolder())); storeFactory = new StoreFactory() { final AtomicInteger aliasCounter = new AtomicInteger(); @@ -126,14 +127,15 @@ private Store newStore(Long capacity, EvictionAdvisor config = new StoreConfigurationImpl(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, buildResourcePools(capacity), 0, keySerializer, valueSerializer); - final Copier defaultCopier = new IdentityCopier(); + @SuppressWarnings("unchecked") + final Copier defaultCopier = new IdentityCopier(); OnHeapStore onHeapStore = new OnHeapStore(config, timeSource, defaultCopier, defaultCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); try { CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MB, false).build()); String spaceName = "alias-" + 
aliasCounter.getAndIncrement(); - LocalPersistenceService.PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); - FileBasedPersistenceContext persistenceContext = persistenceService.createPersistenceContextWithin(space, "store"); + DiskResourceService.PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); + FileBasedPersistenceContext persistenceContext = diskResourceService.createPersistenceContextWithin(space, "store"); SizedResourcePool diskPool = config.getResourcePools().getPoolForResource(ResourceType.Core.DISK); MemoryUnit unit = (MemoryUnit) diskPool.getUnit(); @@ -294,7 +296,7 @@ public void close(final Store store) { String spaceName = createdStores.get(store); provider.releaseStore(store); try { - persistenceService.destroy(spaceName); + diskResourceService.destroy(spaceName); } catch (CachePersistenceException e) { throw new AssertionError(e); } finally { @@ -304,23 +306,19 @@ public void close(final Store store) { @Override public ServiceProvider getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(new FakeCachingTierProvider()); - serviceLocator.addService(new FakeAuthoritativeTierProvider()); - return serviceLocator; + ServiceLocator.DependencySet dependencySet = dependencySet(); + dependencySet.with(new FakeCachingTierProvider()); + dependencySet.with(new FakeAuthoritativeTierProvider()); + return dependencySet.build(); } }; } @After public void tearDown() throws CachePersistenceException { - try { - for (Map.Entry, String> entry : createdStores.entrySet()) { - provider.releaseStore(entry.getKey()); - persistenceService.destroy(entry.getValue()); - } - } finally { - persistenceService.stop(); + for (Map.Entry, String> entry : createdStores.entrySet()) { + provider.releaseStore(entry.getKey()); + diskResourceService.destroy(entry.getValue()); } } @@ -333,6 +331,7 @@ private 
ResourcePools buildResourcePools(Comparable capacityConstraint) { public static class FakeCachingTierProvider implements CachingTier.Provider { @Override + @SuppressWarnings("unchecked") public CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { return mock(CachingTier.class); } @@ -365,6 +364,7 @@ public void stop() { public static class FakeAuthoritativeTierProvider implements AuthoritativeTier.Provider { @Override + @SuppressWarnings("unchecked") public AuthoritativeTier createAuthoritativeTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { return mock(AuthoritativeTier.class); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java index 6c1eabd7b1..74e1d9198c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java @@ -20,20 +20,21 @@ import org.ehcache.config.ResourceType; import org.ehcache.config.SizedResourcePool; import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.function.BiFunction; import org.ehcache.core.spi.function.Function; import org.ehcache.core.spi.function.NullaryFunction; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.Store.RemoveStatus; import org.ehcache.core.spi.store.Store.ReplaceStatus; +import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.core.spi.store.tiering.CachingTier; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.OffHeapStore; -import org.ehcache.spi.service.ServiceProvider; import 
org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceProvider; import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Before; @@ -57,7 +58,7 @@ import java.util.concurrent.CyclicBarrier; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singleton; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; @@ -90,6 +91,7 @@ public void setUp() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testGetHitsCachingTier() throws Exception { when(numberCachingTier.getOrComputeIfAbsent(eq(1), any(Function.class))).thenReturn(newValueHolder("one")); @@ -101,6 +103,7 @@ public void testGetHitsCachingTier() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testGetHitsAuthoritativeTier() throws Exception { Store.ValueHolder valueHolder = newValueHolder("one"); when(numberAuthoritativeTier.getAndFault(eq(1))).thenReturn(valueHolder); @@ -122,6 +125,7 @@ public Store.ValueHolder answer(InvocationOnMock invocation) throw } @Test + @SuppressWarnings("unchecked") public void testGetMisses() throws Exception { when(numberAuthoritativeTier.getAndFault(eq(1))).thenReturn(null); when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).then(new Answer>() { @@ -163,14 +167,14 @@ public void testPutIfAbsent_whenAbsent() throws Exception { @Test public void testPutIfAbsent_whenPresent() throws Exception { - when(numberAuthoritativeTier.putIfAbsent(eq(1), eq("one"))).thenReturn(newValueHolder("un")); + when(numberAuthoritativeTier.putIfAbsent(1, "one")).thenReturn(newValueHolder("un")); TieredStore tieredStore = new TieredStore(numberCachingTier, numberAuthoritativeTier); assertThat(tieredStore.putIfAbsent(1, "one").value(), Matchers.equalTo("un")); - 
verify(numberCachingTier, times(0)).invalidate(any(Number.class)); - verify(numberAuthoritativeTier, times(1)).putIfAbsent(eq(1), eq("one")); + verify(numberCachingTier, times(1)).invalidate(1); + verify(numberAuthoritativeTier, times(1)).putIfAbsent(1, "one"); } @Test @@ -203,7 +207,7 @@ public void testRemove2Args_doesNotRemove() throws Exception { assertThat(tieredStore.remove(1, "one"), is(RemoveStatus.KEY_MISSING)); - verify(numberCachingTier, times(0)).invalidate(any(Number.class)); + verify(numberCachingTier).invalidate(any(Number.class)); verify(numberAuthoritativeTier, times(1)).remove(eq(1), eq("one")); } @@ -227,7 +231,7 @@ public void testReplace2Args_doesNotReplace() throws Exception { assertThat(tieredStore.replace(1, "one"), is(nullValue())); - verify(numberCachingTier, times(0)).invalidate(any(Number.class)); + verify(numberCachingTier).invalidate(any(Number.class)); verify(numberAuthoritativeTier, times(1)).replace(eq(1), eq("one")); } @@ -251,7 +255,7 @@ public void testReplace3Args_doesNotReplace() throws Exception { assertThat(tieredStore.replace(1, "un", "one"), is(ReplaceStatus.MISS_NOT_PRESENT)); - verify(numberCachingTier, times(0)).invalidate(any(Number.class)); + verify(numberCachingTier).invalidate(any(Number.class)); verify(numberAuthoritativeTier, times(1)).replace(eq(1), eq("un"), eq("one")); } @@ -266,6 +270,7 @@ public void testClear() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testCompute2Args() throws Exception { when(numberAuthoritativeTier.compute(any(Number.class), any(BiFunction.class))).then(new Answer>() { @Override @@ -290,6 +295,7 @@ public CharSequence apply(Number number, CharSequence charSequence) { } @Test + @SuppressWarnings("unchecked") public void testCompute3Args() throws Exception { when(numberAuthoritativeTier.compute(any(Number.class), any(BiFunction.class), any(NullaryFunction.class))).then(new Answer>() { @Override @@ -319,6 +325,7 @@ public Boolean apply() { } @Test + 
@SuppressWarnings("unchecked") public void testComputeIfAbsent_computes() throws Exception { when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).thenAnswer(new Answer>() { @Override @@ -351,6 +358,7 @@ public CharSequence apply(Number number) { } @Test + @SuppressWarnings("unchecked") public void testComputeIfAbsent_doesNotCompute() throws Exception { final Store.ValueHolder valueHolder = newValueHolder("one"); when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).thenAnswer(new Answer>() { @@ -374,6 +382,7 @@ public CharSequence apply(Number number) { } @Test + @SuppressWarnings("unchecked") public void testBulkCompute2Args() throws Exception { when(numberAuthoritativeTier.bulkCompute(any(Set.class), any(Function.class))).thenAnswer(new Answer>>() { @Override @@ -418,6 +427,7 @@ public Map> answer(InvocationOnMock invo } @Test + @SuppressWarnings("unchecked") public void testBulkCompute3Args() throws Exception { when( numberAuthoritativeTier.bulkCompute(any(Set.class), any(Function.class), any(NullaryFunction.class))).thenAnswer(new Answer>>() { @@ -468,6 +478,7 @@ public Boolean apply() { } @Test + @SuppressWarnings("unchecked") public void testBulkComputeIfAbsent() throws Exception { when(numberAuthoritativeTier.bulkComputeIfAbsent(any(Set.class), any(Function.class))).thenAnswer(new Answer>>() { @Override @@ -547,6 +558,7 @@ public void run() { } @Test + @SuppressWarnings("unchecked") public void testReleaseStoreFlushes() throws Exception { TieredStore.Provider tieredStoreProvider = new TieredStore.Provider(); @@ -596,7 +608,7 @@ public void testReleaseStoreFlushes() throws Exception { @Test public void testRank() throws Exception { TieredStore.Provider provider = new TieredStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(provider); + ServiceLocator serviceLocator = dependencySet().with(provider).with(mock(DiskResourceService.class)).build(); serviceLocator.startAllServices(); 
assertRank(provider, 0, ResourceType.Core.DISK); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java index ca11ca81f0..d2c82fae66 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java @@ -22,7 +22,7 @@ import org.ehcache.config.ResourceType; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.config.SizedResourcePool; -import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; @@ -34,7 +34,7 @@ import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.executor.OnDemandExecutionService; -import org.ehcache.impl.persistence.DefaultLocalPersistenceService; +import org.ehcache.impl.internal.persistence.TestDiskResourceService; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.disk.OffHeapDiskStoreSPITest; @@ -55,7 +55,6 @@ import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.After; @@ -72,6 +71,7 @@ import java.util.concurrent.atomic.AtomicInteger; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static 
org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -84,11 +84,13 @@ public class TieredStoreWith3TiersSPITest extends StoreSPITest { private StoreFactory storeFactory; private final TieredStore.Provider provider = new TieredStore.Provider(); private final Map, String> createdStores = new ConcurrentHashMap, String>(); - private LocalPersistenceService persistenceService; @Rule public final TemporaryFolder folder = new TemporaryFolder(); + @Rule + public TestDiskResourceService diskResourceService = new TestDiskResourceService(); + @Override protected StoreFactory getStoreFactory() { return storeFactory; @@ -96,7 +98,6 @@ protected StoreFactory getStoreFactory() { @Before public void setUp() throws IOException { - persistenceService = new DefaultLocalPersistenceService(new CacheManagerPersistenceConfiguration(folder.newFolder())); storeFactory = new StoreFactory() { final AtomicInteger aliasCounter = new AtomicInteger(); @@ -126,7 +127,8 @@ private Store newStore(Long capacity, EvictionAdvisor valueSerializer = new JavaSerializer(getClass().getClassLoader()); Store.Configuration config = new StoreConfigurationImpl(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, buildResourcePools(capacity), 0, keySerializer, valueSerializer); - final Copier defaultCopier = new IdentityCopier(); + @SuppressWarnings("unchecked") + final Copier defaultCopier = new IdentityCopier(); StoreEventDispatcher noOpEventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); final OnHeapStore onHeapStore = new OnHeapStore(config, timeSource, defaultCopier, defaultCopier, new NoopSizeOfEngine(), noOpEventDispatcher); @@ -140,8 +142,8 @@ private Store newStore(Long capacity, EvictionAdvisor store) { String spaceName = createdStores.get(store); provider.releaseStore(store); try { - persistenceService.destroy(spaceName); + 
diskResourceService.destroy(spaceName); } catch (CachePersistenceException e) { throw new AssertionError(e); } finally { @@ -318,9 +320,8 @@ public void close(final Store store) { @Override public ServiceProvider getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(new FakeCachingTierProvider()); - serviceLocator.addService(new FakeAuthoritativeTierProvider()); + ServiceLocator serviceLocator = dependencySet().with(new FakeCachingTierProvider()) + .with(new FakeAuthoritativeTierProvider()).build(); return serviceLocator; } }; @@ -331,10 +332,10 @@ public void tearDown() throws CachePersistenceException { try { for (Map.Entry, String> entry : createdStores.entrySet()) { provider.releaseStore(entry.getKey()); - persistenceService.destroy(entry.getValue()); + diskResourceService.destroy(entry.getValue()); } } finally { - persistenceService.stop(); + diskResourceService.stop(); } } @@ -355,6 +356,7 @@ private ResourcePools buildResourcePools(Long capacityConstraint) { public static class FakeCachingTierProvider implements CachingTier.Provider { @Override + @SuppressWarnings("unchecked") public CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { return mock(CachingTier.class); } @@ -387,6 +389,7 @@ public void stop() { public static class FakeAuthoritativeTierProvider implements AuthoritativeTier.Provider { @Override + @SuppressWarnings("unchecked") public AuthoritativeTier createAuthoritativeTier(Store.Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { return mock(AuthoritativeTier.class); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java new file mode 100644 index 0000000000..f2ee0124e2 --- /dev/null +++ b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java @@ -0,0 +1,129 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.util; + +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +import java.io.File; +import java.io.FilenameFilter; + +/** + * Matchers for file locks and existence in the persistence directory. 
+ * + * @author RKAV + */ +public class FileExistenceMatchers { + + private static class DirectoryIsLockedMatcher extends TypeSafeMatcher { + @Override + protected boolean matchesSafely(File dir) { + File[] files = dir.listFiles(new FilenameFilter() { + @Override + public boolean accept(File dir, String name) { + return name.equals(".lock"); + } + }); + return files != null && files.length == 1; + } + + @Override + public void describeMismatchSafely(File item, Description mismatchDescription) { + mismatchDescription.appendValue(item) + .appendText(" doesn't contain a .lock file"); + } + + @Override + public void describeTo(Description description) { + description.appendText("a .lock file in the directory"); + } + } + + private static class ContainsCacheDirectoryMatcher extends TypeSafeMatcher { + + private String parentDirectory; + private String startWith; + + public ContainsCacheDirectoryMatcher(String safeSpaceOwner, String cacheAlias) { + this.parentDirectory = safeSpaceOwner; + this.startWith = cacheAlias + "_"; + } + + @Override + protected boolean matchesSafely(File item) { + // The directory layout is that there will be a directory named 'file' + // If the cache directory exists, it will contain a directory starting with 'cacheAlias_' + + File file = new File(item, parentDirectory); + if(!file.exists() || !file.isAbsolute()) { + return false; + } + + File[] files = file.listFiles(new FilenameFilter() { + @Override + public boolean accept(File dir, String name) { + return name.startsWith(startWith); + } + }); + + return files != null && files.length == 1 && files[0].isDirectory(); + } + + @Override + public void describeMismatchSafely(File item, Description mismatchDescription) { + mismatchDescription.appendValue(item) + .appendText(" doesn't contain a file starting with " + startWith); + } + + @Override + public void describeTo(Description description) { + description.appendText("contains a file starting with '" + startWith + "'"); + } + } + + /** + * Matcher checking
if the persistence directory is locked by a cache manager + * + * @return the matcher + */ + public static Matcher isLocked() { + return new DirectoryIsLockedMatcher(); + } + + /** + * Matcher checking if a cache directory starting with this name exists in the 'file' safe space + * + * @param cacheAlias cache alias that will be the prefix of the cache directory + * @return the matcher + */ + public static Matcher containsCacheDirectory(String cacheAlias) { + return new ContainsCacheDirectoryMatcher("file", cacheAlias); + } + + /** + * Matcher checking if a cache directory starting with this name exists within the given safe space + * + * @param safeSpaceOwner name of the safe space owner. It is also the name of the safe space root directory + * @param cacheAlias cache alias that will be the prefix of the cache directory + * @return the matcher + */ + public static Matcher containsCacheDirectory(String safeSpaceOwner, String cacheAlias) { + return new ContainsCacheDirectoryMatcher(safeSpaceOwner, cacheAlias); + } + +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java new file mode 100644 index 0000000000..67f03f6464 --- /dev/null +++ b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java @@ -0,0 +1,97 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.ehcache.impl.internal.util; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.File; +import java.io.IOException; + +import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; +import static org.hamcrest.CoreMatchers.not; +import static org.junit.Assert.assertThat; + +/** + * @author Henri Tremblay + */ +public class FileExistenceMatchersTest { + + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + @Test + public void directoryIsLocked() throws Exception { + File dir = folder.newFolder(); + + assertThat(dir, not(isLocked())); + } + + @Test + public void directoryIsNotLocked() throws Exception { + File dir = folder.newFolder(); + File lock = new File(dir, ".lock"); + lock.createNewFile(); + + assertThat(dir, isLocked()); + } + + @Test + public void containsCacheDirectory_noFileDir() throws IOException { + File dir = folder.newFolder(); + + assertThat(dir, not(containsCacheDirectory("test123"))); + } + + @Test + public void containsCacheDirectory_noCacheDir() throws IOException { + File dir = folder.newFolder(); + File file = new File(dir, "file"); + file.mkdir(); + + assertThat(dir, not(containsCacheDirectory("test123"))); + } + + @Test + public void containsCacheDirectory_moreThanOneCacheDir() throws IOException { + File dir = folder.newFolder(); + File file = new File(dir, "file"); + file.mkdir(); + new File(file, "test123_aaa").mkdir(); + new File(file, "test123_bbb").mkdir(); + + assertThat(dir, not(containsCacheDirectory("test123"))); + } + + @Test + public void containsCacheDirectory_existing() throws IOException { + File dir = folder.newFolder(); + new File(dir, "file/test123_aaa").mkdirs(); + + assertThat(dir, containsCacheDirectory("test123")); + } + + @Test + public void containsCacheDirectory_withSafeSpaceExisting() throws IOException { + File dir = 
folder.newFolder(); + new File(dir, "safespace/test123_aaa").mkdirs(); + + assertThat(dir, containsCacheDirectory("safespace", "test123")); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java b/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java new file mode 100644 index 0000000000..41486008cb --- /dev/null +++ b/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java @@ -0,0 +1,138 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.impl.persistence; + +import org.ehcache.CachePersistenceException; +import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceProvider; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.runners.Enclosed; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * @author Henri Tremblay + */ +@RunWith(Enclosed.class) +public class DefaultDiskResourceServiceTest { + + public static abstract class AbstractDefaultDiskResourceServiceTest { + + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + protected DefaultDiskResourceService service = new DefaultDiskResourceService(); + @SuppressWarnings("unchecked") + protected ServiceProvider serviceProvider = mock(ServiceProvider.class); + + @Before + public void setup() { + service.start(serviceProvider); + } + + @After + public void tearDown() { + service.stop(); + } + + } + + public static class WithPersistenceService extends AbstractDefaultDiskResourceServiceTest { + + LocalPersistenceService persistenceService = mock(LocalPersistenceService.class); + + @Before + public void setup() { + when(serviceProvider.getService(LocalPersistenceService.class)).thenReturn(persistenceService); + super.setup(); + } + + @Test + public void testHandlesResourceType() { + assertThat(service.handlesResourceType(ResourceType.Core.DISK)).isTrue(); + } + + @Test + public void testDestroyAll() { + service.destroyAll(); + verify(persistenceService).destroyAll(DefaultDiskResourceService.PERSISTENCE_SPACE_OWNER); + } + + @Test + public void testDestroy() throws 
CachePersistenceException { + service.destroy("test"); // should do nothing + } + + // Some tests still missing here + } + + public static class WithoutPersistenceService extends AbstractDefaultDiskResourceServiceTest { + + @Test + public void testHandlesResourceType() { + assertThat(service.handlesResourceType(ResourceType.Core.DISK)).isFalse(); + } + + @Test + public void testDestroyAll() { + service.destroyAll(); // should do nothing + } + + @Test + public void testDestroy() throws CachePersistenceException { + service.destroy("test"); // should do nothing + } + + @Test + public void testCreatePersistenceContextWithin() throws CachePersistenceException { + expectedException.expect(CachePersistenceException.class); + expectedException.expectMessage("Unknown space: null"); + service.createPersistenceContextWithin(null, "test"); + } + + @Test + public void testGetPersistenceSpaceIdentifier() throws CachePersistenceException { + assertThat(service.getPersistenceSpaceIdentifier("test", null)).isNull(); + } + + + @Test + public void testGetStateRepositoryWithin() throws CachePersistenceException { + expectedException.expect(CachePersistenceException.class); + expectedException.expectMessage("Unknown space: null"); + assertThat(service.getStateRepositoryWithin(null, "test")).isNull(); + } + + @Test + public void testReleasePersistenceSpaceIdentifier() throws CachePersistenceException { + expectedException.expect(CachePersistenceException.class); + expectedException.expectMessage("Unknown space: null"); + assertThat(service.getStateRepositoryWithin(null, "test")).isNull(); + } + + } + +} diff --git a/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java b/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java index 03a5b4b9d9..4f07b1bae8 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java +++ 
b/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java @@ -17,29 +17,25 @@ package org.ehcache.impl.persistence; import org.ehcache.CachePersistenceException; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; -import org.ehcache.spi.persistence.PersistableResourceService; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; -import org.mockito.Mockito; import java.io.File; import java.io.IOException; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.junit.Assert.fail; import static org.junit.Assume.assumeTrue; -import static org.mockito.Mockito.never; public class DefaultLocalPersistenceServiceTest { @@ -111,90 +107,39 @@ public void testLocksDirectoryAndUnlocks() throws IOException { } @Test - public void testExclusiveLock() throws IOException { - DefaultLocalPersistenceService service1 = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); - DefaultLocalPersistenceService service2 = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); - service1.start(null); - - // We should not be able to lock the same directory twice - 
// And we should receive a meaningful exception about it - expectedException.expectMessage("Couldn't lock rootDir: " + testFolder.getAbsolutePath()); - service2.start(null); - } - - @Test - public void testCantDestroyAllIfServiceNotStarted() { - DefaultLocalPersistenceService service = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); - expectedException.expect(IllegalStateException.class); - expectedException.expectMessage("Service must be started"); - service.destroyAll(); - } - - @Test - public void testDestroyWhenStarted() throws CachePersistenceException { - DefaultLocalPersistenceService service = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); + public void testPhysicalDestroy() throws IOException, CachePersistenceException { + final File f = folder.newFolder("testPhysicalDestroy"); + final DefaultLocalPersistenceService service = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(f)); service.start(null); - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder - .newCacheConfigurationBuilder(Object.class, Object.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB)) - .build(); - PersistableResourceService.PersistenceSpaceIdentifier id = - service.getPersistenceSpaceIdentifier("test", cacheConfiguration); - - service = Mockito.spy(service); - service.destroy("test"); - - // Make sure we haven't tried to start the service - Mockito.verify(service, never()).internalStart(); + assertThat(service.getLockFile().exists(), is(true)); + assertThat(f, isLocked()); - // Make sure we are still started - assertThat(service.isStarted(), is(true)); + LocalPersistenceService.SafeSpaceIdentifier id = service.createSafeSpaceIdentifier("test", "test"); + service.createSafeSpace(id); - // Make sure the cache was deleted - expectedException.expect(CachePersistenceException.class); - service.getStateRepositoryWithin(id, "test"); - } + assertThat(f, 
containsCacheDirectory("test", "test")); - @Test - public void testDestroyWhenStopped() throws CachePersistenceException { - DefaultLocalPersistenceService service = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); - service.start(null); + // try to destroy the physical space without the logical id + LocalPersistenceService.SafeSpaceIdentifier newId = service.createSafeSpaceIdentifier("test", "test"); + service.destroySafeSpace(newId, false); - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder - .newCacheConfigurationBuilder(Object.class, Object.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB)) - .build(); - PersistableResourceService.PersistenceSpaceIdentifier id = - service.getPersistenceSpaceIdentifier("test", cacheConfiguration); + assertThat(f, not(containsCacheDirectory("test", "test"))); service.stop(); - service = Mockito.spy(service); - service.destroy("test"); - - // Make sure it was started - Mockito.verify(service).internalStart(); - - // Make sure the service is still stopped - assertThat(service.isStarted(), is(false)); - - // Make sure the cache was deleted - expectedException.expect(CachePersistenceException.class); - service.getStateRepositoryWithin(id, "test"); + assertThat(f, not(isLocked())); } @Test - public void testIsStarted() { - DefaultLocalPersistenceService service = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); - assertThat(service.isStarted(), is(false)); - service.start(null); - assertThat(service.isStarted(), is(true)); - service.stop(); - assertThat(service.isStarted(), is(false)); - service.startForMaintenance(null); - assertThat(service.isStarted(), is(true)); - service.stop(); - assertThat(service.isStarted(), is(false)); + public void testExclusiveLock() throws IOException { + DefaultLocalPersistenceService service1 = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); + 
DefaultLocalPersistenceService service2 = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); + service1.start(null); + + // We should not be able to lock the same directory twice + // And we should receive a meaningful exception about it + expectedException.expectMessage("Couldn't lock rootDir: " + testFolder.getAbsolutePath()); + service2.start(null); } } diff --git a/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java b/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java index 3b4c70063f..c2484280a2 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java +++ b/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java @@ -16,6 +16,8 @@ package org.ehcache.impl.persistence; +import org.ehcache.impl.serialization.TransientStateHolder; +import org.ehcache.spi.persistence.StateHolder; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -26,8 +28,6 @@ import java.io.FilenameFilter; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; import static org.hamcrest.Matchers.is; import static org.junit.Assert.*; @@ -37,31 +37,33 @@ */ public class FileBasedStateRepositoryTest { - private static String MAP_FILE_NAME = "map-0-myMap.bin"; + private static String HOLDER_FILE_NAME = "holder-0-myHolder.bin"; @Rule public TemporaryFolder folder = new TemporaryFolder(); @Test - public void testMapSave() throws Exception { + public void testHolderSave() throws Exception { File directory = folder.newFolder("testSave"); FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); - String mapName = "myMap"; - ConcurrentMap myMap = stateRepository.getPersistentConcurrentMap(mapName, Long.class, String.class); + String holderName = "myHolder"; + StateHolder myHolder = 
stateRepository.getPersistentStateHolder(holderName, Long.class, String.class); - myMap.put(42L, "TheAnswer!"); + myHolder.putIfAbsent(42L, "TheAnswer!"); stateRepository.close(); - FileInputStream fis = new FileInputStream(new File(directory, MAP_FILE_NAME)); + FileInputStream fis = new FileInputStream(new File(directory, HOLDER_FILE_NAME)); try { ObjectInputStream ois = new ObjectInputStream(fis); try { String name = (String) ois.readObject(); - assertThat(name, is(mapName)); + assertThat(name, is(holderName)); FileBasedStateRepository.Tuple loadedTuple = (FileBasedStateRepository.Tuple) ois.readObject(); assertThat(loadedTuple.index, is(0)); - assertThat((ConcurrentMap)loadedTuple.map, is(myMap)); + @SuppressWarnings("unchecked") + StateHolder stateHolder = (StateHolder) loadedTuple.holder; + assertThat(stateHolder, is(myHolder)); } finally { ois.close(); } @@ -71,17 +73,17 @@ public void testMapSave() throws Exception { } @Test - public void testMapLoad() throws Exception { + public void testHolderLoad() throws Exception { File directory = folder.newFolder("testLoad"); - String mapName = "myMap"; - ConcurrentMap map = new ConcurrentHashMap(); - map.put(42L, "Again? That's not even funny anymore!!"); + String holderName = "myHolder"; + StateHolder map = new TransientStateHolder(); + map.putIfAbsent(42L, "Again? 
That's not even funny anymore!!"); - FileOutputStream fos = new FileOutputStream(new File(directory, MAP_FILE_NAME)); + FileOutputStream fos = new FileOutputStream(new File(directory, HOLDER_FILE_NAME)); try { ObjectOutputStream oos = new ObjectOutputStream(fos); try { - oos.writeObject(mapName); + oos.writeObject(holderName); oos.writeObject(new FileBasedStateRepository.Tuple(0, map)); } finally { oos.close(); @@ -91,22 +93,22 @@ public void testMapLoad() throws Exception { } FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); - ConcurrentMap myMap = stateRepository.getPersistentConcurrentMap(mapName, Long.class, String.class); + StateHolder myHolder = stateRepository.getPersistentStateHolder(holderName, Long.class, String.class); - assertThat(myMap, is(map)); + assertThat(myHolder, is(map)); } @Test public void testIndexProperlySetAfterLoad() throws Exception { File directory = folder.newFolder("testIndexAfterLoad"); - String mapName = "myMap"; + String holderName = "myHolder"; - FileOutputStream fos = new FileOutputStream(new File(directory, MAP_FILE_NAME)); + FileOutputStream fos = new FileOutputStream(new File(directory, HOLDER_FILE_NAME)); try { ObjectOutputStream oos = new ObjectOutputStream(fos); try { - oos.writeObject(mapName); - oos.writeObject(new FileBasedStateRepository.Tuple(0, new ConcurrentHashMap())); + oos.writeObject(holderName); + oos.writeObject(new FileBasedStateRepository.Tuple(0, new TransientStateHolder())); } finally { oos.close(); } @@ -115,16 +117,16 @@ public void testIndexProperlySetAfterLoad() throws Exception { } FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); - stateRepository.getPersistentConcurrentMap("otherMap", Long.class, Long.class); + stateRepository.getPersistentStateHolder("otherHolder", Long.class, Long.class); stateRepository.close(); File[] files = directory.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { - 
return name.contains("otherMap") && name.contains("-1-"); + return name.contains("otherHolder") && name.contains("-1-"); } }); assertThat(files.length, is(1)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java b/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java index a39b1d6000..a81c79ba38 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.hamcrest.core.Is; import org.junit.Assert; import org.junit.Test; @@ -41,7 +41,9 @@ public class AddedFieldTest { @Test public void addingSerializableField() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_write.class, IncompatibleSerializable_write.class, Serializable_write.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(A_write.class)).newInstance(); @@ -58,7 +60,9 @@ public void addingSerializableField() throws Exception { @Test public void addingExternalizableField() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(B_write.class, Externalizable_write.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(B_write.class)).newInstance(); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java 
b/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java index 0edf4f059b..57266bc738 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Test; import java.io.Serializable; @@ -35,7 +35,9 @@ public class AddedSuperClassTest { @Test public void testAddedSuperClass() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_2.class, AddedSuperClass_Hidden.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(A_2.class)).newInstance(); @@ -51,7 +53,9 @@ public void testAddedSuperClass() throws Exception { @Test public void testAddedSuperClassNotHidden() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_2.class, AddedSuperClass_Hidden.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(A_2.class)).newInstance(); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java b/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java index 05fb13465d..631a42ad1c 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import 
org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Assert; import org.junit.Test; @@ -37,7 +37,9 @@ public class ArrayPackageScopeTest { @Test public void testArrayPackageScope() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Foo_A.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java b/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java index 6921bc5383..ace8b0c6d4 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java @@ -23,9 +23,7 @@ import java.util.HashMap; import java.util.Random; -import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.spi.serialization.Serializer; - +import org.ehcache.spi.serialization.StatefulSerializer; import org.hamcrest.core.Is; import org.hamcrest.core.IsEqual; import org.hamcrest.core.IsNot; @@ -42,7 +40,9 @@ public class BasicSerializationTest { @Test public void testSimpleObject() throws ClassNotFoundException { - Serializer test = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer test = new CompactJavaSerializer(null); + test.init(new TransientStateRepository()); String input = ""; String result = (String) test.read(test.serialize(input)); @@ -53,7 +53,9 @@ public void testSimpleObject() throws ClassNotFoundException { @Test public void testComplexObject() throws ClassNotFoundException { - Serializer test = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer test = new CompactJavaSerializer(null); + test.init(new TransientStateRepository()); HashMap 
input = new HashMap(); input.put(1, "one"); @@ -74,7 +76,9 @@ public void testComplexObject() throws ClassNotFoundException { @Test public void testPrimitiveClasses() throws ClassNotFoundException { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); Class[] out = (Class[]) s.read(s.serialize(PRIMITIVE_CLASSES)); @@ -88,7 +92,9 @@ public void testProxyInstance() throws ClassNotFoundException { int foo = rand.nextInt(); float bar = rand.nextFloat(); - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); Object proxy = s.read(s.serialize((Serializable) Proxy.newProxyInstance(BasicSerializationTest.class.getClassLoader(), new Class[]{Foo.class, Bar.class}, new Handler(foo, bar)))); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java index 7e2399c3e8..df72be4c0b 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java @@ -21,9 +21,7 @@ import static org.ehcache.impl.serialization.SerializerTestUtilities.popTccl; import static org.ehcache.impl.serialization.SerializerTestUtilities.pushTccl; -import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.spi.serialization.Serializer; - +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Assert; import org.junit.Test; @@ -38,7 +36,9 @@ private static ClassLoader newLoader() { @Test public void testThreadContextLoader() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + 
StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loader = newLoader(); ByteBuffer encoded = serializer.serialize((Serializable) loader.loadClass(Foo.class.getName()).newInstance()); @@ -54,7 +54,9 @@ public void testThreadContextLoader() throws Exception { @Test public void testExplicitLoader() throws Exception { ClassLoader loader = newLoader(); - Serializer serializer = new CompactJavaSerializer(loader); + @SuppressWarnings("unchecked") + StatefulSerializer serializer = new CompactJavaSerializer(loader); + serializer.init(new TransientStateRepository()); ByteBuffer encoded = serializer.serialize((Serializable) loader.loadClass(Foo.class.getName()).newInstance()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java index c53e9ca50b..2555a497ec 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java @@ -23,9 +23,7 @@ import java.util.ArrayList; import java.util.List; -import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.spi.serialization.Serializer; - +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -54,7 +52,9 @@ public void createSpecialObject() throws Exception { @Test public void testClassUnloadingAfterSerialization() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); serializer.serialize(specialObject); @@ -74,7 +74,9 @@ public void testClassUnloadingAfterSerialization() throws Exception { public 
void testClassUnloadingAfterSerializationAndDeserialization() throws Exception { Thread.currentThread().setContextClassLoader(specialObject.getClass().getClassLoader()); try { - Serializer serializer = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); specialObject = serializer.read(serializer.serialize(specialObject)); Assert.assertEquals(SpecialClass.class.getName(), specialObject.getClass().getName()); Assert.assertNotSame(SpecialClass.class, specialObject.getClass()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializerTest.java b/impl/src/test/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializerTest.java deleted file mode 100644 index be58b65d4b..0000000000 --- a/impl/src/test/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializerTest.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.ehcache.impl.serialization; - -import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -import java.io.File; -import java.nio.ByteBuffer; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.*; - -/** - * CompactPersistentJavaSerializerTest - */ -public class CompactPersistentJavaSerializerTest { - - @Rule - public TemporaryFolder temporaryFolder = new TemporaryFolder(); - - @Test - public void testProperlyInitializesEncodingIndexOnLoad() throws Exception { - final File folder = temporaryFolder.newFolder("test-cpjs"); - FileBasedPersistenceContext persistenceContext = new FileBasedPersistenceContext() { - @Override - public File getDirectory() { - return folder; - } - }; - - CompactPersistentJavaSerializer serializer = new CompactPersistentJavaSerializer(getClass().getClassLoader(), persistenceContext); - ByteBuffer integerBytes = serializer.serialize(10); - serializer.close(); - - serializer = new CompactPersistentJavaSerializer(getClass().getClassLoader(), persistenceContext); - ByteBuffer longBytes = serializer.serialize(42L); - - assertThat((Integer) serializer.read(integerBytes), is(10)); - assertThat((Long) serializer.read(longBytes), is(42L)); - } - -} \ No newline at end of file diff --git a/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java b/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java index 0311436a5b..c2562dfe11 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.hamcrest.core.IsSame; import org.junit.Assert; import org.junit.Test; @@ -36,7 +36,9 @@ public class EnumTest { @Test public void 
basicInstanceSerialization() throws ClassNotFoundException { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); Assert.assertThat(s.read(s.serialize(People.Alice)), IsSame.sameInstance(People.Alice)); Assert.assertThat(s.read(s.serialize(People.Bob)), IsSame.sameInstance(People.Bob)); @@ -45,7 +47,9 @@ public void basicInstanceSerialization() throws ClassNotFoundException { @Test public void classSerialization() throws ClassNotFoundException { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); Assert.assertThat(s.read(s.serialize(Enum.class)), IsSame.sameInstance(Enum.class)); Assert.assertThat(s.read(s.serialize(Dogs.Handel.getClass())), IsSame.sameInstance(Dogs.Handel.getClass())); @@ -55,7 +59,9 @@ public void classSerialization() throws ClassNotFoundException { @Test public void shiftingInstanceSerialization() throws ClassNotFoundException { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader wLoader = createClassNameRewritingLoader(Foo_W.class); ClassLoader rLoader = createClassNameRewritingLoader(Foo_R.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java b/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java index 21d83f4c6d..f609534878 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Test; import 
java.io.Serializable; @@ -35,7 +35,9 @@ public class FieldTypeChangeTest { @Test public void fieldTypeChangeWithOkayObject() throws Exception { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderW = createClassNameRewritingLoader(Foo_W.class); Serializable a = (Serializable) loaderW.loadClass(newClassName(Foo_W.class)).getConstructor(Object.class).newInstance("foo"); @@ -50,7 +52,9 @@ public void fieldTypeChangeWithOkayObject() throws Exception { @Test public void fieldTypeChangeWithIncompatibleObject() throws Exception { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderW = createClassNameRewritingLoader(Foo_W.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java b/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java index 3c0f937c89..13ee6e60f3 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Test; import java.io.IOException; @@ -37,7 +37,9 @@ public class GetFieldTest { @Test public void testGetField() throws Exception { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Foo_A.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(Foo_A.class)).newInstance(); diff --git 
a/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java b/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java index 1c6dcf679c..93ed704f93 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Assert; import org.junit.Test; @@ -39,7 +39,9 @@ public class PutFieldTest { @Test public void testWithAllPrimitivesAndString() throws Exception { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Foo_A.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(Foo_A.class)).newInstance(); @@ -65,7 +67,9 @@ public void testWithAllPrimitivesAndString() throws Exception { @Test public void testWithTwoStrings() throws Exception { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Bar_A.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(Bar_A.class)).newInstance(); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java b/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java index d9ef48d9e2..1742c8bb5d 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import 
org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Assert; import org.junit.Test; @@ -37,7 +37,9 @@ public class ReadObjectNoDataTest { @Test public void test() throws Exception { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderW = createClassNameRewritingLoader(C_W.class, B_W.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java b/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java index 18a0805aaf..6bddd70c76 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java @@ -20,9 +20,7 @@ import java.nio.ByteBuffer; import java.util.Date; -import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.spi.serialization.Serializer; - +import org.ehcache.spi.serialization.StatefulSerializer; import org.hamcrest.core.Is; import org.junit.Assert; import org.junit.Test; @@ -35,7 +33,9 @@ public class SerializeAfterEvolutionTest { @Test public void test() throws Exception { - Serializer s = new CompactJavaSerializer(null); + @SuppressWarnings("unchecked") + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_old.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(A_old.class)).newInstance(); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java b/impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java index 41505c4c1f..1fc1413232 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java +++ 
b/impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java @@ -16,11 +16,9 @@ package org.ehcache.impl.serialization; +import org.ehcache.spi.persistence.StateHolder; import org.junit.Test; -import java.io.Serializable; -import java.util.concurrent.ConcurrentMap; - import static org.hamcrest.Matchers.is; import static org.junit.Assert.*; @@ -32,11 +30,11 @@ public class TransientStateRepositoryTest { @Test public void testRemembersCreatedMaps() throws Exception { TransientStateRepository repository = new TransientStateRepository(); - ConcurrentMap test = repository.getPersistentConcurrentMap("test", Long.class, String.class); - test.put(42L, "Again??"); + StateHolder test = repository.getPersistentStateHolder("test", Long.class, String.class); + test.putIfAbsent(42L, "Again??"); - test = repository.getPersistentConcurrentMap("test", Long.class, String.class); + test = repository.getPersistentStateHolder("test", Long.class, String.class); assertThat(test.get(42L), is("Again??")); } -} \ No newline at end of file +} diff --git a/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java b/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java index 812079d8ca..d7c56ba0c5 100644 --- a/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java @@ -131,7 +131,7 @@ public void testIdentityCopier() throws Exception { @Test public void testSerializingCopier() throws Exception { CacheConfiguration cacheConfiguration = baseConfig - .add(new DefaultCopierConfiguration((Class)SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .add(new DefaultSerializerConfiguration(PersonSerializer.class, DefaultSerializerConfiguration.Type.VALUE)) .build(); diff --git 
a/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java b/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java index 972551946c..511d6b0505 100644 --- a/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java +++ b/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java @@ -50,6 +50,7 @@ import java.util.Set; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsCollectionContaining.hasItem; @@ -525,7 +526,7 @@ public int rank(final Set> resourceTypes, final Collection Store createStore(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - ServiceLocator serviceLocator = new ServiceLocator(new DefaultSerializationProvider(null)); + ServiceLocator serviceLocator = dependencySet().with(new DefaultSerializationProvider(null)).build(); try { serviceLocator.startAllServices(); } catch (Exception e) { diff --git a/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java b/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java index a5a7b53761..752034b871 100644 --- a/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java @@ -404,7 +404,7 @@ public static class Listener implements CacheEventListener { private HashMap eventTypeHashMap = new HashMap(); @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { Logger logger = LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EventNotificationTest"); logger.info(event.getType().toString()); eventTypeHashMap.put(event.getType(), eventCounter.get()); @@ -440,7 
+440,7 @@ private void resetLatchCount(int operations) { } @Override - public void onEvent(final CacheEvent event) { + public void onEvent(final CacheEvent event) { Logger logger = LoggerFactory.getLogger(EventNotificationTest.class + "-" + "EventNotificationTest"); logger.info(event.getType().toString()); if(event.getType() == EventType.EVICTED){ diff --git a/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java b/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java index db67960437..2fddbd6d2d 100644 --- a/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java @@ -64,7 +64,7 @@ public class ExpiryEventsTest { private static final CacheConfigurationBuilder byValueCacheConfigBuilder = byRefCacheConfigBuilder.add(new DefaultCopierConfiguration( - (Class)SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE));; + SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE));; private static final TestTimeSource testTimeSource = new TestTimeSource(); @@ -179,7 +179,7 @@ private void performActualTest(Cache testCache) { testCache.getRuntimeConfiguration().registerCacheEventListener(new CacheEventListener() { @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { expiredKeys.add(event.getKey()); } }, EventOrdering.ORDERED, EventFiring.SYNCHRONOUS, EnumSet.of(EventType.EXPIRED)); diff --git a/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java b/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java new file mode 100644 index 0000000000..94d8c97334 --- /dev/null +++ b/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java @@ -0,0 +1,181 @@ +/* + * Copyright Terracotta, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.integration; + +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.integration.domain.Person; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.spi.serialization.StatefulSerializer; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.rules.TemporaryFolder; + +import java.nio.ByteBuffer; + +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.persistence; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +public class SerializersTest { + + @Rule + public TemporaryFolder temporaryFolder = new TemporaryFolder(); + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + @Test + public void testStatefulSerializer() throws Exception { + StatefulSerializerImpl 
serializer = new StatefulSerializerImpl(); + testSerializerWithByRefHeapCache(serializer); + assertThat(serializer.initCount, is(0)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithByValueHeapCache(serializer); + assertThat(serializer.initCount, is(1)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithOffheapCache(serializer); + assertThat(serializer.initCount, is(1)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithHeapOffheapCache(serializer); + assertThat(serializer.initCount, is(1)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithDiskCache(serializer); + assertThat(serializer.initCount, is(1)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithHeapDiskCache(serializer); + assertThat(serializer.initCount, is(1)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithThreeTierCache(serializer); + assertThat(serializer.initCount, is(1)); + + } + + private void testSerializerWithByRefHeapCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .withCache("heapByRefCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithByValueHeapCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .withCache("heapByValueCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10)) + .withKeyCopier(SerializingCopier.asCopierClass()) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithOffheapCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .withCache("offheapCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().offheap(2, MemoryUnit.MB)) + 
.withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithHeapOffheapCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .withCache("heapOffheapCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10).offheap(2, MemoryUnit.MB)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithDiskCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .with(persistence(temporaryFolder.newFolder().getAbsolutePath())) + .withCache("diskCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().disk(8, MemoryUnit.MB, true)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithHeapDiskCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .with(persistence(temporaryFolder.newFolder().getAbsolutePath())) + .withCache("heapDiskCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10).disk(8, MemoryUnit.MB, true)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithThreeTierCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .with(persistence(temporaryFolder.newFolder().getAbsolutePath())) + .withCache("heapOffheapDiskCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10).offheap(2, MemoryUnit.MB).disk(8, MemoryUnit.MB, true)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + public static class StatefulSerializerImpl implements StatefulSerializer { + + private int initCount = 0; + + @Override + public void init(final StateRepository stateRepository) { + initCount++; + } + + @Override + public 
ByteBuffer serialize(final T object) throws SerializerException { + return null; + } + + @Override + public T read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return null; + } + + @Override + public boolean equals(final T object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return false; + } + } + +} diff --git a/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java b/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java index ae9b914609..acb04c37ce 100644 --- a/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java @@ -69,7 +69,7 @@ public void test1TierStoreStatsAvailableInContextManager() throws Exception { assertNull(cache.get(0L)); - long onHeapMisses = StoreStatisticsTest.findStat(cache, "get", "onheap-store").count(StoreOperationOutcomes.GetOutcome.MISS); + long onHeapMisses = StoreStatisticsTest.findStat(cache, "get", "OnHeap").count(StoreOperationOutcomes.GetOutcome.MISS); assertThat(onHeapMisses, equalTo(1L)); cacheManager.close(); @@ -90,9 +90,9 @@ public void test2TiersStoreStatsAvailableInContextManager() throws Exception { assertNull(cache.get(0L)); - long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "onheap-store").count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); + long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "OnHeap").count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); assertThat(onHeapMisses, equalTo(1L)); - long offheapMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "local-offheap").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); + long offheapMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "OffHeap").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); 
assertThat(offheapMisses, equalTo(1L)); cacheManager.close(); @@ -115,11 +115,11 @@ public void test3TiersStoreStatsAvailableInContextManager() throws Exception { assertNull(cache.get(0L)); - long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "onheap-store").count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); + long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "OnHeap").count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); assertThat(onHeapMisses, equalTo(1L)); - long offHeapMisses = StoreStatisticsTest.findStat(cache, "getAndRemove", "local-offheap").count(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS); + long offHeapMisses = StoreStatisticsTest.findStat(cache, "getAndRemove", "OffHeap").count(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS); assertThat(offHeapMisses, equalTo(1L)); - long diskMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "local-disk").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); + long diskMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "Disk").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); assertThat(diskMisses, equalTo(1L)); cacheManager.close(); diff --git a/management/build.gradle b/management/build.gradle index aaf037ec6f..b9a7240d73 100644 --- a/management/build.gradle +++ b/management/build.gradle @@ -17,34 +17,25 @@ apply plugin: EhDeploy dependencies { + // optional: if we want xml config compileOnly project(':xml') + + // optional: if we want to use the clustered management layer compileOnly project(':clustered:client') + compileOnly "org.terracotta:entity-client-api:$parent.entityApiVersion" + compileOnly "org.terracotta.management.dist:management-client:$parent.managementVersion" compile project(':api') compile project(':core') compile project(':impl') - compile "org.terracotta.management:management-entity-client:$parent.managementVersion" + compile 
("org.terracotta.management:management-registry:$parent.managementVersion") { + exclude group: 'org.terracotta', module: 'statistics' + } testCompile project(':xml') - testCompile project(':clustered:client') - testCompile project(':clustered:server') - testCompile "org.terracotta:entity-test-lib:$parent.entityTestLibVersion" - testCompile "org.terracotta:passthrough-server:$parent.entityTestLibVersion" - testCompile "org.terracotta.management:monitoring-service:$parent.managementVersion" - testCompile "org.terracotta.management:monitoring-service-entity:$parent.managementVersion" - testCompile "org.terracotta.management:management-entity-server:$parent.managementVersion" - testCompile "org.terracotta.entities:clustered-map-server:$parent.terracottaPlatformVersion" testCompile "com.fasterxml.jackson.core:jackson-databind:2.7.5" } -compileTestJava { - sourceCompatibility = 1.8 - targetCompatibility = 1.8 - options.fork = true; - options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') -} - -test { - executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'java') - environment 'JAVA_HOME', MavenToolchain.javaHome(JavaVersion.VERSION_1_8) +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] } diff --git a/management/src/main/java/org/ehcache/management/CollectorService.java b/management/src/main/java/org/ehcache/management/CollectorService.java index 9acf6f126b..69516b5f2d 100644 --- a/management/src/main/java/org/ehcache/management/CollectorService.java +++ b/management/src/main/java/org/ehcache/management/CollectorService.java @@ -19,7 +19,6 @@ import org.ehcache.spi.service.Service; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.registry.collect.StatisticCollector; import java.util.Collection; @@ -28,7 +27,7 @@ *

* The collecting time is automatically calculated from {@link StatisticsProviderConfiguration#timeToDisable()} */ -public interface CollectorService extends StatisticCollector, Service { +public interface CollectorService extends Service { interface Collector { diff --git a/management/src/main/java/org/ehcache/management/SharedManagementService.java b/management/src/main/java/org/ehcache/management/SharedManagementService.java index f2a76eb007..aad2eef48d 100644 --- a/management/src/main/java/org/ehcache/management/SharedManagementService.java +++ b/management/src/main/java/org/ehcache/management/SharedManagementService.java @@ -44,6 +44,6 @@ public interface SharedManagementService extends CapabilityManagementSupport, Se * * @return a map of capabilities, where the key is the alias of the cache manager */ - Map> getCapabilities(); + Map> getCapabilitiesByContext(); } diff --git a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java b/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java index 0313b9303f..cde8cd0c1d 100644 --- a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java +++ b/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java @@ -16,8 +16,10 @@ package org.ehcache.management.cluster; import org.ehcache.Cache; +import org.ehcache.StateTransitionException; import org.ehcache.Status; import org.ehcache.clustered.client.service.ClientEntityFactory; +import org.ehcache.clustered.client.service.ClusteringService; import org.ehcache.clustered.client.service.EntityService; import org.ehcache.core.events.CacheManagerListener; import org.ehcache.core.spi.service.CacheManagerProviderService; @@ -30,6 +32,7 @@ import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; +import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import org.terracotta.exception.EntityAlreadyExistsException; import org.terracotta.exception.EntityNotFoundException; @@ -48,9 +51,11 @@ import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; -@ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class, TimeSourceService.class, ManagementRegistryService.class, EntityService.class}) +@ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class, TimeSourceService.class, ManagementRegistryService.class, EntityService.class, ClusteringService.class}) public class DefaultClusteringManagementService implements ClusteringManagementService, CacheManagerListener, CollectorService.Collector { + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultClusteringManagementService.class); + private final ClusteringManagementServiceConfiguration configuration; private volatile ManagementRegistryService managementRegistryService; @@ -59,6 +64,7 @@ public class DefaultClusteringManagementService implements ClusteringManagementS private volatile ClientEntityFactory managementAgentEntityFactory; private volatile InternalCacheManager cacheManager; private volatile ExecutorService managementCallExecutor; + private volatile ClusteringService clusteringService; public DefaultClusteringManagementService() { this(new DefaultClusteringManagementServiceConfiguration()); @@ -70,6 +76,7 @@ public DefaultClusteringManagementService(ClusteringManagementServiceConfigurati @Override public void start(ServiceProvider serviceProvider) { + this.clusteringService = serviceProvider.getService(ClusteringService.class); this.managementRegistryService = serviceProvider.getService(ManagementRegistryService.class); this.cacheManager = serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); // get an ordered executor to keep ordering of management call requests @@ -92,12 +99,16 @@ public void start(ServiceProvider serviceProvider) { 
@Override public void stop() { - collectorService.stop(); + if(collectorService != null) { + collectorService.stop(); + } shutdownNow(managementCallExecutor); // nullify so that no further actions are done with them (see null-checks below) - managementAgentService.close(); - managementRegistryService = null; + if(managementAgentService != null) { + managementAgentService.close(); + managementRegistryService = null; + } managementAgentService = null; managementCallExecutor = null; } @@ -133,14 +144,22 @@ public void stateTransition(Status from, Status to) { } managementAgentService = new ManagementAgentService(managementAgentEntity); managementAgentService.setOperationTimeout(configuration.getManagementCallTimeoutSec(), TimeUnit.SECONDS); + managementAgentService.setManagementRegistry(managementRegistryService); // setup the executor that will handle the management call requests received from the server. We log failures. managementAgentService.setManagementCallExecutor(new LoggingExecutor( managementCallExecutor, LoggerFactory.getLogger(getClass().getName() + ".managementCallExecutor"))); - managementAgentService.bridge(managementRegistryService); - // expose tags - managementAgentService.setTags(managementRegistryService.getConfiguration().getTags()); + try { + managementAgentService.init(); + // expose tags + managementAgentService.setTags(managementRegistryService.getConfiguration().getTags()); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new StateTransitionException(e); + } catch (Exception e) { + e.printStackTrace(); + } break; } @@ -163,16 +182,30 @@ public void stateTransition(Status from, Status to) { @Override public void onNotification(ContextualNotification notification) { ManagementAgentService service = managementAgentService; - if (service != null) { - service.pushNotification(notification); + if (service != null && clusteringService.isConnected()) { + try { + service.pushNotification(notification); + } catch 
(InterruptedException e) { + LOGGER.error("Failed to push notification " + notification + ": " + e.getMessage(), e); + Thread.currentThread().interrupt(); + } catch (Exception e) { + LOGGER.error("Failed to push notification " + notification + ": " + e.getMessage(), e); + } } } @Override public void onStatistics(Collection statistics) { ManagementAgentService service = managementAgentService; - if (service != null) { - service.pushStatistics(statistics); + if (service != null && clusteringService.isConnected()) { + try { + service.pushStatistics(statistics); + } catch (InterruptedException e) { + LOGGER.error("Failed to push statistics " + statistics + ": " + e.getMessage(), e); + Thread.currentThread().interrupt(); + } catch (Exception e) { + LOGGER.error("Failed to push statistics " + statistics + ": " + e.getMessage(), e); + } } } diff --git a/management/src/main/java/org/ehcache/management/config/DefaultStatisticsProviderConfiguration.java b/management/src/main/java/org/ehcache/management/config/DefaultStatisticsProviderConfiguration.java index bbba956b4e..081ec14697 100644 --- a/management/src/main/java/org/ehcache/management/config/DefaultStatisticsProviderConfiguration.java +++ b/management/src/main/java/org/ehcache/management/config/DefaultStatisticsProviderConfiguration.java @@ -17,152 +17,67 @@ import org.terracotta.management.model.Objects; import org.terracotta.management.registry.ManagementProvider; +import org.terracotta.management.registry.collect.StatisticConfiguration; import java.util.concurrent.TimeUnit; -public class DefaultStatisticsProviderConfiguration implements StatisticsProviderConfiguration { +public class DefaultStatisticsProviderConfiguration extends StatisticConfiguration implements StatisticsProviderConfiguration { - private final Class provider; + private static final long serialVersionUID = 1L; - private long averageWindowDuration = 60; - private TimeUnit averageWindowUnit = TimeUnit.SECONDS; - private int historySize = 100; - 
private long historyInterval = 1; - private TimeUnit historyIntervalUnit = TimeUnit.SECONDS; - private long timeToDisable = 30; - private TimeUnit timeToDisableUnit = TimeUnit.SECONDS; + private final Class provider; public DefaultStatisticsProviderConfiguration(Class provider, long averageWindowDuration, TimeUnit averageWindowUnit, int historySize, long historyInterval, TimeUnit historyIntervalUnit, long timeToDisable, TimeUnit timeToDisableUnit) { + super(averageWindowDuration, averageWindowUnit, historySize, historyInterval, historyIntervalUnit, timeToDisable, timeToDisableUnit); this.provider = Objects.requireNonNull(provider); - this.averageWindowDuration = averageWindowDuration; - this.averageWindowUnit = Objects.requireNonNull(averageWindowUnit); - this.historySize = historySize; - this.historyInterval = historyInterval; - this.historyIntervalUnit = Objects.requireNonNull(historyIntervalUnit); - this.timeToDisable = timeToDisable; - this.timeToDisableUnit = Objects.requireNonNull(timeToDisableUnit); } public DefaultStatisticsProviderConfiguration(Class provider) { this.provider = Objects.requireNonNull(provider); } - @Override - public long averageWindowDuration() { - return averageWindowDuration; - } - - @Override - public TimeUnit averageWindowUnit() { - return averageWindowUnit; - } - - @Override - public int historySize() { - return historySize; - } - - @Override - public long historyInterval() { - return historyInterval; - } - - @Override - public TimeUnit historyIntervalUnit() { - return historyIntervalUnit; - } - - @Override - public long timeToDisable() { - return timeToDisable; - } - - @Override - public TimeUnit timeToDisableUnit() { - return timeToDisableUnit; - } - @Override public Class getStatisticsProviderType() { return provider; } - @Override - public String toString() { - return "{statisticsProviderType=" + getStatisticsProviderType() + - ", averageWindowDuration=" + averageWindowDuration() + - ", averageWindowUnit=" + averageWindowUnit() + 
- ", historyInterval=" + historyInterval() + - ", historyIntervalUnit=" + historyIntervalUnit() + - ", historySize=" + historySize() + - ", timeToDisable=" + timeToDisable() + - ", timeToDisableUnit=" + timeToDisableUnit() + - '}'; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - + if (!super.equals(o)) return false; DefaultStatisticsProviderConfiguration that = (DefaultStatisticsProviderConfiguration) o; - - if (!provider.equals(that.provider)) return false; - if (averageWindowDuration != that.averageWindowDuration) return false; - if (historySize != that.historySize) return false; - if (historyInterval != that.historyInterval) return false; - if (timeToDisable != that.timeToDisable) return false; - if (averageWindowUnit != that.averageWindowUnit) return false; - if (historyIntervalUnit != that.historyIntervalUnit) return false; - return timeToDisableUnit == that.timeToDisableUnit; - + return provider.equals(that.provider); } @Override public int hashCode() { - int result = (int) (averageWindowDuration ^ (averageWindowDuration >>> 32)); + int result = super.hashCode(); result = 31 * result + provider.hashCode(); - result = 31 * result + averageWindowUnit.hashCode(); - result = 31 * result + historySize; - result = 31 * result + (int) (historyInterval ^ (historyInterval >>> 32)); - result = 31 * result + historyIntervalUnit.hashCode(); - result = 31 * result + (int) (timeToDisable ^ (timeToDisable >>> 32)); - result = 31 * result + timeToDisableUnit.hashCode(); return result; } - public DefaultStatisticsProviderConfiguration setAverageWindowDuration(long averageWindowDuration) { - this.averageWindowDuration = averageWindowDuration; - return this; - } - - public DefaultStatisticsProviderConfiguration setAverageWindowUnit(TimeUnit averageWindowUnit) { - this.averageWindowUnit = averageWindowUnit; - return this; - } - - public DefaultStatisticsProviderConfiguration 
setHistoryInterval(long historyInterval) { - this.historyInterval = historyInterval; + @Override + public DefaultStatisticsProviderConfiguration setAverageWindowDuration(long averageWindowDuration, TimeUnit averageWindowUnit) { + super.setAverageWindowDuration(averageWindowDuration, averageWindowUnit); return this; } - public DefaultStatisticsProviderConfiguration setHistoryIntervalUnit(TimeUnit historyIntervalUnit) { - this.historyIntervalUnit = historyIntervalUnit; + @Override + public DefaultStatisticsProviderConfiguration setHistoryInterval(long historyInterval, TimeUnit historyIntervalUnit) { + super.setHistoryInterval(historyInterval, historyIntervalUnit); return this; } + @Override public DefaultStatisticsProviderConfiguration setHistorySize(int historySize) { - this.historySize = historySize; + super.setHistorySize(historySize); return this; } - public DefaultStatisticsProviderConfiguration setTimeToDisable(long timeToDisable) { - this.timeToDisable = timeToDisable; - return this; - } - - public DefaultStatisticsProviderConfiguration setTimeToDisableUnit(TimeUnit timeToDisableUnit) { - this.timeToDisableUnit = timeToDisableUnit; + @Override + public DefaultStatisticsProviderConfiguration setTimeToDisable(long timeToDisable, TimeUnit timeToDisableUnit) { + super.setTimeToDisable(timeToDisable, timeToDisableUnit); return this; } diff --git a/management/src/main/java/org/ehcache/management/config/EhcacheStatisticsProviderConfiguration.java b/management/src/main/java/org/ehcache/management/config/EhcacheStatisticsProviderConfiguration.java index 3f98330cec..f9e07d8071 100644 --- a/management/src/main/java/org/ehcache/management/config/EhcacheStatisticsProviderConfiguration.java +++ b/management/src/main/java/org/ehcache/management/config/EhcacheStatisticsProviderConfiguration.java @@ -21,6 +21,16 @@ public class EhcacheStatisticsProviderConfiguration extends DefaultStatisticsProviderConfiguration { + /** + * + * @param averageWindowDuration Time window to 
calculate average. + * @param averageWindowUnit TimeUnit of averageWindowDuration. E.g. TimeUnit.MILLISECONDS, TimeUnit.MINUTES etc + * @param historySize Number of statistics to keep in memory. E.g. if set to 20 then the last 20 statistic values will be stored. + * @param historyInterval The interval in which a sample is taken + * @param historyIntervalUnit the time interval in which a sample is taken. + * @param timeToDisable the time period in which a statistic is valid. After this amount of time the statistic is removed. + * @param timeToDisableUnit the TimeUnit associated with the timeToDisable parameter + */ public EhcacheStatisticsProviderConfiguration(long averageWindowDuration, TimeUnit averageWindowUnit, int historySize, long historyInterval, TimeUnit historyIntervalUnit, long timeToDisable, TimeUnit timeToDisableUnit) { super(EhcacheStatisticsProvider.class, averageWindowDuration, averageWindowUnit, historySize, historyInterval, historyIntervalUnit, timeToDisable, timeToDisableUnit); } diff --git a/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java b/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java index 08f71cf135..02d53495e5 100644 --- a/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java +++ b/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java @@ -16,12 +16,7 @@ package org.ehcache.management.providers; import org.ehcache.management.ManagementRegistryServiceConfiguration; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; import org.terracotta.management.registry.AbstractManagementProvider; -import org.terracotta.management.registry.action.ExposedObject; - -import java.util.Collection; -import java.util.LinkedHashSet; public abstract class CacheBindingManagementProvider extends AbstractManagementProvider { @@ -35,13 +30,4 @@ public 
CacheBindingManagementProvider(ManagementRegistryServiceConfiguration reg @Override protected abstract ExposedCacheBinding wrap(CacheBinding managedObject); - @Override - public Collection getDescriptors() { - Collection capabilities = new LinkedHashSet(); - for (ExposedObject o : managedObjects) { - capabilities.addAll(((ExposedCacheBinding) o).getDescriptors()); - } - return capabilities; - } - } diff --git a/management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java b/management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java index 9b26b3756b..ee84c7c74d 100644 --- a/management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java +++ b/management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java @@ -28,10 +28,12 @@ public abstract class ExposedCacheBinding implements ExposedObject protected final ManagementRegistryServiceConfiguration registryConfiguration; protected final CacheBinding cacheBinding; + private final Context context; protected ExposedCacheBinding(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding) { this.registryConfiguration = registryConfiguration; this.cacheBinding = cacheBinding; + this.context = registryConfiguration.getContext().with("cacheName", cacheBinding.getAlias()); } @Override @@ -46,10 +48,11 @@ public final CacheBinding getTarget() { } @Override - public final boolean matches(Context context) { - return context.contains(registryConfiguration.getContext().with("cacheName", cacheBinding.getAlias())); + public Context getContext() { + return context; } + @Override public Collection getDescriptors() { return Collections.emptyList(); } diff --git a/management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java b/management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java index 569db9f222..2be41b1c37 100644 --- 
a/management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java +++ b/management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java @@ -25,6 +25,7 @@ import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; +import java.util.ArrayList; import java.util.Collection; @Named("SettingsCapability") @@ -44,8 +45,8 @@ protected ExposedCacheSettings wrap(CacheBinding cacheBinding) { } @Override - public Collection getDescriptors() { - Collection descriptors = super.getDescriptors(); + public Collection getDescriptors() { + Collection descriptors = new ArrayList(super.getDescriptors()); descriptors.add(cacheManagerSettings()); return descriptors; } diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatistics.java deleted file mode 100644 index 3dffc5103b..0000000000 --- a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatistics.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.ehcache.management.providers.statistics; - -import org.ehcache.Cache; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.management.ManagementRegistryServiceConfiguration; -import org.ehcache.management.config.StatisticsProviderConfiguration; -import org.ehcache.management.providers.CacheBinding; -import org.ehcache.management.providers.ExposedCacheBinding; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.context.extended.ExposedStatistic; -import org.terracotta.context.extended.OperationType; -import org.terracotta.context.extended.StatisticsRegistry; -import org.terracotta.context.query.Matcher; -import org.terracotta.context.query.Matchers; -import org.terracotta.context.query.Query; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptorCategory; -import org.terracotta.management.model.stats.NumberUnit; -import org.terracotta.management.model.stats.Sample; -import org.terracotta.management.model.stats.Statistic; -import org.terracotta.management.model.stats.StatisticType; -import org.terracotta.management.model.stats.history.AverageHistory; -import org.terracotta.management.model.stats.history.CounterHistory; -import org.terracotta.management.model.stats.history.DurationHistory; -import org.terracotta.management.model.stats.history.RateHistory; -import org.terracotta.management.model.stats.history.RatioHistory; -import org.terracotta.management.model.stats.primitive.Counter; -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.archive.Timestamped; -import org.terracotta.statistics.extended.Result; -import org.terracotta.statistics.extended.SampledStatistic; - -import java.util.ArrayList; -import java.util.Collection; -import 
java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import static org.terracotta.context.query.Matchers.attributes; -import static org.terracotta.context.query.Matchers.context; -import static org.terracotta.context.query.Matchers.hasAttribute; -import static org.terracotta.context.query.Matchers.identifier; -import static org.terracotta.context.query.Matchers.subclassOf; -import static org.terracotta.context.query.QueryBuilder.queryBuilder; - -class EhcacheStatistics extends ExposedCacheBinding { - - private static final Set ALL_CACHE_PUT_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.PutOutcome.class); - private static final Set ALL_CACHE_GET_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.GetOutcome.class); - private static final Set ALL_CACHE_MISS_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE, CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER); - private static final Set ALL_CACHE_REMOVE_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.RemoveOutcome.class); - private static final Set GET_WITH_LOADER_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER); - private static final Set GET_NO_LOADER_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); - - private final StatisticsRegistry statisticsRegistry; - private final Map> countStatistics; - - EhcacheStatistics(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding, StatisticsProviderConfiguration statisticsProviderConfiguration, ScheduledExecutorService executor) { - super(registryConfiguration, cacheBinding); - this.countStatistics = 
discoverCountStatistics(cacheBinding.getCache()); - this.statisticsRegistry = new StatisticsRegistry(StandardOperationStatistic.class, cacheBinding.getCache(), executor, statisticsProviderConfiguration.averageWindowDuration(), - statisticsProviderConfiguration.averageWindowUnit(), statisticsProviderConfiguration.historySize(), statisticsProviderConfiguration.historyInterval(), statisticsProviderConfiguration.historyIntervalUnit(), - statisticsProviderConfiguration.timeToDisable(), statisticsProviderConfiguration.timeToDisableUnit()); - - statisticsRegistry.registerCompoundOperation("AllCacheGet", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_GET, ALL_CACHE_GET_OUTCOMES); - statisticsRegistry.registerCompoundOperation("AllCacheMiss", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_GET, ALL_CACHE_MISS_OUTCOMES); - statisticsRegistry.registerCompoundOperation("AllCachePut", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_PUT, ALL_CACHE_PUT_OUTCOMES); - statisticsRegistry.registerCompoundOperation("AllCacheRemove", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_REMOVE, ALL_CACHE_REMOVE_OUTCOMES); - statisticsRegistry.registerCompoundOperation("GetWithLoader", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_GET, GET_WITH_LOADER_OUTCOMES); - statisticsRegistry.registerCompoundOperation("GetNoLoader", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_GET, GET_NO_LOADER_OUTCOMES); - statisticsRegistry.registerRatio("Hit", Collections.singleton("cache"), Collections.singletonMap("type", "Ratio"), StandardOperationStatistic.CACHE_GET, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER), 
ALL_CACHE_GET_OUTCOMES); - } - - @SuppressWarnings("unchecked") - public Map> queryStatistic(String statisticName, long since) { - Collection registrations = statisticsRegistry.getRegistrations(); - for (ExposedStatistic registration : registrations) { - Object type = registration.getProperties().get("type"); - String name = registration.getName(); - - if ("Result".equals(type)) { - Result result = (Result) registration.getStat(); - - // The way ehcache stats computes stats: - // - Durations are in NANOSECONDS - // - Rate are in SECONDS and the values are divided by the average window, in SECONDS. - - if ((name + "Count").equals(statisticName)) { - SampledStatistic count = result.count(); - return Collections.singletonMap(statisticName, new CounterHistory(buildHistory(count, since), NumberUnit.COUNT)); - - } else if ((name + "Rate").equals(statisticName)) { - SampledStatistic rate = result.rate(); - return Collections.singletonMap(statisticName, new RateHistory(buildHistory(rate, since), TimeUnit.SECONDS)); - - } else if ((name + "LatencyMinimum").equals(statisticName)) { - SampledStatistic minimum = result.latency().minimum(); - return Collections.singletonMap(statisticName, new DurationHistory(buildHistory(minimum, since), TimeUnit.NANOSECONDS)); - - } else if ((name + "LatencyMaximum").equals(statisticName)) { - SampledStatistic maximum = result.latency().maximum(); - return Collections.singletonMap(statisticName, new DurationHistory(buildHistory(maximum, since), TimeUnit.NANOSECONDS)); - - } else if ((name + "LatencyAverage").equals(statisticName)) { - SampledStatistic average = result.latency().average(); - return Collections.singletonMap(statisticName, new AverageHistory(buildHistory(average, since), TimeUnit.NANOSECONDS)); - - } else if (name.equals(statisticName)) { - Map> resultStats = new HashMap>(); - resultStats.put(statisticName + "Count", new CounterHistory(buildHistory(result.count(), since), NumberUnit.COUNT)); - resultStats.put(statisticName + 
"Rate", new RateHistory(buildHistory(result.rate(), since), TimeUnit.SECONDS)); - resultStats.put(statisticName + "LatencyMinimum", new DurationHistory(buildHistory(result.latency().minimum(), since), TimeUnit.NANOSECONDS)); - resultStats.put(statisticName + "LatencyMaximum", new DurationHistory(buildHistory(result.latency().maximum(), since), TimeUnit.NANOSECONDS)); - resultStats.put(statisticName + "LatencyAverage", new AverageHistory(buildHistory(result.latency().average(), since), TimeUnit.NANOSECONDS)); - return resultStats; - } - - } else if ("Ratio".equals(type)) { - if ((name + "Ratio").equals(statisticName)) { - SampledStatistic ratio = (SampledStatistic) registration.getStat(); - return Collections.singletonMap(statisticName, new RatioHistory(buildHistory(ratio, since), NumberUnit.RATIO)); - } - } - } - - OperationStatistic operationStatistic = countStatistics.get(statisticName); - if (operationStatistic != null) { - long sum = operationStatistic.sum(); - return Collections.singletonMap(statisticName, new Counter(sum, NumberUnit.COUNT)); - } - - return Collections.emptyMap(); - } - - private List> buildHistory(SampledStatistic sampledStatistic, long since) { - List> result = new ArrayList>(); - - List> history = sampledStatistic.history(); - for (Timestamped timestamped : history) { - if(timestamped.getTimestamp() >= since) { - result.add(new Sample(timestamped.getTimestamp(), timestamped.getSample())); - } - } - - return result; - } - - @Override - public Collection getDescriptors() { - Set capabilities = new HashSet(); - - capabilities.addAll(queryStatisticsRegistry()); - capabilities.addAll(operationStatistics()); - - return capabilities; - } - - private Set operationStatistics() { - Set capabilities = new HashSet(); - - for (String name : countStatistics.keySet()) { - capabilities.add(new StatisticDescriptor(name, StatisticType.COUNTER)); - } - - return capabilities; - } - - private Set queryStatisticsRegistry() { - Set capabilities = new HashSet(); - 
- Collection registrations = statisticsRegistry.getRegistrations(); - for (ExposedStatistic registration : registrations) { - String name = registration.getName(); - Object type = registration.getProperties().get("type"); - if ("Result".equals(type)) { - List statistics = new ArrayList(); - statistics.add(new StatisticDescriptor(name + "Count", StatisticType.COUNTER_HISTORY)); - statistics.add(new StatisticDescriptor(name + "Rate", StatisticType.RATE_HISTORY)); - statistics.add(new StatisticDescriptor(name + "LatencyMinimum", StatisticType.DURATION_HISTORY)); - statistics.add(new StatisticDescriptor(name + "LatencyMaximum", StatisticType.DURATION_HISTORY)); - statistics.add(new StatisticDescriptor(name + "LatencyAverage", StatisticType.AVERAGE_HISTORY)); - - capabilities.add(new StatisticDescriptorCategory(name, statistics)); - } else if ("Ratio".equals(type)) { - capabilities.add(new StatisticDescriptor(name + "Ratio", StatisticType.RATIO_HISTORY)); - } - } - - return capabilities; - } - - public void dispose() { - statisticsRegistry.clearRegistrations(); - } - - - @SuppressWarnings({"unchecked", "rawtypes"}) - private static Map> discoverCountStatistics(Cache cache) { - Map> result = new HashMap>(); - - for (OperationType t : StandardOperationStatistic.class.getEnumConstants()) { - OperationStatistic statistic = findOperationObserver(t, cache); - if (statistic == null) { - if (t.required()) { - throw new IllegalStateException("Required statistic " + t + " not found"); - } - } else { - String key = capitalize(t.operationName()) + "Counter"; - if(!result.containsKey(key)) { - result.put(key, statistic); - } - - } - } - - return result; - } - - private static String capitalize(String s) { - if (s.length() < 2) { - return s.toUpperCase(); - } else { - return s.substring(0, 1).toUpperCase() + s.substring(1); - } - } - - @SuppressWarnings({"unchecked", "rawtypes"}) - private static OperationStatistic findOperationObserver(OperationType statistic, Cache cache) { - Set> 
results = findOperationObserver(statistic.context(), statistic.type(), statistic.operationName(), statistic.tags(), cache); - switch (results.size()) { - case 0: - return null; - case 1: - return (OperationStatistic) results.iterator().next(); - default: - throw new IllegalStateException("Duplicate statistics found for " + statistic); - } - } - - @SuppressWarnings("unchecked") - private static Set> findOperationObserver(Query contextQuery, Class type, String name, - final Set tags, Cache cache) { - Query q = queryBuilder().chain(contextQuery) - .children().filter(context(identifier(subclassOf(OperationStatistic.class)))).build(); - - Set operationStatisticNodes = q.execute(Collections.singleton(ContextManager.nodeFor(cache))); - Set result = queryBuilder() - .filter( - context(attributes(Matchers.>allOf(hasAttribute("type", type), - hasAttribute("name", name), hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.containsAll(tags); - } - }))))).build().execute(operationStatisticNodes); - - if (result.isEmpty()) { - return Collections.emptySet(); - } else { - Set> statistics = new HashSet>(); - for (TreeNode node : result) { - statistics.add((OperationStatistic) node.getContext().attributes().get("this")); - } - return statistics; - } - } - -} diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java index aeb392b504..68c53280dd 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java @@ -22,19 +22,33 @@ import org.ehcache.management.providers.ExposedCacheBinding; import org.terracotta.management.model.capabilities.Capability; import 
org.terracotta.management.model.capabilities.StatisticsCapability; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.stats.Statistic; import org.terracotta.management.registry.action.ExposedObject; import org.terracotta.management.registry.action.Named; +import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; @Named("StatisticsCapability") public class EhcacheStatisticsProvider extends CacheBindingManagementProvider { + private static final Comparator STATISTIC_DESCRIPTOR_COMPARATOR = new Comparator() { + @Override + public int compare(StatisticDescriptor o1, StatisticDescriptor o2) { + return o1.getName().compareTo(o2.getName()); + } + }; + private final StatisticsProviderConfiguration statisticsProviderConfiguration; private final ScheduledExecutorService executor; @@ -46,12 +60,23 @@ public EhcacheStatisticsProvider(ManagementRegistryServiceConfiguration configur @Override protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { - return new EhcacheStatistics(registryConfiguration, cacheBinding, statisticsProviderConfiguration, executor); + return new StandardEhcacheStatistics(registryConfiguration, cacheBinding, statisticsProviderConfiguration, executor); } @Override protected void dispose(ExposedObject exposedObject) { - ((EhcacheStatistics) exposedObject).dispose(); + ((StandardEhcacheStatistics) exposedObject).dispose(); + } + + @Override + public final Collection getDescriptors() { + Collection capabilities = new HashSet(); + for (ExposedObject o : getExposedObjects()) { + capabilities.addAll(((StandardEhcacheStatistics) 
o).getDescriptors()); + } + List list = new ArrayList(capabilities); + Collections.sort(list, STATISTIC_DESCRIPTOR_COMPARATOR); + return list; } @Override @@ -65,10 +90,14 @@ public Capability getCapability() { @Override public Map> collectStatistics(Context context, Collection statisticNames, long since) { Map> statistics = new HashMap>(statisticNames.size()); - EhcacheStatistics ehcacheStatistics = (EhcacheStatistics) findExposedObject(context); + StandardEhcacheStatistics ehcacheStatistics = (StandardEhcacheStatistics) findExposedObject(context); if (ehcacheStatistics != null) { for (String statisticName : statisticNames) { - statistics.putAll(ehcacheStatistics.queryStatistic(statisticName, since)); + try { + statistics.put(statisticName, ehcacheStatistics.queryStatistic(statisticName, since)); + } catch (IllegalArgumentException ignored) { + // ignore when statisticName does not exist and throws an exception + } } } return statistics; diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java new file mode 100644 index 0000000000..0dfc137bb1 --- /dev/null +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -0,0 +1,93 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.management.providers.statistics; + +import org.ehcache.core.statistics.CacheOperationOutcomes; +import org.ehcache.core.statistics.TierOperationOutcomes; +import org.ehcache.management.ManagementRegistryServiceConfiguration; +import org.ehcache.management.config.StatisticsProviderConfiguration; +import org.ehcache.management.providers.CacheBinding; +import org.ehcache.management.providers.ExposedCacheBinding; +import org.terracotta.context.extended.OperationStatisticDescriptor; +import org.terracotta.context.extended.StatisticsRegistry; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.stats.Statistic; +import org.terracotta.management.registry.collect.StatisticsRegistryMetadata; + +import java.util.Collection; +import java.util.EnumSet; +import java.util.concurrent.ScheduledExecutorService; + +import static java.util.Collections.singleton; +import static java.util.EnumSet.allOf; +import static java.util.EnumSet.of; +import static org.terracotta.context.extended.ValueStatisticDescriptor.descriptor; + +class StandardEhcacheStatistics extends ExposedCacheBinding { + + private final StatisticsRegistry statisticsRegistry; + private final StatisticsRegistryMetadata statisticsRegistryMetadata; + + StandardEhcacheStatistics(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding, StatisticsProviderConfiguration statisticsProviderConfiguration, ScheduledExecutorService executor) { + super(registryConfiguration, cacheBinding); + this.statisticsRegistry = new StatisticsRegistry(cacheBinding.getCache(), executor, statisticsProviderConfiguration.averageWindowDuration(), + statisticsProviderConfiguration.averageWindowUnit(), statisticsProviderConfiguration.historySize(), statisticsProviderConfiguration.historyInterval(), statisticsProviderConfiguration.historyIntervalUnit(), + statisticsProviderConfiguration.timeToDisable(), 
statisticsProviderConfiguration.timeToDisableUnit()); + + this.statisticsRegistryMetadata = new StatisticsRegistryMetadata(statisticsRegistry); + + EnumSet hit = of(CacheOperationOutcomes.GetOutcome.HIT); + EnumSet miss = of(CacheOperationOutcomes.GetOutcome.MISS); + OperationStatisticDescriptor getCacheStatisticDescriptor = OperationStatisticDescriptor.descriptor("get", singleton("cache"), CacheOperationOutcomes.GetOutcome.class); + + statisticsRegistry.registerCompoundOperations("Cache:Hit", getCacheStatisticDescriptor, hit); + statisticsRegistry.registerCompoundOperations("Cache:Miss", getCacheStatisticDescriptor, miss); + statisticsRegistry.registerCompoundOperations("Cache:Clear", OperationStatisticDescriptor.descriptor("clear", singleton("cache"),CacheOperationOutcomes.ClearOutcome.class), allOf(CacheOperationOutcomes.ClearOutcome.class)); + statisticsRegistry.registerRatios("Cache:HitRatio", getCacheStatisticDescriptor, hit, allOf(CacheOperationOutcomes.GetOutcome.class)); + statisticsRegistry.registerRatios("Cache:MissRatio", getCacheStatisticDescriptor, miss, allOf(CacheOperationOutcomes.GetOutcome.class)); + + Class tierOperationGetOucomeClass = TierOperationOutcomes.GetOutcome.class; + OperationStatisticDescriptor getTierStatisticDescriptor = OperationStatisticDescriptor.descriptor("get", singleton("tier"), tierOperationGetOucomeClass); + + statisticsRegistry.registerCompoundOperations("Hit", getTierStatisticDescriptor, of(TierOperationOutcomes.GetOutcome.HIT)); + statisticsRegistry.registerCompoundOperations("Miss", getTierStatisticDescriptor, of(TierOperationOutcomes.GetOutcome.MISS)); + statisticsRegistry.registerCompoundOperations("Eviction", + OperationStatisticDescriptor.descriptor("eviction", singleton("tier"), + TierOperationOutcomes.EvictionOutcome.class), + allOf(TierOperationOutcomes.EvictionOutcome.class)); + statisticsRegistry.registerRatios("HitRatio", getTierStatisticDescriptor, of(TierOperationOutcomes.GetOutcome.HIT), 
allOf(tierOperationGetOucomeClass)); + statisticsRegistry.registerRatios("MissRatio", getTierStatisticDescriptor, of(TierOperationOutcomes.GetOutcome.MISS), allOf(tierOperationGetOucomeClass)); + statisticsRegistry.registerCounter("MappingCount", descriptor("mappings", singleton("tier"))); + statisticsRegistry.registerCounter("MaxMappingCount", descriptor("maxMappings", singleton("tier"))); + statisticsRegistry.registerSize("AllocatedByteSize", descriptor("allocatedMemory", singleton("tier"))); + statisticsRegistry.registerSize("OccupiedByteSize", descriptor("occupiedMemory", singleton("tier"))); + } + + Statistic queryStatistic(String fullStatisticName, long since) { + return statisticsRegistryMetadata.queryStatistic(fullStatisticName, since); + } + + @Override + public Collection getDescriptors() { + return statisticsRegistryMetadata.getDescriptors(); + } + + void dispose() { + statisticsRegistry.clearRegistrations(); + } + + +} diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardOperationStatistic.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardOperationStatistic.java deleted file mode 100755 index 87b072e945..0000000000 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardOperationStatistic.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.ehcache.management.providers.statistics; - -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.terracotta.context.extended.OperationType; -import org.terracotta.context.query.Query; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - -import static org.terracotta.context.query.Queries.self; - - -/** - * The Enum OperationType. - */ -enum StandardOperationStatistic implements OperationType { - CACHE_LOADING(false, self(), CacheOperationOutcomes.CacheLoadingOutcome.class, "cacheLoading", "cache"), - - /** - * The cache get. - */ - CACHE_GET(true, self(), CacheOperationOutcomes.GetOutcome.class, "get", "cache"), - - /** - * The cache put. - */ - CACHE_PUT(true, self(), CacheOperationOutcomes.PutOutcome.class, "put", "cache"), - - /** - * The cache remove. - */ - CACHE_REMOVE(true, self(), CacheOperationOutcomes.RemoveOutcome.class, "remove", "cache"), - - /** - * The cache remove(K, V) - */ - CACHE_CONDITIONAL_REMOVE(true, self(), CacheOperationOutcomes.ConditionalRemoveOutcome.class, "conditionalRemove", "cache"), - - /** - * The cache putIfAbsent. - */ - CACHE_PUT_IF_ABSENT(true, self(), CacheOperationOutcomes.PutIfAbsentOutcome.class, "putIfAbsent", "cache"), - - /** - * The cache replace. - */ - CACHE_REPLACE(true, self(), CacheOperationOutcomes.ReplaceOutcome.class, "replace", "cache"), - - ; - - private final boolean required; - private final Query context; - private final Class> type; - private final String name; - private final Set tags; - - StandardOperationStatistic(boolean required, Query context, Class> type, String name, String... tags) { - this.required = required; - this.context = context; - this.type = type; - this.name = name; - this.tags = Collections.unmodifiableSet(new HashSet(Arrays.asList(tags))); - } - - /** - * If this statistic is required. - *

- * If required and this statistic is not present an exception will be thrown. - * - * @return - */ - public final boolean required() { - return required; - } - - /** - * Query that select context nodes for this statistic. - * - * @return context query - */ - public final Query context() { - return context; - } - - /** - * Operation result type. - * - * @return operation result type - */ - @SuppressWarnings("rawtypes") - public final Class> type() { - return type; - } - - /** - * The name of the statistic as found in the statistics context tree. - * - * @return the statistic name - */ - public final String operationName() { - return name; - } - - /** - * A set of tags that will be on the statistic found in the statistics context tree. - * - * @return the statistic tags - */ - public final Set tags() { - return tags; - } - -} diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java b/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java index 2f11eb552d..f23d7f6ba8 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java +++ b/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java @@ -31,24 +31,14 @@ import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.management.model.context.Context; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.registry.StatisticQuery; +import org.terracotta.management.registry.collect.DefaultStatisticCollector; +import org.terracotta.management.registry.collect.StatisticCollector; +import org.terracotta.management.registry.collect.StatisticConfiguration; -import java.util.ArrayList; import 
java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; @@ -63,11 +53,6 @@ private enum EhcacheNotification { CACHE_MANAGER_CLOSED, } - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultCollectorService.class); - - private ScheduledFuture task; - - private final ConcurrentMap selectedStatsPerCapability = new ConcurrentHashMap(); private final Collector collector; private volatile TimeSource timeSource; @@ -76,6 +61,8 @@ private enum EhcacheNotification { private volatile InternalCacheManager cacheManager; private volatile ManagementRegistryServiceConfiguration configuration; + private volatile DefaultStatisticCollector statisticCollector; + public DefaultCollectorService() { this(Collector.EMPTY); } @@ -92,6 +79,34 @@ public synchronized void start(ServiceProvider serviceProvider) { cacheManager = serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); scheduledExecutorService = serviceProvider.getService(ExecutionService.class).getScheduledExecutor(configuration.getCollectorExecutorAlias()); + StatisticsProviderConfiguration providerConfiguration = configuration.getConfigurationFor(EhcacheStatisticsProvider.class); + + statisticCollector = new DefaultStatisticCollector( + managementRegistry, + scheduledExecutorService, + new StatisticCollector.Collector() { + @Override + public void onStatistics(Collection statistics) { + collector.onStatistics(statistics); + } + }, + new StatisticCollector.TimeProvider() { + @Override + public long getTimeMillis() { + return timeSource.getTimeMillis(); + } + }, + providerConfiguration 
instanceof StatisticConfiguration ? + (StatisticConfiguration) providerConfiguration : + new StatisticConfiguration( + providerConfiguration.averageWindowDuration(), + providerConfiguration.averageWindowUnit(), + providerConfiguration.historySize(), + providerConfiguration.historyInterval(), + providerConfiguration.historyIntervalUnit(), + providerConfiguration.timeToDisable(), + providerConfiguration.timeToDisableUnit())); + cacheManager.registerListener(this); } @@ -101,7 +116,7 @@ public synchronized void stop() { // so deregisterListener is done in the stateTransition listener //cacheManager.deregisterListener(this); - stopStatisticCollector(); + statisticCollector.stopStatisticCollector(); shutdownNow(scheduledExecutorService); } @@ -128,7 +143,7 @@ public void stateTransition(Status from, Status to) { case AVAILABLE: // .register() call should be there when CM is AVAILABLE // this is to expose the stats collector for management calls - managementRegistry.register(this); + managementRegistry.register(statisticCollector); collector.onNotification( new ContextualNotification( @@ -136,7 +151,7 @@ public void stateTransition(Status from, Status to) { EhcacheNotification.CACHE_MANAGER_AVAILABLE.name())); // auto-start stat collection - startStatisticCollector(); + statisticCollector.startStatisticCollector(); break; case MAINTENANCE: @@ -161,80 +176,4 @@ public void stateTransition(Status from, Status to) { } } - @Override - public synchronized void startStatisticCollector() { - if (task == null) { - StatisticsProviderConfiguration providerConfiguration = configuration.getConfigurationFor(EhcacheStatisticsProvider.class); - - long timeToDisableMs = TimeUnit.MILLISECONDS.convert(providerConfiguration.timeToDisable(), providerConfiguration.timeToDisableUnit()); - long pollingIntervalMs = Math.round(timeToDisableMs * 0.75); // we poll at 75% of the time to disable (before the time to disable happens) - final AtomicLong lastPoll = new 
AtomicLong(timeSource.getTimeMillis()); - - task = scheduledExecutorService.scheduleWithFixedDelay(new Runnable() { - @Override - public void run() { - try { - // always check if the cache manager is still available - if (!selectedStatsPerCapability.isEmpty()) { - - // create the full context list from current caches - Collection cacheContexts = new ArrayList(); - for (String cacheAlias : new HashSet(cacheManager.getRuntimeConfiguration().getCacheConfigurations().keySet())) { - cacheContexts.add(configuration.getContext().with("cacheName", cacheAlias)); - } - - Collection statistics = new ArrayList(); - - // for each capability, call the management registry - long since = lastPoll.get(); - for (Map.Entry entry : selectedStatsPerCapability.entrySet()) { - for (ContextualStatistics contextualStatistics : entry.getValue().since(since).on(cacheContexts).build().execute()) { - statistics.add(contextualStatistics); - } - } - - // next time, only poll history from this time - lastPoll.set(timeSource.getTimeMillis()); - - if (!statistics.isEmpty()) { - collector.onStatistics(statistics); - } - } - } catch (RuntimeException e) { - LOGGER.error("StatisticCollector: " + e.getMessage(), e); - } - } - }, pollingIntervalMs, pollingIntervalMs, TimeUnit.MILLISECONDS); - } - } - - @Override - public synchronized void stopStatisticCollector() { - if (task != null) { - task.cancel(false); - task = null; - } - } - - @Override - public void updateCollectedStatistics(String capabilityName, Collection statisticNames) { - if(!statisticNames.isEmpty()) { - StatisticQuery.Builder builder = managementRegistry.withCapability(capabilityName).queryStatistics(statisticNames); - selectedStatsPerCapability.put(capabilityName, builder); - } else { - // we clear the stats set - selectedStatsPerCapability.remove(capabilityName); - } - } - - // for test purposes - Map getSelectedStatsPerCapability() { - return Collections.unmodifiableMap(selectedStatsPerCapability); - } - - // for test purposes - void 
setManagementRegistry(ManagementRegistryService managementRegistry) { - this.managementRegistry = managementRegistry; - } - } diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java b/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java index 7876e183a2..c61d52ef85 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java +++ b/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java @@ -22,7 +22,6 @@ import org.ehcache.core.spi.store.InternalCacheManager; import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.spi.service.ExecutionService; -import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.ManagementRegistryServiceConfiguration; import org.ehcache.management.cluster.Clustering; @@ -37,7 +36,7 @@ import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; import org.terracotta.management.model.context.ContextContainer; -import org.terracotta.management.registry.AbstractManagementRegistry; +import org.terracotta.management.registry.DefaultManagementRegistry; import org.terracotta.management.registry.ManagementProvider; import org.terracotta.statistics.StatisticsManager; @@ -48,8 +47,8 @@ import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; -@ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class, TimeSourceService.class}) -public class DefaultManagementRegistryService extends AbstractManagementRegistry implements ManagementRegistryService, CacheManagerListener { +@ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class}) +public class DefaultManagementRegistryService extends DefaultManagementRegistry implements ManagementRegistryService, CacheManagerListener { private 
final ManagementRegistryServiceConfiguration configuration; private volatile ScheduledExecutorService statisticsExecutor; @@ -61,6 +60,7 @@ public DefaultManagementRegistryService() { } public DefaultManagementRegistryService(ManagementRegistryServiceConfiguration configuration) { + super(null); // context container creation is overriden here this.configuration = configuration == null ? new DefaultManagementRegistryConfiguration() : configuration; } @@ -104,13 +104,11 @@ public void stop() { public void cacheAdded(String alias, Cache cache) { StatisticsManager.associate(cache).withParent(cacheManager); - register(cache); register(new CacheBinding(alias, cache)); } @Override public void cacheRemoved(String alias, Cache cache) { - unregister(cache); unregister(new CacheBinding(alias, cache)); StatisticsManager.dissociate(cache).fromParent(cacheManager); diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java b/management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java index bc2abc8a64..168de755e1 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java +++ b/management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java @@ -18,14 +18,14 @@ import org.ehcache.Cache; import org.ehcache.Status; import org.ehcache.core.events.CacheManagerListener; +import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.spi.store.InternalCacheManager; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.SharedManagementService; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; import 
org.terracotta.management.model.capabilities.Capability; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.context.ContextContainer; @@ -53,7 +53,7 @@ public void start(final ServiceProvider serviceProvider) { final ManagementRegistryService managementRegistry = serviceProvider.getService(ManagementRegistryService.class); final Context cmContext = managementRegistry.getConfiguration().getContext(); final InternalCacheManager cacheManager = - serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); + serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); cacheManager.registerListener(new CacheManagerListener() { @Override @@ -102,8 +102,17 @@ public Map getContextContainers() { } @Override - public Map> getCapabilities() { - Map> capabilities = new HashMap>(); + public Collection getCapabilities() { + Collection capabilities = new ArrayList(); + for (ManagementRegistryService registryService : delegates.values()) { + capabilities.addAll(registryService.getCapabilities()); + } + return capabilities; + } + + @Override + public Map> getCapabilitiesByContext() { + Map> capabilities = new HashMap>(); for (Map.Entry entry : delegates.entrySet()) { capabilities.put(entry.getKey(), entry.getValue().getCapabilities()); } diff --git a/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java b/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java index c66fd57c80..f093257933 100644 --- a/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java +++ b/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java @@ -100,14 +100,16 @@ public ServiceCreationConfiguration parseServiceCreat // average-window for (Element averageWindow : NodeListIterable.elements(statisticConfiguration, 
NAMESPACE, "average-window")) { - providerConfiguration.setAverageWindowDuration(Long.parseLong(val(averageWindow, String.valueOf(providerConfiguration.averageWindowDuration())))); - providerConfiguration.setAverageWindowUnit(unit(averageWindow, providerConfiguration.averageWindowUnit())); + providerConfiguration.setAverageWindowDuration( + Long.parseLong(val(averageWindow, String.valueOf(providerConfiguration.averageWindowDuration()))), + unit(averageWindow, providerConfiguration.averageWindowUnit())); } // history-interval for (Element historyInterval : NodeListIterable.elements(statisticConfiguration, NAMESPACE, "history-interval")) { - providerConfiguration.setHistoryInterval(Long.parseLong(val(historyInterval, String.valueOf(providerConfiguration.historyInterval())))); - providerConfiguration.setHistoryIntervalUnit(unit(historyInterval, providerConfiguration.historyIntervalUnit())); + providerConfiguration.setHistoryInterval( + Long.parseLong(val(historyInterval, String.valueOf(providerConfiguration.historyInterval()))), + unit(historyInterval, providerConfiguration.historyIntervalUnit())); } // history-size @@ -117,8 +119,9 @@ public ServiceCreationConfiguration parseServiceCreat // time-to-disable for (Element timeToDisable : NodeListIterable.elements(statisticConfiguration, NAMESPACE, "time-to-disable")) { - providerConfiguration.setTimeToDisable(Long.parseLong(val(timeToDisable, String.valueOf(providerConfiguration.timeToDisable())))); - providerConfiguration.setTimeToDisableUnit(unit(timeToDisable, providerConfiguration.timeToDisableUnit())); + providerConfiguration.setTimeToDisable( + Long.parseLong(val(timeToDisable, String.valueOf(providerConfiguration.timeToDisable()))), + unit(timeToDisable, providerConfiguration.timeToDisableUnit())); } registryConfiguration.addConfiguration(providerConfiguration); diff --git a/management/src/test/java/org/ehcache/docs/ManagementTest.java b/management/src/test/java/org/ehcache/docs/ManagementTest.java index 
9a0c7e429d..6a92f52ace 100644 --- a/management/src/test/java/org/ehcache/docs/ManagementTest.java +++ b/management/src/test/java/org/ehcache/docs/ManagementTest.java @@ -21,62 +21,99 @@ import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; import org.ehcache.management.ManagementRegistryService; -import org.ehcache.management.registry.DefaultManagementRegistryService; -import org.terracotta.management.registry.ResultSet; import org.ehcache.management.SharedManagementService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.providers.statistics.StatsUtil; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; import org.ehcache.management.registry.DefaultSharedManagementService; import org.hamcrest.Matchers; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.Timeout; import org.terracotta.management.model.capabilities.Capability; import org.terracotta.management.model.capabilities.context.CapabilityContext; import org.terracotta.management.model.capabilities.descriptors.Descriptor; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.context.ContextContainer; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.model.stats.primitive.Counter; +import org.terracotta.management.model.stats.history.CounterHistory; +import org.terracotta.management.registry.ResultSet; +import org.terracotta.management.registry.StatisticQuery; import java.util.Collection; import java.util.Iterator; +import java.util.concurrent.TimeUnit; public class ManagementTest { - @Test + private final EhcacheStatisticsProviderConfiguration 
EHCACHE_STATS_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); + + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + public void usingManagementRegistry() throws Exception { // tag::usingManagementRegistry[] - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) + + CacheManager cacheManager = null; + try { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager1"); // <1> + registryConfiguration.addConfiguration(EHCACHE_STATS_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); // <2> + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, MemoryUnit.MB)) .build(); - DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); // <1> - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); // <2> - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) // <3> - .build(true); + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) // <3> + .build(true); + + Cache aCache = cacheManager.getCache("myCache", Long.class, String.class); + aCache.put(1L, "one"); + aCache.put(0L, "zero"); + aCache.get(1L); // <4> + aCache.get(0L); // <4> + aCache.get(0L); + aCache.get(0L); - Cache aCache = cacheManager.getCache("aCache", Long.class, String.class); - aCache.get(0L); // <4> - aCache.get(0L); - 
aCache.get(0L); + Context context = StatsUtil.createContext(managementRegistry); // <5> - Context context = createContext(managementRegistry); // <5> + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") // <6> + .queryStatistic("Cache:HitCount") + .on(context) + .build(); - ContextualStatistics counters = managementRegistry.withCapability("StatisticsCapability") // <6> - .queryStatistic("GetCounter") - .on(context) - .build() - .execute() - .getSingleResult(); + long onHeapHitCount = 0; + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct value : 4 + do { + ResultSet counters = query.execute(); - Assert.assertThat(counters.size(), Matchers.is(1)); - Counter getCounter = counters.getStatistic(Counter.class); + ContextualStatistics statisticsContext = counters.getResult(context); - Assert.assertThat(getCounter.getValue(), Matchers.equalTo(3L)); // <7> + Assert.assertThat(counters.size(), Matchers.is(1)); - cacheManager.close(); + CounterHistory onHeapStore_Hit_Count = statisticsContext.getStatistic(CounterHistory.class, "Cache:HitCount"); + + // hit count is a sampled stat, for example its values could be [0,0,3,4]. 
+ // In the present case, only the last value is important to us , the cache was eventually hit 4 times + if (onHeapStore_Hit_Count.getValue().length > 0) { + int mostRecentIndex = onHeapStore_Hit_Count.getValue().length - 1; + onHeapHitCount = onHeapStore_Hit_Count.getValue()[mostRecentIndex].getValue(); + } + + } while (onHeapHitCount != 4L); + } + finally { + if(cacheManager != null) cacheManager.close(); + } // end::usingManagementRegistry[] } @@ -86,41 +123,45 @@ public void capabilitiesAndContexts() throws Exception { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) .build(); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(); - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - - Collection capabilities = managementRegistry.getCapabilities(); // <1> - Assert.assertThat(capabilities.isEmpty(), Matchers.is(false)); - Capability capability = capabilities.iterator().next(); - String capabilityName = capability.getName(); // <2> - Collection capabilityDescriptions = capability.getDescriptors(); // <3> - Assert.assertThat(capabilityDescriptions.isEmpty(), Matchers.is(false)); - CapabilityContext capabilityContext = capability.getCapabilityContext(); - Collection attributes = capabilityContext.getAttributes(); // <4> - Assert.assertThat(attributes.size(), Matchers.is(2)); - Iterator iterator = attributes.iterator(); - CapabilityContext.Attribute attribute1 = iterator.next(); - Assert.assertThat(attribute1.getName(), Matchers.equalTo("cacheManagerName")); // <5> - Assert.assertThat(attribute1.isRequired(), Matchers.is(true)); - CapabilityContext.Attribute attribute2 = iterator.next(); - Assert.assertThat(attribute2.getName(), Matchers.equalTo("cacheName")); // <6> - Assert.assertThat(attribute2.isRequired(), 
Matchers.is(true)); - - ContextContainer contextContainer = managementRegistry.getContextContainer(); // <7> - Assert.assertThat(contextContainer.getName(), Matchers.equalTo("cacheManagerName")); // <8> - Assert.assertThat(contextContainer.getValue(), Matchers.startsWith("cache-manager-")); - Collection subContexts = contextContainer.getSubContexts(); - Assert.assertThat(subContexts.size(), Matchers.is(1)); - ContextContainer subContextContainer = subContexts.iterator().next(); - Assert.assertThat(subContextContainer.getName(), Matchers.equalTo("cacheName")); // <9> - Assert.assertThat(subContextContainer.getValue(), Matchers.equalTo("aCache")); - - - cacheManager.close(); + CacheManager cacheManager = null; + try { + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(); + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + + Collection capabilities = managementRegistry.getCapabilities(); // <1> + Assert.assertThat(capabilities.isEmpty(), Matchers.is(false)); + Capability capability = capabilities.iterator().next(); + String capabilityName = capability.getName(); // <2> + Collection capabilityDescriptions = capability.getDescriptors(); // <3> + Assert.assertThat(capabilityDescriptions.isEmpty(), Matchers.is(false)); + CapabilityContext capabilityContext = capability.getCapabilityContext(); + Collection attributes = capabilityContext.getAttributes(); // <4> + Assert.assertThat(attributes.size(), Matchers.is(2)); + Iterator iterator = attributes.iterator(); + CapabilityContext.Attribute attribute1 = iterator.next(); + Assert.assertThat(attribute1.getName(), Matchers.equalTo("cacheManagerName")); // <5> + Assert.assertThat(attribute1.isRequired(), Matchers.is(true)); + CapabilityContext.Attribute attribute2 = iterator.next(); + Assert.assertThat(attribute2.getName(), Matchers.equalTo("cacheName")); // <6> + 
Assert.assertThat(attribute2.isRequired(), Matchers.is(true)); + + ContextContainer contextContainer = managementRegistry.getContextContainer(); // <7> + Assert.assertThat(contextContainer.getName(), Matchers.equalTo("cacheManagerName")); // <8> + Assert.assertThat(contextContainer.getValue(), Matchers.startsWith("cache-manager-")); + Collection subContexts = contextContainer.getSubContexts(); + Assert.assertThat(subContexts.size(), Matchers.is(1)); + ContextContainer subContextContainer = subContexts.iterator().next(); + Assert.assertThat(subContextContainer.getName(), Matchers.equalTo("cacheName")); // <9> + Assert.assertThat(subContextContainer.getValue(), Matchers.equalTo("aCache")); + } + finally { + if(cacheManager != null) cacheManager.close(); + } + // end::capabilitiesAndContexts[] } @@ -130,77 +171,97 @@ public void actionCall() throws Exception { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) .build(); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(); - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - Cache aCache = cacheManager.getCache("aCache", Long.class, String.class); - aCache.put(0L, "zero"); // <1> - - Context context = createContext(managementRegistry); // <2> - - managementRegistry.withCapability("ActionsCapability") // <3> - .call("clear") - .on(context) - .build() - .execute(); - - Assert.assertThat(aCache.get(0L), Matchers.is(Matchers.nullValue())); // <4> - - cacheManager.close(); + CacheManager cacheManager = null; + try { + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(); + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + Cache aCache = 
cacheManager.getCache("aCache", Long.class, String.class); + aCache.put(0L, "zero"); // <1> + + Context context = StatsUtil.createContext(managementRegistry); // <2> + + managementRegistry.withCapability("ActionsCapability") // <3> + .call("clear") + .on(context) + .build() + .execute(); + + Assert.assertThat(aCache.get(0L), Matchers.is(Matchers.nullValue())); // <4> + } + finally { + if(cacheManager != null) cacheManager.close(); + } // end::actionCall[] } - @Test + //TODO update managingMultipleCacheManagers() documentation/asciidoc public void managingMultipleCacheManagers() throws Exception { // tag::managingMultipleCacheManagers[] CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) .build(); - SharedManagementService sharedManagementService = new DefaultSharedManagementService(); // <1> - CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager-1")) - .using(sharedManagementService) // <2> - .build(true); - - CacheManager cacheManager2 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager-2")) - .using(sharedManagementService) // <3> - .build(true); - - Context context1 = Context.empty() - .with("cacheManagerName", "myCacheManager-1") - .with("cacheName", "aCache"); - - Context context2 = Context.empty() - .with("cacheManagerName", "myCacheManager-2") - .with("cacheName", "aCache"); - - ResultSet counters = sharedManagementService.withCapability("StatisticsCapability") - .queryStatistic("GetCounter") + CacheManager cacheManager1 = null; + CacheManager cacheManager2 = null; + try { + SharedManagementService sharedManagementService = new DefaultSharedManagementService(); // <1> + 
cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager-1").addConfiguration(EHCACHE_STATS_CONFIG)) + .using(sharedManagementService) // <2> + .build(true); + + cacheManager2 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager-2").addConfiguration(EHCACHE_STATS_CONFIG)) + .using(sharedManagementService) // <3> + .build(true); + + Context context1 = Context.empty() + .with("cacheManagerName", "myCacheManager-1") + .with("cacheName", "aCache"); + + Context context2 = Context.empty() + .with("cacheManagerName", "myCacheManager-2") + .with("cacheName", "aCache"); + + Cache cache = cacheManager1.getCache("aCache", Long.class, String.class); + cache.get(1L);//cache miss + cache.get(2L);//cache miss + + StatisticQuery query = sharedManagementService.withCapability("StatisticsCapability") + .queryStatistic("Cache:MissCount") .on(context1) .on(context2) - .build() - .execute(); + .build(); - ContextualStatistics statistics = counters.getResult(context1); - Counter counter = statistics.getStatistic(Counter.class, "GetCounter"); + long val = 0; + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct value : 2 + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext1 = counters.getResult(context1); + + CounterHistory counterContext1 = statisticsContext1.getStatistic(CounterHistory.class, "Cache:MissCount"); + + // miss count is a sampled stat, for example its values could be [0,1,2]. 
+ // In the present case, only the last value is important to us , the cache was eventually missed 2 times + if (counterContext1.getValue().length > 0) { + int mostRecentSampleIndex = counterContext1.getValue().length - 1; + val = counterContext1.getValue()[mostRecentSampleIndex].getValue(); + } + } while(val != 2); + } + finally { + if(cacheManager2 != null) cacheManager2.close(); + if(cacheManager1 != null) cacheManager1.close(); + } - cacheManager2.close(); - cacheManager1.close(); // end::managingMultipleCacheManagers[] } - private static Context createContext(ManagementRegistryService managementRegistry) { - ContextContainer cacheManagerCtx = managementRegistry.getContextContainer(); - ContextContainer firstCacheCtx = cacheManagerCtx.getSubContexts().iterator().next(); - return Context.empty() - .with(cacheManagerCtx.getName(), cacheManagerCtx.getValue()) - .with(firstCacheCtx.getName(), firstCacheCtx.getValue()); - } - } diff --git a/management/src/test/java/org/ehcache/management/cluster/AbstractClusteringManagementTest.java b/management/src/test/java/org/ehcache/management/cluster/AbstractClusteringManagementTest.java deleted file mode 100644 index 2aab98bbf8..0000000000 --- a/management/src/test/java/org/ehcache/management/cluster/AbstractClusteringManagementTest.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.ehcache.management.cluster; - -import org.ehcache.clustered.client.internal.EhcacheClientEntityService; -import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; -import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; -import org.ehcache.clustered.server.EhcacheServerEntityService; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.terracotta.connection.Connection; -import org.terracotta.connection.ConnectionFactory; -import org.terracotta.management.entity.management.ManagementAgentConfig; -import org.terracotta.management.entity.management.client.ContextualReturnListener; -import org.terracotta.management.entity.management.client.ManagementAgentEntityClientService; -import org.terracotta.management.entity.management.client.ManagementAgentEntityFactory; -import org.terracotta.management.entity.management.client.ManagementAgentService; -import org.terracotta.management.entity.management.server.ManagementAgentEntityServerService; -import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntity; -import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntityClientService; -import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntityFactory; -import org.terracotta.management.entity.monitoring.server.MonitoringServiceEntityServerService; -import org.terracotta.management.model.call.ContextualReturn; -import org.terracotta.management.model.call.Parameter; -import org.terracotta.management.model.cluster.ClientIdentifier; -import org.terracotta.management.model.context.Context; -import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; -import org.terracotta.offheapresource.OffHeapResourcesProvider; -import org.terracotta.offheapresource.config.OffheapResourcesType; -import 
org.terracotta.offheapresource.config.ResourceType; -import org.terracotta.passthrough.PassthroughClusterControl; -import org.terracotta.passthrough.PassthroughServer; - -import java.io.Serializable; -import java.math.BigInteger; -import java.net.URI; -import java.util.Collection; -import java.util.Properties; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import static java.util.Arrays.asList; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; - -public abstract class AbstractClusteringManagementTest { - - protected static MonitoringServiceEntity consumer; - - private static PassthroughClusterControl stripeControl; - - @BeforeClass - public static void beforeClass() throws Exception { - PassthroughServer activeServer = new PassthroughServer(); - activeServer.setServerName("server-1"); - activeServer.setBindPort(9510); - activeServer.setGroupPort(9610); - - // management agent entity - activeServer.registerServerEntityService(new ManagementAgentEntityServerService()); - activeServer.registerClientEntityService(new ManagementAgentEntityClientService()); - - // ehcache entity - activeServer.registerServerEntityService(new EhcacheServerEntityService()); - activeServer.registerClientEntityService(new EhcacheClientEntityService()); - - // RW lock entity (required by ehcache) - activeServer.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); - activeServer.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - - activeServer.registerServerEntityService(new MonitoringServiceEntityServerService()); - activeServer.registerClientEntityService(new 
MonitoringServiceEntityClientService()); - - // off-heap service - OffheapResourcesType offheapResourcesType = new OffheapResourcesType(); - ResourceType resourceType = new ResourceType(); - resourceType.setName("primary-server-resource"); - resourceType.setUnit(org.terracotta.offheapresource.config.MemoryUnit.MB); - resourceType.setValue(BigInteger.TEN); - offheapResourcesType.getResource().add(resourceType); - activeServer.registerServiceProvider(new OffHeapResourcesProvider(), new OffHeapResourcesConfiguration(offheapResourcesType)); - - stripeControl = new PassthroughClusterControl("server-1", activeServer); - - consumer = new MonitoringServiceEntityFactory(ConnectionFactory.connect(URI.create("passthrough://server-1:9510/cluster-1"), new Properties())).retrieveOrCreate("MonitoringConsumerEntity"); - consumer.createBestEffortBuffer("client-notifications", 1024, Serializable[].class); - consumer.createBestEffortBuffer("client-statistics", 1024, Serializable[].class); - } - - @AfterClass - public static void afterClass() throws Exception { - if (stripeControl != null) { - stripeControl.tearDown(); - } - } - - @After - public final void clearBuffers() throws Exception { - clear(); - } - - protected final void clear() { - while (consumer.readBuffer("client-notifications", Serializable[].class) != null) ; - while (consumer.readBuffer("client-statistics", Serializable[].class) != null) ; - } - - protected static void sendManagementCallToCollectStats(String... 
statNames) throws Exception { - try (Connection managementConsole = ConnectionFactory.connect(URI.create("passthrough://server-1:9510/"), new Properties())) { - ManagementAgentService agent = new ManagementAgentService(new ManagementAgentEntityFactory(managementConsole).retrieveOrCreate(new ManagementAgentConfig())); - - assertThat(agent.getManageableClients().size(), equalTo(2)); - - // find Ehcache client - ClientIdentifier me = agent.getClientIdentifier(); - ClientIdentifier client = null; - for (ClientIdentifier clientIdentifier : agent.getManageableClients()) { - if (!clientIdentifier.equals(me)) { - client = clientIdentifier; - break; - } - } - assertThat(client, is(notNullValue())); - final ClientIdentifier ehcacheClientIdentifier = client; - - CountDownLatch callCompleted = new CountDownLatch(1); - AtomicReference managementCallId = new AtomicReference<>(); - BlockingQueue> returns = new LinkedBlockingQueue<>(); - - agent.setContextualReturnListener(new ContextualReturnListener() { - @Override - public void onContextualReturn(ClientIdentifier from, String id, ContextualReturn aReturn) { - try { - assertEquals(ehcacheClientIdentifier, from); - // make sure the call completed - callCompleted.await(10, TimeUnit.SECONDS); - assertEquals(managementCallId.get(), id); - returns.offer(aReturn); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - }); - - managementCallId.set(agent.call( - ehcacheClientIdentifier, - Context.create("cacheManagerName", "my-super-cache-manager"), - "StatisticCollectorCapability", - "updateCollectedStatistics", - Collection.class, - new Parameter("StatisticsCapability"), - new Parameter(asList(statNames), Collection.class.getName()))); - - // now we're sure the call completed - callCompleted.countDown(); - - // ensure the call is made - returns.take(); - } - } - - protected static ContextualStatistics[] waitForNextStats() { - // uses the monitoring consumre entity to get the content of the stat buffer when 
some stats are collected - Serializable[] serializables; - while ((serializables = consumer.readBuffer("client-statistics", Serializable[].class)) == null) { Thread.yield(); } - return (ContextualStatistics[]) serializables[1]; - } - -} diff --git a/management/src/test/java/org/ehcache/management/cluster/ClusteringManagementServiceTest.java b/management/src/test/java/org/ehcache/management/cluster/ClusteringManagementServiceTest.java deleted file mode 100644 index 34627247d9..0000000000 --- a/management/src/test/java/org/ehcache/management/cluster/ClusteringManagementServiceTest.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.ehcache.management.cluster; - -import org.ehcache.Cache; -import org.ehcache.CacheManager; -import org.ehcache.Status; -import org.ehcache.ValueSupplier; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; -import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; -import org.ehcache.xml.XmlConfiguration; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.terracotta.management.entity.management.ManagementAgentConfig; -import org.terracotta.management.entity.management.client.ManagementAgentEntityFactory; -import org.terracotta.management.model.capabilities.Capability; -import org.terracotta.management.model.context.ContextContainer; -import org.terracotta.management.model.notification.ContextualNotification; -import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.model.stats.history.CounterHistory; -import org.terracotta.management.model.stats.primitive.Counter; - -import java.io.Serializable; -import java.net.URI; -import java.util.Arrays; -import java.util.Collection; -import java.util.TreeSet; -import java.util.concurrent.TimeUnit; - -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; -import static 
org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.collection.IsCollectionWithSize.hasSize; -import static org.junit.Assert.assertThat; - -@RunWith(Parameterized.class) -public class ClusteringManagementServiceTest extends AbstractClusteringManagementTest { - - @Parameterized.Parameters - public static Collection data() { - return Arrays.asList(new Object[][]{ - { - new ValueSupplier() { - @Override - public CacheManager value() { - return CacheManagerBuilder.newCacheManagerBuilder() - // cluster config - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("passthrough://server-1:9510/my-server-entity-1")) - .autoCreate() - .defaultServerResource("primary-server-resource")) - // management config - .using(new DefaultManagementRegistryConfiguration() - .addTags("webapp-1", "server-node-1") - .setCacheManagerAlias("my-super-cache-manager") - .addConfiguration(new EhcacheStatisticsProviderConfiguration( - 1, TimeUnit.MINUTES, - 100, 1, TimeUnit.SECONDS, - 2, TimeUnit.SECONDS))) // TTD reduce to 2 seconds so that the stat collector runs faster - // cache config - .withCache("cache-1", CacheConfigurationBuilder.newCacheConfigurationBuilder( - String.class, String.class, - newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) - .offheap(1, MemoryUnit.MB) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) - .build()) - .build(true); - } - } - }, { - new ValueSupplier() { - @Override - public CacheManager value() { - CacheManager cacheManager = CacheManagerBuilder.newCacheManager(new XmlConfiguration(getClass().getResource("/ehcache-management-clustered.xml"))); - cacheManager.init(); - return cacheManager; - } - } - }}); - } - - @Rule - public final Timeout globalTimeout = new Timeout(10000); - - private final ValueSupplier cacheManagerValueSupplier; - - private CacheManager cacheManager; - private String clientIdentifier; - private long consumerId; - - public 
ClusteringManagementServiceTest(ValueSupplier cacheManagerValueSupplier) { - this.cacheManagerValueSupplier = cacheManagerValueSupplier; - } - - @Before - public void init() throws Exception { - this.cacheManager = cacheManagerValueSupplier.value(); - - // ensure the CM is running and get its client id - assertThat(cacheManager.getStatus(), equalTo(Status.AVAILABLE)); - consumerId = consumer.getConsumerId(ManagementAgentConfig.ENTITY_TYPE, ManagementAgentEntityFactory.ENTITYNAME); - clientIdentifier = consumer.getChildNamesForNode(consumerId, "management", "clients").iterator().next(); - } - - @After - public void close() throws Exception { - if (cacheManager != null && cacheManager.getStatus() == Status.AVAILABLE) { - cacheManager.close(); - } - } - - @Test - public void test_tags_exposed() throws Exception { - String[] tags = consumer.getValueForNode(consumerId, new String[]{"management", "clients", clientIdentifier, "tags"}, String[].class); - assertThat(tags, equalTo(new String[]{"server-node-1", "webapp-1"})); - } - - @Test - public void test_contextContainer_exposed() throws Exception { - ContextContainer contextContainer = consumer.getValueForNode(consumerId, new String[]{"management", "clients", clientIdentifier, "registry", "contextContainer"}, ContextContainer.class); - assertThat(contextContainer.getValue(), equalTo("my-super-cache-manager")); - assertThat(contextContainer.getSubContexts(), hasSize(1)); - assertThat(contextContainer.getSubContexts().iterator().next().getValue(), equalTo("cache-1")); - } - - @Test - public void test_capabilities_exposed() throws Exception { - Capability[] capabilities = consumer.getValueForNode(consumerId, new String[]{"management", "clients", clientIdentifier, "registry", "capabilities"}, Capability[].class); - assertThat(capabilities.length, equalTo(5)); - assertThat(capabilities[0].getName(), equalTo("ActionsCapability")); - assertThat(capabilities[1].getName(), equalTo("StatisticsCapability")); - 
assertThat(capabilities[2].getName(), equalTo("StatisticCollectorCapability")); - assertThat(capabilities[3].getName(), equalTo("SettingsCapability")); - assertThat(capabilities[4].getName(), equalTo("ManagementAgentService")); - assertThat(capabilities[0].getDescriptors(), hasSize(4)); - assertThat(capabilities[1].getDescriptors(), hasSize(13)); - } - - @Test - public void test_notifs_sent_at_CM_init() throws Exception { - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_TAGS_UPDATED")); - assertThat(consumer.readBuffer("client-notifications", Serializable[].class), is(nullValue())); - } - - @Test - public void test_notifs_on_add_cache() throws Exception { - clear(); - - cacheManager.createCache("cache-2", CacheConfigurationBuilder.newCacheConfigurationBuilder( - String.class, String.class, - newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) - .offheap(1, MemoryUnit.MB) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) - .build()); - - ContextContainer contextContainer = consumer.getValueForNode(consumerId, new String[]{"management", "clients", clientIdentifier, "registry", "contextContainer"}, ContextContainer.class); - assertThat(contextContainer.getSubContexts(), hasSize(2)); - - Collection cNames = new TreeSet(); - for (ContextContainer container : contextContainer.getSubContexts()) { - cNames.add(container.getValue()); - } - assertThat(cNames, equalTo(new TreeSet(Arrays.asList("cache-1", "cache-2")))); - - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", 
Serializable[].class)[1]).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CACHE_ADDED")); - assertThat(consumer.readBuffer("client-notifications", Serializable[].class), is(nullValue())); - } - - @Test - public void test_notifs_on_remove_cache() throws Exception { - test_notifs_on_add_cache(); - - cacheManager.removeCache("cache-2"); - - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CACHE_REMOVED")); - assertThat(consumer.readBuffer("client-notifications", Serializable[].class), is(nullValue())); - } - - @Test - public void test_stats_collection() throws Exception { - - sendManagementCallToCollectStats("GetCounter", "InexistingRate", "AllCacheGetCount"); - - Cache cache1 = cacheManager.getCache("cache-1", String.class, String.class); - cache1.put("key1", "val"); - cache1.put("key2", "val"); - - cache1.get("key1"); - cache1.get("key2"); - - // get the stats (we are getting the primitive counter, not the sample history) - ContextualStatistics[] stats = waitForNextStats(); - - assertThat(stats.length, equalTo(1)); - assertThat(stats[0].getContext().get("cacheName"), equalTo("cache-1")); - assertThat(stats[0].getStatistic(Counter.class, "GetCounter").getValue(), equalTo(2L)); - - // first collect of a sample gives no value because it "triggers" the stat computation - // this is how the internal ehcache's stat framework works: first call to a sample activates it. 
- assertThat(stats[0].getContext().get("cacheName"), equalTo("cache-1")); - assertThat(stats[0].getStatistic(CounterHistory.class, "AllCacheGetCount").getValue().length, equalTo(0)); - - // do some other operations - cache1.get("key1"); - cache1.get("key2"); - - stats = waitForNextStats(); - - assertThat(stats.length, equalTo(1)); - assertThat(stats[0].getContext().get("cacheName"), equalTo("cache-1")); - assertThat(stats[0].getStatistic(Counter.class, "GetCounter").getValue(), equalTo(4L)); - assertThat(stats[0].getContext().get("cacheName"), equalTo("cache-1")); - assertThat(stats[0].getStatistic(CounterHistory.class, "AllCacheGetCount").getValue().length, greaterThanOrEqualTo(1)); - assertThat(stats[0].getStatistic(CounterHistory.class, "AllCacheGetCount").getValue()[0].getValue(), equalTo(4L)); - } - -} diff --git a/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java b/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java index e00af73e16..d628e32223 100644 --- a/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java +++ b/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java @@ -52,13 +52,14 @@ public class EhcacheActionProviderTest { ManagementRegistryServiceConfiguration cmConfig_0 = new DefaultManagementRegistryConfiguration().setContext(cmContext_0); @Test + @SuppressWarnings("unchecked") public void testDescriptions() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig); ehcacheActionProvider.register(new CacheBinding("myCacheName1", mock(EhcacheWithLoaderWriter.class))); ehcacheActionProvider.register(new CacheBinding("myCacheName2", mock(EhcacheWithLoaderWriter.class))); - Collection descriptions = ehcacheActionProvider.getDescriptors(); + Collection descriptions = ehcacheActionProvider.getDescriptors(); assertThat(descriptions.size(), 
is(4)); assertThat(descriptions, (Matcher) containsInAnyOrder( new CallDescriptor("remove", "void", Collections.singletonList(new CallDescriptor.Parameter("key", "java.lang.Object"))), @@ -122,8 +123,10 @@ public void testCallAction_happyPathNoParam() throws Exception { public void testCallAction_happyPathWithParams() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig_0); - EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); - CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); + @SuppressWarnings("unchecked") + EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); + @SuppressWarnings("unchecked") + CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); when(cacheRuntimeConfiguration.getClassLoader()).thenReturn(ClassLoader.getSystemClassLoader()); when(cacheRuntimeConfiguration.getKeyType()).thenReturn(Long.class); when(ehcache.getRuntimeConfiguration()).thenReturn(cacheRuntimeConfiguration); @@ -201,8 +204,10 @@ public void testCallAction_noSuchMethodName() throws Exception { public void testCallAction_noSuchMethod() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig_0); - EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); - CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); + @SuppressWarnings("unchecked") + EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); + @SuppressWarnings("unchecked") + CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); when(cacheRuntimeConfiguration.getClassLoader()).thenReturn(ClassLoader.getSystemClassLoader()); when(ehcache.getRuntimeConfiguration()).thenReturn(cacheRuntimeConfiguration); ehcacheActionProvider.register(new CacheBinding("cache-0", ehcache)); diff --git 
a/management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java b/management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java index fc628dbdaa..a6cb1630a8 100644 --- a/management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java +++ b/management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java @@ -118,7 +118,7 @@ public void test_standalone_ehcache() throws IOException { } private Capability getSettingsCapability() { - for (Capability capability : sharedManagementService.getCapabilities().values().iterator().next()) { + for (Capability capability : sharedManagementService.getCapabilitiesByContext().values().iterator().next()) { if (capability.getName().equals("SettingsCapability")) { return capability; } diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java index f119932510..a7229f0978 100644 --- a/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java @@ -60,23 +60,24 @@ public void tearDown() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testDescriptions() throws Exception { EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, executor) { @Override protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { - EhcacheStatistics mock = mock(EhcacheStatistics.class); - Set descriptors = new HashSet(); + StandardEhcacheStatistics mock = mock(StandardEhcacheStatistics.class); + Collection descriptors = new HashSet(); descriptors.add(new StatisticDescriptor("aCounter", StatisticType.COUNTER)); descriptors.add(new 
StatisticDescriptor("aDuration", StatisticType.DURATION)); descriptors.add(new StatisticDescriptor("aSampledRate", StatisticType.RATE_HISTORY)); - when(mock.getDescriptors()).thenReturn(descriptors); + when(mock.getDescriptors()).thenReturn((Collection) descriptors); return mock; } }; ehcacheStatisticsProvider.register(new CacheBinding("cache-0", mock(EhcacheWithLoaderWriter.class))); - Collection descriptions = ehcacheStatisticsProvider.getDescriptors(); + Collection descriptions = ehcacheStatisticsProvider.getDescriptors(); assertThat(descriptions.size(), is(3)); assertThat(descriptions, (Matcher) containsInAnyOrder( new StatisticDescriptor("aCounter", StatisticType.COUNTER), @@ -90,7 +91,7 @@ public void testCapabilityContext() throws Exception { EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, executor) { @Override protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { - return mock(EhcacheStatistics.class); + return mock(StandardEhcacheStatistics.class); } }; diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java new file mode 100755 index 0000000000..bc8cb222ae --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java @@ -0,0 +1,195 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.ResourcePools; +import org.ehcache.core.EhcacheManager; +import org.ehcache.core.config.DefaultConfiguration; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.ehcache.spi.service.Service; +import org.hamcrest.CoreMatchers; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.history.CounterHistory; + +@RunWith(Parameterized.class) +public class EvictionTest { + + @Parameterized.Parameters + public static 
Collection data() { + + char[] value = new char[1000000]; + Arrays.fill(value, 'x'); + + return asList(new Object[][] { + { newResourcePoolsBuilder().heap(1, ENTRIES), 2, Arrays.asList(1l), new String(value).getBytes(), Arrays.asList("OnHeap:EvictionCount")}, + { newResourcePoolsBuilder().offheap(1, MB), 2, Arrays.asList(1l), new String(value).getBytes(), Arrays.asList("OffHeap:EvictionCount")}, + { newResourcePoolsBuilder().heap(2, ENTRIES).offheap(1, MB), 3, Arrays.asList(0l,2l), new String(value).getBytes(), Arrays.asList("OnHeap:EvictionCount", "OffHeap:EvictionCount")}, + + //FAILS: org.ehcache.core.spi.store.StoreAccessException: The element with key '0' is too large to be stored in this offheap store. + //{ newResourcePoolsBuilder().disk(1, MB), 2, Arrays.asList(1l), new String(value).getBytes(), Arrays.asList("Disk:EvictionCount")}, + + //FAILS: org.ehcache.core.spi.store.StoreAccessException: The element with key '0' is too large to be stored in this offheap store. + //java.lang.IllegalStateException: No Store.Provider found to handle configured resource types [offheap, disk] from {org.ehcache.impl.internal.store.heap.OnHeapStore$Provider, org.ehcache.impl.internal.store.offheap.OffHeapStore$Provider, org.ehcache.impl.internal.store.disk.OffHeapDiskStore$Provider, org.ehcache.impl.internal.store.tiering.TieredStore$Provider, org.ehcache.clustered.client.internal.store.ClusteredStore$Provider} + //{ newResourcePoolsBuilder().offheap(1, MB).disk(2, MB), 3, Arrays.asList(0l,1l), new String(value).getBytes(), Arrays.asList("OffHeap:EvictionCount", "Disk:EvictionCount")}, + + //FAILS: Expects 1 eviction but it evicts twice. Value stored on disk must be > 1MB + //{ newResourcePoolsBuilder().heap(1, ENTRIES).offheap(1, MB).disk(2, MB), 3, Arrays.asList(0l,0l,1l), new String(value).getBytes(), Arrays.asList("OnHeap:EvictionCount","OffHeap:EvictionCount", "Disk:EvictionCount")}, + + //TODO need clustered tests: + //1. clustered + //2. offheap, clustered + //3. 
disk, clustered + //4. onheap, offheap, clustered (This is an invalid configuration) + }); + } + + private final ResourcePools resources; + private final int iterations; + private final List expected; //expectetd outcomes must be ordered from highest tier to lowest tier. e.g. OnHeap, OffHeap, Disk + private final byte[] value; + private final List stats; //must be ordered from highest tier to lowest tier. e.g. OnHeap, OffHeap, Disk + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + @Rule + public final Timeout globalTimeout = Timeout.seconds(60); + + public EvictionTest(Builder resources, int iterations, List expected, byte[] value, List stats) { + this.resources = resources.build(); + this.iterations = iterations; + this.expected = expected; + this.value = value; + this.stats = stats; + } + + @Test + public void test() throws IOException, InterruptedException { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + Configuration config = new DefaultConfiguration(EvictionTest.class.getClassLoader(), + new DefaultPersistenceConfiguration(diskPath.newFolder())); + + Collection services = new ArrayList(); + services.add(managementRegistry); + + CacheManager cacheManager = null; + + try { + cacheManager = new EhcacheManager(config, services); + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Long.class, byte[].class, resources).build(); + + cacheManager.init(); + + Cache cache = cacheManager.createCache("myCache", cacheConfig); + + Context context = StatsUtil.createContext(managementRegistry); + + // we need to trigger first the stat computation with a first query + 
ContextualStatistics contextualStatistics = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(stats) + .on(context) + .build() + .execute() + .getSingleResult(); + assertThat(contextualStatistics.size(), Matchers.is(stats.size())); + + for(long i=0; i statNames; + private final List tierExpectedValues; + private final Long cacheExpectedValue; + + @Parameterized.Parameters + public static Collection data() { + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), singletonList("OnHeap:HitCount"), singletonList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, + { newResourcePoolsBuilder().offheap(1, MB), singletonList("OffHeap:HitCount"), singletonList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, + { newResourcePoolsBuilder().disk(1, MB), singletonList("Disk:HitCount"), singletonList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, + + //2 tiers + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount"), Arrays.asList(2L,2L), CACHE_HIT_TOTAL}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:HitCount","Disk:HitCount"), Arrays.asList(2L,2L), CACHE_HIT_TOTAL}, + + //3 tiers + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount","Disk:HitCount"), Arrays.asList(2L,0L,2L), CACHE_HIT_TOTAL}, + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount","Disk:HitCount"), Arrays.asList(1L,1L,2L), CACHE_HIT_TOTAL}, + }); + } + + public HitCountTest(Builder resources, List statNames, List tierExpectedValues, Long cacheExpectedValue) { + this.resources = resources.build(); + this.statNames = statNames; + this.tierExpectedValues = tierExpectedValues; + this.cacheExpectedValue = cacheExpectedValue; + } + + @Test + public void test() throws InterruptedException, IOException { + + CacheManager cacheManager = null; + + try { + + 
DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources) + .withEvictionAdvisor(new EvictionAdvisor() { + @Override + public boolean adviseAgainstEviction(Long key, String value) { + return key.equals(2L); + } + }) + .build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "Cache:HitCount", "OnHeap:HitCount", "OffHeap:HitCount", "Disk:HitCount"); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + cache.put(1L, "1");//put in lowest tier + cache.put(2L, "2");//put in lowest tier + cache.put(3L, "3");//put in lowest tier + + + cache.get(1L);//HIT lowest tier + cache.get(2L);//HIT lowest tier + cache.get(2L);//HIT highest tier + + cache.get(1L);//HIT middle/highest tier. Depends on tier configuration. 
+ + long tierHitCountSum = 0; + for (int i = 0; i < statNames.size(); i++) { + tierHitCountSum += StatsUtil.getAndAssertExpectedValueFromCounterHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + } + + long cacheHitCount = StatsUtil.getAndAssertExpectedValueFromCounterHistory("Cache:HitCount", context, managementRegistry, cacheExpectedValue); + Assert.assertThat(tierHitCountSum, is(cacheHitCount)); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } + +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java new file mode 100755 index 0000000000..d03f2d6164 --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java @@ -0,0 +1,165 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; + +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.ehcache.config.units.MemoryUnit.MB; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +@Ignore +@RunWith(Parameterized.class) +public class HitLatencyTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(30); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private static final EhcacheStatisticsProviderConfiguration EHCACHE_STATISTICS_PROVIDER_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES); + + private static final Long ITERATIONS = 10L; + private static final List HIT_LATENCY_MIN_STATS = 
Arrays.asList("OnHeap:HitLatencyMinimum","OffHeap:HitLatencyMinimum","Disk:HitLatencyMinimum"); + private static final List HIT_LATENCY_MAX_STATS = Arrays.asList("OnHeap:HitLatencyMaximum","OffHeap:HitLatencyMaximum","Disk:HitLatencyMaximum"); + private static final List HIT_LATENCY_AVG_STATS = Arrays.asList("OnHeap:HitLatencyAverage","OffHeap:HitLatencyAverage","Disk:HitLatencyAverage"); + + private final ResourcePools resources; + private final List hitLatencyMinStatNames; + private final List hitLatencyMaxStatNames; + private final List hitLatencyAvgStatNames; + + @Parameterized.Parameters + public static Collection data() { + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), HIT_LATENCY_MIN_STATS.subList(0,1), HIT_LATENCY_MAX_STATS.subList(0,1), HIT_LATENCY_AVG_STATS.subList(0,1)}, + { newResourcePoolsBuilder().offheap(1, MB), HIT_LATENCY_MIN_STATS.subList(1,2), HIT_LATENCY_MAX_STATS.subList(1,2), HIT_LATENCY_AVG_STATS.subList(1,2)}, + { newResourcePoolsBuilder().disk(1, MB), HIT_LATENCY_MIN_STATS.subList(2,3), HIT_LATENCY_MAX_STATS.subList(2,3), HIT_LATENCY_AVG_STATS.subList(2,3)}, + + //2 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), HIT_LATENCY_MIN_STATS.subList(0,2), HIT_LATENCY_MAX_STATS.subList(0,2), HIT_LATENCY_AVG_STATS.subList(0,2)}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList(HIT_LATENCY_MIN_STATS.get(0),HIT_LATENCY_MIN_STATS.get(2)), Arrays.asList(HIT_LATENCY_MAX_STATS.get(0),HIT_LATENCY_MAX_STATS.get(2)), Arrays.asList(HIT_LATENCY_AVG_STATS.get(0),HIT_LATENCY_AVG_STATS.get(2))}, + //offheap and disk configuration is not valid. 
Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tier + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), HIT_LATENCY_MIN_STATS, HIT_LATENCY_MAX_STATS, HIT_LATENCY_AVG_STATS} + }); + } + + public HitLatencyTest(Builder resources, List hitLatencyMinStatNames, List hitLatencyMaxStatNames, List hitLatencyAvgStatNames) { + this.resources = resources.build(); + this.hitLatencyMinStatNames = hitLatencyMinStatNames; + this.hitLatencyMaxStatNames = hitLatencyMaxStatNames; + this.hitLatencyAvgStatNames = hitLatencyAvgStatNames; + } + + + @Test + public void test() throws IOException, InterruptedException { + CacheManager cacheManager = null; + + try { + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(EHCACHE_STATISTICS_PROVIDER_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "Cache:HitLatencyMinimum","Cache:HitLatencyMaximum","Cache:HitLatencyAverage", + "OnHeap:HitLatencyMinimum","OnHeap:HitLatencyMaximum","OnHeap:HitLatencyAverage", + "OffHeap:HitLatencyMinimum","OffHeap:HitLatencyMaximum","OffHeap:HitLatencyAverage", + "Disk:HitLatencyMinimum","Disk:HitLatencyMaximum","Disk:HitLatencyAverage"); + + for (Long i 
= 0L; i < ITERATIONS; i++) { + cache.put(i, String.valueOf(i)); + } + + //HITS to lowest tier + for (Long i = 0L; i < ITERATIONS; i++) { + cache.get(i); + } + + //HITS, depends on tiering so can be any tier + for (Long i = ITERATIONS-1; i >= 0; i--) { + cache.get(i); + } + + + for (int i = 0; i < hitLatencyMinStatNames.size(); i++) { + double tierHitLatencyMin = StatsUtil.assertExpectedValueFromDurationHistory(hitLatencyMinStatNames.get(i), context, managementRegistry, 0L); + double tierHitLatencyMax = StatsUtil.assertExpectedValueFromDurationHistory(hitLatencyMaxStatNames.get(i), context, managementRegistry, 0L); + double tierHitLatencyAverage = StatsUtil.assertExpectedValueFromAverageHistory(hitLatencyAvgStatNames.get(i), context, managementRegistry); + Assert.assertThat(tierHitLatencyMin, Matchers.lessThanOrEqualTo(tierHitLatencyAverage)); + Assert.assertThat(tierHitLatencyMax, Matchers.greaterThanOrEqualTo(tierHitLatencyAverage)); + + } + + double cacheHitLatencyMinimum = StatsUtil.assertExpectedValueFromDurationHistory("Cache:HitLatencyMinimum", context, managementRegistry, 0L); + double cacheHitLatencyMaximum = StatsUtil.assertExpectedValueFromDurationHistory("Cache:HitLatencyMaximum", context, managementRegistry, 0L); + double cacheHitLatencyAverage = StatsUtil.assertExpectedValueFromAverageHistory("Cache:HitLatencyAverage", context, managementRegistry); + + Assert.assertThat(cacheHitLatencyMinimum, Matchers.lessThanOrEqualTo(cacheHitLatencyAverage)); + Assert.assertThat(cacheHitLatencyMaximum, Matchers.greaterThanOrEqualTo(cacheHitLatencyAverage)); + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitRateTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitRateTest.java new file mode 100755 index 0000000000..87c1b4cf7d --- /dev/null +++ 
b/management/src/test/java/org/ehcache/management/providers/statistics/HitRateTest.java @@ -0,0 +1,153 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.ehcache.config.units.EntryUnit.ENTRIES; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; 
+import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +@RunWith(Parameterized.class) +public class HitRateTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private static final EhcacheStatisticsProviderConfiguration EHCACHE_STATISTICS_PROVIDER_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES); + private static final double CACHE_HIT_RATE = 5.0d / (double)TimeUnit.MINUTES.toSeconds(EHCACHE_STATISTICS_PROVIDER_CONFIG.averageWindowDuration()); + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + + @Parameterized.Parameters + public static Collection data() { + + double seconds = (double)TimeUnit.MINUTES.toSeconds(EHCACHE_STATISTICS_PROVIDER_CONFIG.averageWindowDuration()); + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), Arrays.asList("OnHeap:HitRate"), Arrays.asList(CACHE_HIT_RATE)}, + { newResourcePoolsBuilder().offheap(1, MB), Arrays.asList("OffHeap:HitRate"), Arrays.asList(CACHE_HIT_RATE) }, + { newResourcePoolsBuilder().disk(1, MB), Arrays.asList("Disk:HitRate"), Arrays.asList(CACHE_HIT_RATE) }, + + //2 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:HitRate","OffHeap:HitRate"), Arrays.asList(2d/seconds,3d/seconds)}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:HitRate","Disk:HitRate"), Arrays.asList(2d/seconds,3d/seconds)}, + //offheap and disk configuration is not valid. 
Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitRate","OffHeap:HitRate","Disk:HitRate"), Arrays.asList(2d/seconds,0d,3d/seconds)}, + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitRate","OffHeap:HitRate","Disk:HitRate"), Arrays.asList(1d/seconds,1d/seconds,3d/seconds)}, + }); + } + + public HitRateTest(Builder resources, List statNames, List tierExpectedValues) { + this.resources = resources.build(); + this.statNames = statNames; + this.tierExpectedValues = tierExpectedValues; + } + + @Test + public void test() throws InterruptedException, IOException { + + CacheManager cacheManager = null; + + try { + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(EHCACHE_STATISTICS_PROVIDER_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources) + .withEvictionAdvisor(new EvictionAdvisor() { + @Override + public boolean adviseAgainstEviction(Long key, String value) { + return key.equals(1L); + } + }) + .build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "OnHeap:HitRate","OffHeap:HitRate","Disk:HitRate","Cache:HitRate"); + + //Put values in cache + 
cache.put(1L, "one"); + cache.put(2L, "two"); + cache.put(3L, "three"); + + cache.get(1L);//HIT lowest tier + cache.get(2L);//HIT lowest tier + cache.get(3L);//HIT lowest tier + + cache.get(1L);//HIT higher tier + cache.get(2L);//HIT middle/highest tier (depends on number of tiers) + + //TIER stats + for (int i = 0; i < statNames.size(); i++) { + StatsUtil.getAndAssertExpectedValueFromRateHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + } + + //CACHE stats + StatsUtil.getAndAssertExpectedValueFromRateHistory("Cache:HitRate", context, managementRegistry, CACHE_HIT_RATE); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } + +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java new file mode 100755 index 0000000000..b816952db6 --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java @@ -0,0 +1,180 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.management.providers.statistics; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.ehcache.config.units.MemoryUnit.MB; + +@RunWith(Parameterized.class) +public class HitRatioTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(60); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + private final List getKeys; + private final Double cacheExpectedValue; + + @Parameterized.Parameters + public static Collection data() { + + List statNamesOnHeap = 
singletonList("OnHeap:HitRatio"); + List statNamesOffHeap = singletonList("OffHeap:HitRatio"); + List statNamesDisk = singletonList("Disk:HitRatio"); + List statNamesOnHeapOffHeap = Arrays.asList("OnHeap:HitRatio","OffHeap:HitRatio"); + List statNamesOnHeapDisk = Arrays.asList("OnHeap:HitRatio","Disk:HitRatio"); + List statNamesThreeTiers = Arrays.asList("OnHeap:HitRatio","OffHeap:HitRatio","Disk:HitRatio"); + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1L, 2L, 3L) , singletonList(1d), 1d }, //3 hits, 0 misses + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(4L, 5L) , singletonList(0d), 0d }, //0 hits, 2 misses + + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1L, 2L, 3L), singletonList(1d), 1d }, //3 hits, 0 misses + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 hits, 2 misses + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(4L, 5L) , singletonList(0d), 0d }, //0 hits, 2 misses + + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1L, 2L, 3L) , singletonList(1d), 1d }, //3 hits, 0 misses + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 hits, 2 misses + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(4L, 5L) , singletonList(0d), 0d }, //0 hits, 2 misses + + //2 tiers + + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1L, 2L, 3L) , Arrays.asList(0d,1d), 1d }, //3 offheap hits, 0 misses + + /* + explanation of ratio calc: + + Each get checks the heap first. For every get there is a hit/miss on the heap tier. 
This test checks the heap 4 times. + The first 3 gets are misses and the last get is a hit. + Thus heapHitRatio = 1 hit / 4 attempts = .25 + + If the get key is not in the heap then it checks the tier below. In this case it checks offheap. + This test checks the offheap tier on the first 3 gets, and finds the key on each check. So there are 3 hits. + Thus offHeapHitRatio = 3 hits / 3 attempts = 1 + */ + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1L, 2L, 3L,1L) , Arrays.asList(.25d,1d), 1d },//3 offheap hits, 1 heap hit, 0 misses + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1L, 2L, 4L, 5L) , Arrays.asList(0d,.5), .5d }, //2 offheap hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(4L, 5L) , Arrays.asList(0d,0d), 0d }, //0 hits, 2 misses + + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1L, 2L, 3L,1L) , Arrays.asList(.25d,1d), 1d }, //3 disk hits, 1 heap hit, 0 misses + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1L, 2L, 4L, 5L) , Arrays.asList(0d,.5), .5d }, //2 disk hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(4L, 5L) , Arrays.asList(0d,0d), 0d }, //0 hits, 2 misses + + //3 tiers + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1L, 2L, 3L,1L) , Arrays.asList(.25d,0d,1d), 1d }, //3 disk hits, 1 heap hit, 0 misses + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1L, 2L,2L,1L), Arrays.asList(.25d,(1d / 3d),1d), 1d},//3 disk hits, 1 offheap hit, 1 heap hit, 0 misses + }); + } + + public HitRatioTest(Builder resources, List statNames, List getKeys, List tierExpectedValues, Double cacheExpectedValue) { + this.resources = resources.build(); + 
this.statNames = statNames; + this.getKeys = getKeys; + this.tierExpectedValues = tierExpectedValues; + this.cacheExpectedValue = cacheExpectedValue; + } + + @Test + public void test() throws InterruptedException, IOException { + + CacheManager cacheManager = null; + + try { + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES)); + final ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources) + .withEvictionAdvisor(new EvictionAdvisor() { + @Override + public boolean adviseAgainstEviction(Long key, String value) { + return key.equals(2L); + } + }) + .build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + final Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "Cache:HitRatio", "OnHeap:HitRatio", "OffHeap:HitRatio", "Disk:HitRatio"); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + cache.put(1L, "1");//put in lowest tier + cache.put(2L, "2");//put in lowest tier + cache.put(3L, "3");//put in lowest tier + + for(Long key : getKeys) { + cache.get(key); + } + + for (int i = 0; i < statNames.size(); i++) { + StatsUtil.assertExpectedValueFromRatioHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + } + + StatsUtil.assertExpectedValueFromRatioHistory("Cache:HitRatio", context, managementRegistry, cacheExpectedValue); + + } + 
finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java new file mode 100755 index 0000000000..9606d08840 --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java @@ -0,0 +1,137 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.CoreMatchers.is; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +@RunWith(Parameterized.class) +public class MissCountTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(60); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + private final Long cacheExpectedValue; + + @Parameterized.Parameters + public static Collection data() { + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), singletonList("OnHeap:MissCount"), 
singletonList(2L), 2L }, + { newResourcePoolsBuilder().offheap(1, MB), singletonList("OffHeap:MissCount"), singletonList(2L), 2L }, + { newResourcePoolsBuilder().disk(1, MB), singletonList("Disk:MissCount"), singletonList(2L), 2L }, + + //2 tiers + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:MissCount","OffHeap:MissCount"), Arrays.asList(2L,2L), 2L}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:MissCount","Disk:MissCount"), Arrays.asList(2L,2L), 2L}, + //offheap and disk configuration below is not valid. Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tiers + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:MissCount","OffHeap:MissCount","Disk:MissCount"), Arrays.asList(2L,2L,2L), 2L} + }); + } + + public MissCountTest(Builder resources, List statNames, List tierExpectedValues, Long cacheExpectedValue) { + this.resources = resources.build(); + this.statNames = statNames; + this.tierExpectedValues = tierExpectedValues; + this.cacheExpectedValue = cacheExpectedValue; + } + + @Test + public void test() throws InterruptedException, IOException { + + CacheManager cacheManager = null; + + try { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new 
DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "Cache:MissCount", "OnHeap:MissCount", "OffHeap:MissCount", "Disk:MissCount"); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + cache.put(1L, "1");//put in lowest tier + cache.put(2L, "2");//put in lowest tier + cache.put(3L, "3");//put in lowest tier + + cache.get(4L);//MISS + cache.get(5L);//MISS + + long tierMissCountSum = 0; + for (int i = 0; i < statNames.size(); i++) { + tierMissCountSum += StatsUtil.getAndAssertExpectedValueFromCounterHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + } + + long cacheMissCount = StatsUtil.getAndAssertExpectedValueFromCounterHistory("Cache:MissCount", context, managementRegistry, cacheExpectedValue); + //A cache.get() checks every tier, so there is one miss per tier. However the cache miss count only counts 1 miss regardless of the number of tiers. + Assert.assertThat(tierMissCountSum/statNames.size(), is(cacheMissCount)); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } + +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java new file mode 100755 index 0000000000..ecfb7ec7ef --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java @@ -0,0 +1,158 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +@Ignore +@RunWith(Parameterized.class) +public class MissLatencyTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(60); + + @Rule + public final 
TemporaryFolder diskPath = new TemporaryFolder(); + + private static final EhcacheStatisticsProviderConfiguration EHCACHE_STATISTICS_PROVIDER_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES); + + private static final Long ITERATIONS = 10L; + private static final List MISS_LATENCY_MIN_STATS = Arrays.asList("OnHeap:MissLatencyMinimum","OffHeap:MissLatencyMinimum","Disk:MissLatencyMinimum"); + private static final List MISS_LATENCY_MAX_STATS = Arrays.asList("OnHeap:MissLatencyMaximum","OffHeap:MissLatencyMaximum","Disk:MissLatencyMaximum"); + private static final List MISS_LATENCY_AVG_STATS = Arrays.asList("OnHeap:MissLatencyAverage","OffHeap:MissLatencyAverage","Disk:MissLatencyAverage"); + + private final ResourcePools resources; + private final List missLatencyMinStatNames; + private final List missLatencyMaxStatNames; + private final List missLatencyAvgStatNames; + + @Parameterized.Parameters + public static Collection data() { + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), MISS_LATENCY_MIN_STATS.subList(0,1), MISS_LATENCY_MAX_STATS.subList(0,1), MISS_LATENCY_AVG_STATS.subList(0,1)}, + { newResourcePoolsBuilder().offheap(1, MB), MISS_LATENCY_MIN_STATS.subList(1,2), MISS_LATENCY_MAX_STATS.subList(1,2), MISS_LATENCY_AVG_STATS.subList(1,2)}, + { newResourcePoolsBuilder().disk(1, MB), MISS_LATENCY_MIN_STATS.subList(2,3), MISS_LATENCY_MAX_STATS.subList(2,3), MISS_LATENCY_AVG_STATS.subList(2,3)}, + + //2 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), MISS_LATENCY_MIN_STATS.subList(0,2), MISS_LATENCY_MAX_STATS.subList(0,2), MISS_LATENCY_AVG_STATS.subList(0,2)}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList(MISS_LATENCY_MIN_STATS.get(0),MISS_LATENCY_MIN_STATS.get(2)), Arrays.asList(MISS_LATENCY_MAX_STATS.get(0),MISS_LATENCY_MAX_STATS.get(2)), Arrays.asList(MISS_LATENCY_AVG_STATS.get(0),MISS_LATENCY_AVG_STATS.get(2))}, + 
//offheap and disk configuration is not valid. Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), MISS_LATENCY_MIN_STATS, MISS_LATENCY_MAX_STATS, MISS_LATENCY_AVG_STATS} + }); + } + + public MissLatencyTest(Builder resources, List missLatencyMinStatNames, List missLatencyMaxStatNames, List missLatencyAvgStatNames) { + this.resources = resources.build(); + this.missLatencyMinStatNames = missLatencyMinStatNames; + this.missLatencyMaxStatNames = missLatencyMaxStatNames; + this.missLatencyAvgStatNames = missLatencyAvgStatNames; + } + + @Test + public void test() throws InterruptedException, IOException { + CacheManager cacheManager = null; + + try { + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(EHCACHE_STATISTICS_PROVIDER_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "Cache:MissLatencyMinimum","Cache:MissLatencyMaximum","Cache:MissLatencyAverage", + "OnHeap:MissLatencyMinimum","OnHeap:MissLatencyMaximum","OnHeap:MissLatencyAverage", + "OffHeap:MissLatencyMinimum","OffHeap:MissLatencyMaximum","OffHeap:MissLatencyAverage", + 
"Disk:MissLatencyMinimum","Disk:MissLatencyMaximum","Disk:MissLatencyAverage"); + + for (Long i = 0L; i < ITERATIONS; i++) { + cache.put(i, String.valueOf(i)); + } + + //MISS + for (Long i = ITERATIONS; i < (2*ITERATIONS); i++) { + cache.get(i); + } + + for (int i = 0; i < missLatencyMinStatNames.size(); i++) { + double tierMissLatencyMin = StatsUtil.assertExpectedValueFromDurationHistory(missLatencyMinStatNames.get(i), context, managementRegistry, 0L); + double tierMissLatencyMax = StatsUtil.assertExpectedValueFromDurationHistory(missLatencyMaxStatNames.get(i), context, managementRegistry, 0L); + double tierMissLatencyAverage = StatsUtil.assertExpectedValueFromAverageHistory(missLatencyAvgStatNames.get(i), context, managementRegistry); + Assert.assertThat(tierMissLatencyMin, Matchers.lessThanOrEqualTo(tierMissLatencyAverage)); + Assert.assertThat(tierMissLatencyMax, Matchers.greaterThanOrEqualTo(tierMissLatencyAverage)); + + } + + double cacheMissLatencyMinimum = StatsUtil.assertExpectedValueFromDurationHistory("Cache:MissLatencyMinimum", context, managementRegistry, 0L); + double cacheMissLatencyMaximum = StatsUtil.assertExpectedValueFromDurationHistory("Cache:MissLatencyMaximum", context, managementRegistry, 0L); + double cacheMissLatencyAverage = StatsUtil.assertExpectedValueFromAverageHistory("Cache:MissLatencyAverage", context, managementRegistry); + + Assert.assertThat(cacheMissLatencyMinimum, Matchers.lessThanOrEqualTo(cacheMissLatencyAverage)); + Assert.assertThat(cacheMissLatencyMaximum, Matchers.greaterThanOrEqualTo(cacheMissLatencyAverage)); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissRateTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissRateTest.java new file mode 100755 index 0000000000..24d149cd18 --- /dev/null +++ 
b/management/src/test/java/org/ehcache/management/providers/statistics/MissRateTest.java @@ -0,0 +1,141 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import 
org.terracotta.management.model.context.Context; + + +@RunWith(Parameterized.class) +public class MissRateTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private static final EhcacheStatisticsProviderConfiguration EHCACHE_STATISTICS_PROVIDER_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES); + private static final double CACHE_MISS_RATE = 3.0d / (double)TimeUnit.MINUTES.toSeconds(EHCACHE_STATISTICS_PROVIDER_CONFIG.averageWindowDuration()); + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + + + @Parameterized.Parameters + public static Collection data() { + + double seconds = (double)TimeUnit.MINUTES.toSeconds(EHCACHE_STATISTICS_PROVIDER_CONFIG.averageWindowDuration()); + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), Arrays.asList("OnHeap:MissRate"), Arrays.asList(CACHE_MISS_RATE)}, + { newResourcePoolsBuilder().offheap(1, MB), Arrays.asList("OffHeap:MissRate"), Arrays.asList(CACHE_MISS_RATE) }, + { newResourcePoolsBuilder().disk(1, MB), Arrays.asList("Disk:MissRate"), Arrays.asList(CACHE_MISS_RATE) }, + + //2 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:MissRate","OffHeap:MissRate"), Arrays.asList(3d/seconds,3d/seconds)}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:MissRate","Disk:MissRate"), Arrays.asList(3d/seconds,3d/seconds)}, + //offheap and disk configuration is not valid. 
Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:MissRate","OffHeap:MissRate","Disk:MissRate"), Arrays.asList(3d/seconds,3d/seconds,3d/seconds)}, + }); + } + + public MissRateTest(Builder resources, List statNames, List tierExpectedValues) { + this.resources = resources.build(); + this.statNames = statNames; + this.tierExpectedValues = tierExpectedValues; + } + + @Test + public void test() throws InterruptedException, IOException { + + CacheManager cacheManager = null; + + try { + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(EHCACHE_STATISTICS_PROVIDER_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "OnHeap:MissRate","OffHeap:MissRate","Disk:MissRate","Cache:MissRate"); + + //Put values in cache + cache.put(1L, "one"); + cache.put(2L, "two"); + cache.put(3L, "three"); + + cache.get(4L);//MISS + cache.get(5L);//MISS + cache.get(6L);//MISS + + //TIER stats + for (int i = 0; i < statNames.size(); i++) { + StatsUtil.getAndAssertExpectedValueFromRateHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + } 
+ + //CACHE stats + StatsUtil.getAndAssertExpectedValueFromRateHistory("Cache:MissRate", context, managementRegistry, CACHE_MISS_RATE); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java new file mode 100755 index 0000000000..c321acdc3d --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java @@ -0,0 +1,169 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.ehcache.config.units.MemoryUnit.MB; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +@RunWith(Parameterized.class) +public class MissRatioTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(60); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + private final List getKeys; + private final Double cacheExpectedValue; + + @Parameterized.Parameters + public static Collection data() { + + List statNamesOnHeap = 
singletonList("OnHeap:MissRatio"); + List statNamesOffHeap = singletonList("OffHeap:MissRatio"); + List statNamesDisk = singletonList("Disk:MissRatio"); + List statNamesOnHeapOffHeap = Arrays.asList("OnHeap:MissRatio","OffHeap:MissRatio"); + List statNamesOnHeapDisk = Arrays.asList("OnHeap:MissRatio","Disk:MissRatio"); + List statNamesThreeTiers = Arrays.asList("OnHeap:MissRatio","OffHeap:MissRatio","Disk:MissRatio"); + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1L, 2L, 3L) , singletonList(0d), 0d }, //0 misses, 3 hits + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 misses, 2 hits + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(4L, 5L) , singletonList(1d), 1d }, //0 hits, 2 misses + + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1L, 2L, 3L), singletonList(0d), 0d }, //0 misses, 3 hits + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 misses, 2 hits + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(4L, 5L) , singletonList(1d), 1d }, //2 misses, 0 hits + + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1L, 2L, 3L) , singletonList(0d), 0d }, //0 misses, 3 hits + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 misses, 2 hits + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(4L, 5L) , singletonList(1d), 1d }, //2 misses, 0 hits + + //2 tiers + + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1L, 2L, 3L) , Arrays.asList(1d,0d), 0d }, //3 heap misses, 0 offheap misses, 3 hits + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1L, 2L, 3L,4L) , 
Arrays.asList(1d,.25d), .25d },//4 heap misses, 1 offheap miss, 3 hits + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(4L,5L) , Arrays.asList(1d,1d), 1d }, //2 heap misses, 2 offheap misses, 0 hits + + + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1L, 2L, 3L) , Arrays.asList(1d,0d), 0d }, //3 heap misses, 0 disk misses, 3 hits + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1L, 2L, 3L,4L) , Arrays.asList(1d,.25d), .25d },//4 heap misses, 1 disk miss, 3 hits + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(4L,5L) , Arrays.asList(1d,1d), 1d }, //2 heap misses, 2 disk misses, 0 hits + + //3 tiers + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1L, 2L, 3L,1L) , Arrays.asList(.75d,1d,0d), 0d }, //3 heap misses, 3 offheap misses, 0 disk misses, 4 hits + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1L, 2L,2L,4L), Arrays.asList(.75d,1d, 1d / 3d), 1d / 4d},//3 heap misses, 3 offheap misses, 1 disk miss, 3 hits + }); + + } + + public MissRatioTest(Builder resources, List statNames, List getKeys, List tierExpectedValues, Double cacheExpectedValue) { + this.resources = resources.build(); + this.statNames = statNames; + this.getKeys = getKeys; + this.tierExpectedValues = tierExpectedValues; + this.cacheExpectedValue = cacheExpectedValue; + } + + @Test + public void test() throws InterruptedException, IOException { + + CacheManager cacheManager = null; + + try { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService 
managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources) + .withEvictionAdvisor(new EvictionAdvisor() { + @Override + public boolean adviseAgainstEviction(Long key, String value) { + return key.equals(2L); + } + }) + .build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "Cache:MissRatio", "OnHeap:MissRatio", "OffHeap:MissRatio", "Disk:MissRatio"); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + cache.put(1L, "1");//put in lowest tier + cache.put(2L, "2");//put in lowest tier + cache.put(3L, "3");//put in lowest tier + + for(Long key : getKeys) { + cache.get(key); + } + + for (int i = 0; i < statNames.size(); i++) { + StatsUtil.assertExpectedValueFromRatioHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + } + + StatsUtil.assertExpectedValueFromRatioHistory("Cache:MissRatio", context, managementRegistry, cacheExpectedValue); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } + +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java new file mode 100755 index 0000000000..784d9a71d3 --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java @@ -0,0 +1,105 @@ +/* + * Copyright Terracotta, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import java.util.Arrays; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.Timeout; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.history.CounterHistory; + +import static org.junit.Assert.assertThat; + +public class StandardEhcacheStatisticsTest { + + private final EhcacheStatisticsProviderConfiguration EHCACHE_STATS_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); + + @Rule + public 
final Timeout globalTimeout = Timeout.seconds(10); + + @Test + public void statsClearCacheTest() throws InterruptedException { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB)) + .build(); + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager3"); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + registryConfiguration.addConfiguration(EHCACHE_STATS_CONFIG); + + CacheManager cacheManager = null; + + try { + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("cCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + Cache aCache = cacheManager.getCache("cCache", Long.class, String.class); + aCache.put(1L, "one"); + Assert.assertTrue(aCache.containsKey(1L)); + aCache.clear(); + Assert.assertFalse(aCache.iterator().hasNext()); + + aCache.put(1L, "one"); + Assert.assertTrue(aCache.containsKey(1L)); + aCache.clear(); + Assert.assertFalse(aCache.iterator().hasNext()); + + Thread.sleep(1000); + + Context context = StatsUtil.createContext(managementRegistry); + + CounterHistory cache_Clear_Count; + do { + ContextualStatistics clearCounter = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList("Cache:ClearCount")) + .on(context) + .build() + .execute() + .getSingleResult(); + + assertThat(clearCounter.size(), Matchers.is(1)); + cache_Clear_Count = clearCounter.getStatistic(CounterHistory.class, "Cache:ClearCount"); + } while(!Thread.currentThread().isInterrupted() && !StatsUtil.isHistoryReady(cache_Clear_Count, 0L)); + + int mostRecentIndex = cache_Clear_Count.getValue().length - 1; + assertThat(cache_Clear_Count.getValue()[mostRecentIndex].getValue(), 
Matchers.equalTo(2L)); + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java new file mode 100755 index 0000000000..ba5c6bdd6a --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java @@ -0,0 +1,332 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.management.providers.statistics; + +import static java.util.Collections.singletonList; +import static org.junit.Assert.assertThat; + +import org.ehcache.management.ManagementRegistryService; +import org.hamcrest.Matchers; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.context.ContextContainer; +import org.terracotta.management.model.stats.AbstractStatisticHistory; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Statistic; +import org.terracotta.management.model.stats.StatisticHistory; +import org.terracotta.management.model.stats.history.AverageHistory; +import org.terracotta.management.model.stats.history.CounterHistory; +import org.terracotta.management.model.stats.history.DurationHistory; +import org.terracotta.management.model.stats.history.RateHistory; +import org.terracotta.management.model.stats.history.RatioHistory; +import org.terracotta.management.registry.ResultSet; +import org.terracotta.management.registry.StatisticQuery; +import org.junit.Assert; +import java.util.Arrays; +import java.util.Map; + +public class StatsUtil { + + public static boolean isHistoryReady(AbstractStatisticHistory counterHistory) { + if (counterHistory.getValue().length > 0) { + return true; + } + return false; + } + + public static boolean isHistoryReady(AbstractStatisticHistory history, Long defaultValue) { + if (history.getValue().length > 0) { + int mostRecentIndex = history.getValue().length - 1; + if ((Long) history.getValue()[mostRecentIndex].getValue() > defaultValue) { + return true; + } + } + return false; + } + + public static Context createContext(ManagementRegistryService managementRegistry) { + ContextContainer cacheManagerCtx = managementRegistry.getContextContainer(); + ContextContainer firstCacheCtx = cacheManagerCtx.getSubContexts().iterator().next(); + return Context.empty() + .with(cacheManagerCtx.getName(), 
cacheManagerCtx.getValue()) + .with(firstCacheCtx.getName(), firstCacheCtx.getValue()); + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your expectedResult, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong expectedResult. + */ + public static long getAndAssertExpectedValueFromCounterHistory(String statName, Context context, ManagementRegistryService managementRegistry, long expectedResult) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(singletonList(statName)) + .on(context) + .build(); + + long value = 0; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + assertThat(counters.size(), Matchers.is(1)); + + CounterHistory counterHistory = statisticsContext.getStatistic(CounterHistory.class, statName); + + if (counterHistory.getValue().length > 0) { + int mostRecentIndex = counterHistory.getValue().length - 1; + value = counterHistory.getValue()[mostRecentIndex].getValue(); + } + + } while (!Thread.currentThread().isInterrupted() && value != expectedResult); + + assertThat(value, Matchers.is(expectedResult)); + + return value; + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your expectedResult, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong expectedResult. 
+ */ + public static void assertExpectedValueFromRatioHistory(String statName, Context context, ManagementRegistryService managementRegistry, double expectedResult) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(singletonList(statName)) + .on(context) + .build(); + + double value = 0; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + assertThat(counters.size(), Matchers.is(1)); + + RatioHistory ratioHistory = statisticsContext.getStatistic(RatioHistory.class, statName); + + if (ratioHistory.getValue().length > 0) { + int mostRecentIndex = ratioHistory.getValue().length - 1; + value = ratioHistory.getValue()[mostRecentIndex].getValue(); + } + } while (!Thread.currentThread().isInterrupted() && value != expectedResult); + + assertThat(value, Matchers.is(expectedResult)); + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your minExpectedValue, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong minExpectedValue. 
+ */ + public static long assertExpectedValueFromDurationHistory(String statName, Context context, ManagementRegistryService managementRegistry, Long minExpectedValue) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + Long value = null; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + assertThat(counters.size(), Matchers.is(1)); + + DurationHistory durationHistory = statisticsContext.getStatistic(DurationHistory.class, statName); + + if (durationHistory.getValue().length > 0) { + int mostRecentIndex = durationHistory.getValue().length - 1; + value = durationHistory.getValue()[mostRecentIndex].getValue(); + } + + }while(!Thread.currentThread().isInterrupted() && value == null); + + assertThat(value, Matchers.greaterThan(minExpectedValue)); + + return value; + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your minExpectedValue, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong minExpectedValue. 
+ */ + public static double assertExpectedValueFromAverageHistory(String statName, Context context, ManagementRegistryService managementRegistry) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + double value = Double.NaN; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + assertThat(counters.size(), Matchers.is(1)); + + + AverageHistory avgHistory = statisticsContext.getStatistic(AverageHistory.class, statName);//returns type DurationHistory but it was AverageHistory + + if (avgHistory.getValue().length > 0) { + int mostRecentIndex = avgHistory.getValue().length - 1; + value = avgHistory.getValue()[mostRecentIndex].getValue(); + } + + }while(!Thread.currentThread().isInterrupted() && Double.isNaN(value)); + + assertThat(value, Matchers.greaterThan(0d)); + + return value; + } + + // When testing ratios, we need to wait for the first computation (we do not have any choice) to happen because ratio depends on 2 other sampled statistics. + // If you do not wait, then you'll always get some NaN because the hits will be done within the 1st second, and the hits won't be done in the right "window". + // A ratio is computed by dividing a rate with another rate. See CompoundOperationImpl.ratioOf(). + // And a rate is computed with values aggregated into an EventRateSimpleMovingAverage. + // The call to EventRateSimpleMovingAverage.rateUsingSeconds() will return 0 during the first second (until the first computation has happened). + // So the hits must be after the first second so that values get accumulated into the partitions of EventRateSimpleMovingAverage. + + // Also, we have to take into consideration that in clustered, there is a collector that is scheduled at 75% of the TTD to collect and send stats. + // So the delay can be greater than just the duration of the first sampling. 
+ public static void triggerStatComputation(ManagementRegistryService managementRegistry, Context context, String... statNames) { + boolean noSample; + do { + noSample = false; + Map> statistics = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statNames)) + .on(context) + .build() + .execute() + .getSingleResult() + .getStatistics(); + + for (Map.Entry> entry : statistics.entrySet()) { + if (((StatisticHistory) entry.getValue()).getValue().length < 2) { + noSample = true; + break; + } + } + } while (!Thread.currentThread().isInterrupted() && noSample); + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your expectedResult, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong expectedResult. + */ + public static double getAndAssertExpectedValueFromRateHistory(String statName, Context context, ManagementRegistryService managementRegistry, Double expectedResult) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + Double value = 0d; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + Assert.assertThat(counters.size(), Matchers.is(1)); + + RateHistory rateHistory = statisticsContext.getStatistic(RateHistory.class, statName); + + if (rateHistory.getValue().length > 0) { + int mostRecentIndex = rateHistory.getValue().length - 1; + value = rateHistory.getValue()[mostRecentIndex].getValue(); + } + + }while(!Thread.currentThread().isInterrupted() && !value.equals(expectedResult)); + + Assert.assertThat(value, Matchers.is(expectedResult)); + + return value; + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout 
as it is possible to get an infinite loop. + This should only occur if the stats value is different from your minExpectedValue, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong minExpectedValue. + */ + public static long getExpectedValueFromDurationHistory(String statName, Context context, ManagementRegistryService managementRegistry, Long minExpectedValue) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + Long value = null; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + Assert.assertThat(counters.size(), Matchers.is(1)); + + DurationHistory durationHistory = statisticsContext.getStatistic(DurationHistory.class, statName); + + if (durationHistory.getValue().length > 0) { + int mostRecentIndex = durationHistory.getValue().length - 1; + value = durationHistory.getValue()[mostRecentIndex].getValue(); + } + + }while(!Thread.currentThread().isInterrupted() && value == null); + + Assert.assertThat(value, Matchers.greaterThan(minExpectedValue)); + + return value; + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your minExpectedValue, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong minExpectedValue. 
+ */ + public static double getExpectedValueFromAverageHistory(String statName, Context context, ManagementRegistryService managementRegistry, double minExpectedValue) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + double value = 0; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + Assert.assertThat(counters.size(), Matchers.is(1)); + + AverageHistory avgHistory = statisticsContext.getStatistic(AverageHistory.class, statName); + + if (avgHistory.getValue().length > 0) { + int mostRecentIndex = avgHistory.getValue().length - 1; + value = avgHistory.getValue()[mostRecentIndex].getValue(); + } + + }while(!Thread.currentThread().isInterrupted() && value <= minExpectedValue); + + Assert.assertThat(value, Matchers.greaterThan(minExpectedValue)); + + return value; + } +} diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java index 8867bc2e4d..fb6223ed73 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java @@ -31,8 +31,6 @@ import org.terracotta.management.model.context.Context; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.registry.CapabilityManagement; -import org.terracotta.management.registry.StatisticQuery; import java.util.ArrayList; import java.util.Arrays; @@ -47,38 +45,9 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertThat; -import static 
org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class DefaultCollectorServiceTest { - - @Test - public void updateCollectedStatisticsTest__should_not_add_stats_when_selection_empty() throws Exception { - DefaultCollectorService defaultCollectorService = new DefaultCollectorService(); - defaultCollectorService.updateCollectedStatistics("PifCapability", new ArrayList<>()); - assertThat(defaultCollectorService.getSelectedStatsPerCapability().size(), equalTo(0)); - } - - @Test - public void updateCollectedStatisticsTest__add_stats_and_then_clear_them() throws Exception { - DefaultCollectorService defaultCollectorService = new DefaultCollectorService(); - ManagementRegistryService managementRegistryService = mock(ManagementRegistryService.class); - CapabilityManagement capability = mock(CapabilityManagement.class); - StatisticQuery.Builder builder = mock(StatisticQuery.Builder.class); - when(capability.queryStatistics(new ArrayList(){{add("SuperStat");}})).thenReturn(builder); - when(managementRegistryService.withCapability("PifCapability")).thenReturn(capability); - defaultCollectorService.setManagementRegistry(managementRegistryService); - defaultCollectorService.updateCollectedStatistics("PifCapability", new ArrayList(){{add("SuperStat");}}); - assertThat(defaultCollectorService.getSelectedStatsPerCapability().size(), equalTo(1)); - - - defaultCollectorService.updateCollectedStatistics("PifCapability", new ArrayList<>()); - assertThat(defaultCollectorService.getSelectedStatsPerCapability().size(), equalTo(0)); - - } - - @Test(timeout = 6000) public void test_collector() throws Exception { final Queue messages = new ConcurrentLinkedQueue(); @@ -134,7 +103,7 @@ void onEvent(Object event) { managementRegistry.withCapability("StatisticCollectorCapability") .call("updateCollectedStatistics", new Parameter("StatisticsCapability"), - new Parameter(asList("PutCounter", "InexistingRate"), Collection.class.getName())) + new 
Parameter(asList("Cache:HitCount", "Cache:MissCount"), Collection.class.getName())) .on(Context.create("cacheManagerName", "my-cm-1")) .build() .execute() @@ -144,7 +113,6 @@ void onEvent(Object event) { cache.put("key", "val"); num.await(); - cacheManager.removeCache("my-cache"); cacheManager.close(); diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index 43eb16bfc7..b00b764cf4 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -15,11 +15,26 @@ */ package org.ehcache.management.registry; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.everyItem; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +import java.io.File; import org.ehcache.CacheManager; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.management.ManagementRegistryService; +import org.junit.rules.Timeout; import org.terracotta.management.registry.ResultSet; import org.terracotta.management.registry.StatisticQuery; import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; @@ -31,48 
+46,183 @@ import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.Sample; import org.terracotta.management.model.stats.history.CounterHistory; -import org.terracotta.management.model.stats.primitive.Counter; - +import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.NoSuchElementException; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import static org.hamcrest.Matchers.containsInAnyOrder; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.rules.TemporaryFolder; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.stats.StatisticType; +import org.terracotta.management.registry.StatisticQuery.Builder; -import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.everyItem; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.collection.IsCollectionWithSize.hasSize; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; public class DefaultManagementRegistryServiceTest { + private static final Collection ONHEAP_DESCRIPTORS = new ArrayList(); + private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList(); + private static final Collection DISK_DESCRIPTORS = new ArrayList(); + private static final Collection 
CACHE_DESCRIPTORS = new ArrayList(); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + @Test public void testCanGetContext() { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + CacheManager cacheManager1 = null; + try { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) .build(); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); - assertThat(managementRegistry.getContextContainer().getName(), equalTo("cacheManagerName")); - assertThat(managementRegistry.getContextContainer().getValue(), equalTo("myCM")); - assertThat(managementRegistry.getContextContainer().getSubContexts(), hasSize(1)); - assertThat(managementRegistry.getContextContainer().getSubContexts().iterator().next().getName(), equalTo("cacheName")); - assertThat(managementRegistry.getContextContainer().getSubContexts().iterator().next().getValue(), equalTo("aCache")); + assertThat(managementRegistry.getContextContainer().getName(), equalTo("cacheManagerName")); + assertThat(managementRegistry.getContextContainer().getValue(), equalTo("myCM")); + assertThat(managementRegistry.getContextContainer().getSubContexts(), hasSize(1)); + 
assertThat(managementRegistry.getContextContainer().getSubContexts().iterator().next().getName(), equalTo("cacheName")); + assertThat(managementRegistry.getContextContainer().getSubContexts().iterator().next().getValue(), equalTo("aCache")); + } + finally { + if(cacheManager1 != null) cacheManager1.close(); + } + } + + @Test + public void descriptorOnHeapTest() { + CacheManager cacheManager1 = null; + try { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + assertThat(managementRegistry.getCapabilities(), hasSize(4)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("SettingsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("StatisticsCapability")); + + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); + + Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(); + Collection allDescriptors = new ArrayList(); + allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(CACHE_DESCRIPTORS); + + assertThat(descriptors, containsInAnyOrder(allDescriptors.toArray())); + assertThat(descriptors, hasSize(allDescriptors.size())); + } + finally { + if(cacheManager1 != null) cacheManager1.close(); + } + + } + + @Test + public void 
descriptorOffHeapTest() { + CacheManager cacheManager1 = null; + try { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(5, MB).offheap(10, MB)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + assertThat(managementRegistry.getCapabilities(), hasSize(4)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("SettingsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("StatisticsCapability")); + + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); + + Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(); + Collection allDescriptors = new ArrayList(); + allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(OFFHEAP_DESCRIPTORS); + allDescriptors.addAll(CACHE_DESCRIPTORS); + + assertThat(descriptors, containsInAnyOrder(allDescriptors.toArray())); + assertThat(descriptors, hasSize(allDescriptors.size())); + } + finally { + if(cacheManager1 != null) cacheManager1.close(); + } - cacheManager1.close(); } + @Test + public void descriptorDiskStoreTest() throws URISyntaxException { + PersistentCacheManager persistentCacheManager = null; + try { + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new 
DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(CacheManagerBuilder.persistence(getStoragePath() + File.separator + "myData")) + .withCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .disk(10, MemoryUnit.MB, true)) + ) + .using(managementRegistry) + .build(true); + + assertThat(managementRegistry.getCapabilities(), hasSize(4)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("SettingsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("StatisticsCapability")); + + + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); + + Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(); + Collection allDescriptors = new ArrayList(); + allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(DISK_DESCRIPTORS); + allDescriptors.addAll(CACHE_DESCRIPTORS); + + assertThat(descriptors, containsInAnyOrder(allDescriptors.toArray())); + assertThat(descriptors, hasSize(allDescriptors.size())); + } + finally { + if(persistentCacheManager != null) persistentCacheManager.close(); + } + } + + private String getStoragePath() throws URISyntaxException { + return getClass().getClassLoader().getResource(".").toURI().getPath(); + } + + @Test public void testCanGetCapabilities() { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) @@ -87,25 
+237,36 @@ public void testCanGetCapabilities() { assertThat(managementRegistry.getCapabilities(), hasSize(4)); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("SettingsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("StatisticsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(13)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(), hasSize(ONHEAP_DESCRIPTORS.size() + CACHE_DESCRIPTORS.size())); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getCapabilityContext().getAttributes(), hasSize(2)); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getCapabilityContext().getAttributes(), hasSize(2)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getCapabilityContext().getAttributes(), hasSize(2)); cacheManager1.close(); } @Test public void testCanGetStats() { + String queryStatisticName = "Cache:HitCount"; + + long averageWindowDuration = 1; + TimeUnit averageWindowUnit = TimeUnit.MINUTES; + int historySize = 100; + long historyInterval = 1; + TimeUnit historyIntervalUnit = TimeUnit.MILLISECONDS; + long timeToDisable = 10; + TimeUnit timeToDisableUnit = TimeUnit.MINUTES; + EhcacheStatisticsProviderConfiguration config = new 
EhcacheStatisticsProviderConfiguration(averageWindowDuration,averageWindowUnit,historySize,historyInterval,historyIntervalUnit,timeToDisable,timeToDisableUnit); + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) .build(); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM").addConfiguration(config)); CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() .withCache("aCache1", cacheConfiguration) @@ -121,40 +282,86 @@ public void testCanGetStats() { .with("cacheManagerName", "myCM") .with("cacheName", "aCache2"); - cacheManager1.getCache("aCache1", Long.class, String.class).put(1L, "1"); - cacheManager1.getCache("aCache1", Long.class, String.class).put(2L, "2"); - cacheManager1.getCache("aCache2", Long.class, String.class).put(3L, "3"); - cacheManager1.getCache("aCache2", Long.class, String.class).put(4L, "4"); - cacheManager1.getCache("aCache2", Long.class, String.class).put(5L, "5"); + Cache cache1 = cacheManager1.getCache("aCache1", Long.class, String.class); + Cache cache2 = cacheManager1.getCache("aCache2", Long.class, String.class); - ContextualStatistics counters = managementRegistry.withCapability("StatisticsCapability") - .queryStatistic("PutCounter") - .on(context1) - .build() - .execute() - .getResult(context1); + cache1.put(1L, "one"); + cache2.put(3L, "three"); + + cache1.get(1L); + cache1.get(2L); + cache2.get(3L); + cache2.get(4L); + + Builder builder1 = managementRegistry.withCapability("StatisticsCapability") + .queryStatistic(queryStatisticName) + .on(context1); + + ContextualStatistics counters = getResultSet(builder1, context1, null, CounterHistory.class, 
queryStatisticName).getResult(context1); + CounterHistory counterHistory1 = counters.getStatistic(CounterHistory.class, queryStatisticName); assertThat(counters.size(), equalTo(1)); - assertThat(counters.getStatistic(Counter.class).getValue(), equalTo(2L)); + int mostRecentSampleIndex = counterHistory1.getValue().length - 1; + assertThat(counterHistory1.getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); - ResultSet allCounters = managementRegistry.withCapability("StatisticsCapability") - .queryStatistic("PutCounter") + Builder builder2 = managementRegistry.withCapability("StatisticsCapability") + .queryStatistic(queryStatisticName) .on(context1) - .on(context2) - .build() - .execute(); + .on(context2); + ResultSet allCounters = getResultSet(builder2, context1, context2, CounterHistory.class, queryStatisticName); assertThat(allCounters.size(), equalTo(2)); assertThat(allCounters.getResult(context1).size(), equalTo(1)); - assertThat(allCounters.getResult(context2).size(), Matchers.equalTo(1)); - assertThat(allCounters.getResult(context1).getStatistic(Counter.class).getValue(), equalTo(2L)); - assertThat(allCounters.getResult(context2).getStatistic(Counter.class).getValue(), equalTo(3L)); + assertThat(allCounters.getResult(context2).size(), equalTo(1)); + + mostRecentSampleIndex = allCounters.getResult(context1).getStatistic(CounterHistory.class, queryStatisticName).getValue().length - 1; + assertThat(allCounters.getResult(context1).getStatistic(CounterHistory.class, queryStatisticName).getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); + + mostRecentSampleIndex = allCounters.getResult(context2).getStatistic(CounterHistory.class, queryStatisticName).getValue().length - 1; + assertThat(allCounters.getResult(context2).getStatistic(CounterHistory.class, queryStatisticName).getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); cacheManager1.close(); } + private static ResultSet getResultSet(Builder builder, Context context1, Context context2, Class 
type, String statisticsName) { + ResultSet counters = null; + + while(!Thread.currentThread().isInterrupted()) //wait till Counter history(s) is initialized and contains values. + { + counters = builder.build().execute(); + + ContextualStatistics statisticsContext1 = counters.getResult(context1); + CounterHistory counterHistoryContext1 = statisticsContext1.getStatistic(type, statisticsName); + + if(context2 != null) + { + ContextualStatistics statisticsContext2 = counters.getResult(context2); + CounterHistory counterHistoryContext2 = statisticsContext2.getStatistic(type, statisticsName); + + if(counterHistoryContext2.getValue().length > 0 && + counterHistoryContext2.getValue()[counterHistoryContext2.getValue().length - 1].getValue() > 0 && + counterHistoryContext1.getValue().length > 0 && + counterHistoryContext1.getValue()[counterHistoryContext1.getValue().length - 1].getValue() > 0) + { + break; + } + } + else + { + if(counterHistoryContext1.getValue().length > 0 && + counterHistoryContext1.getValue()[counterHistoryContext1.getValue().length - 1].getValue() > 0) + { + break; + } + } + } + + return counters; + } + @Test public void testCanGetStatsSinceTime() throws InterruptedException { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) .build(); @@ -172,11 +379,11 @@ public void testCanGetStatsSinceTime() throws InterruptedException { .with("cacheName", "aCache1"); StatisticQuery.Builder builder = managementRegistry.withCapability("StatisticsCapability") - .queryStatistic("AllCachePutCount") + .queryStatistic("Cache:MissCount") .on(context); ContextualStatistics statistics; - CounterHistory putCount; + CounterHistory getCount; long timestamp; // ------ @@ -186,32 +393,33 @@ public void testCanGetStatsSinceTime() throws InterruptedException { builder.build().execute(); // ------ - // 3 puts and we wait more than 1 second (history frequency) to be sure the scheduler thread has 
computed a new stat in the history + // 3 gets and we wait more than 1 second (history frequency) to be sure the scheduler thread has computed a new stat in the history // ------ - cacheManager1.getCache("aCache1", Long.class, String.class).put(1L, "1"); - cacheManager1.getCache("aCache1", Long.class, String.class).put(2L, "2"); - cacheManager1.getCache("aCache1", Long.class, String.class).put(2L, "2"); + cacheManager1.getCache("aCache1", Long.class, String.class).get(1L); + cacheManager1.getCache("aCache1", Long.class, String.class).get(2L); + cacheManager1.getCache("aCache1", Long.class, String.class).get(2L); do { Thread.sleep(100); statistics = builder.build().execute().getResult(context); - putCount = statistics.getStatistic(CounterHistory.class); - } while (putCount.getValue().length < 1); + getCount = statistics.getStatistic(CounterHistory.class); + } while (!Thread.currentThread().isInterrupted() && getCount.getValue().length < 1); - // within 1 second of history there has been 3 puts - assertThat(putCount.getValue()[0].getValue(), equalTo(3L)); + // within 1 second of history there has been 3 gets + int mostRecentIndex = getCount.getValue().length - 1; + assertThat(getCount.getValue()[mostRecentIndex].getValue(), equalTo(3L)); // keep time for next call (since) - timestamp = putCount.getValue()[0].getTimestamp(); + timestamp = getCount.getValue()[mostRecentIndex].getTimestamp(); // ------ - // 2 puts and we wait more than 1 second (history frequency) to be sure the scheduler thread has computed a new stat in the history + // 2 gets and we wait more than 1 second (history frequency) to be sure the scheduler thread has computed a new stat in the history // We will get only the stats SINCE last time // ------ - cacheManager1.getCache("aCache1", Long.class, String.class).put(1L, "1"); - cacheManager1.getCache("aCache1", Long.class, String.class).put(2L, "2"); + cacheManager1.getCache("aCache1", Long.class, String.class).get(1L); + 
cacheManager1.getCache("aCache1", Long.class, String.class).get(2L); // ------ // WITHOUT using since: the history will have 2 values @@ -220,30 +428,32 @@ public void testCanGetStatsSinceTime() throws InterruptedException { do { Thread.sleep(100); statistics = builder.build().execute().getResult(context); - putCount = statistics.getStatistic(CounterHistory.class); - } while (putCount.getValue().length < 2); + getCount = statistics.getStatistic(CounterHistory.class); + } while (!Thread.currentThread().isInterrupted() && getCount.getValue().length < 2); // ------ // WITH since: the history will have 1 value // ------ statistics = builder.since(timestamp + 1).build().execute().getResult(context); - putCount = statistics.getStatistic(CounterHistory.class); + getCount = statistics.getStatistic(CounterHistory.class); // get the counter for each computation at each 1 second - assertThat(Arrays.asList(putCount.getValue()), everyItem(Matchers.>hasProperty("timestamp", greaterThan(timestamp)))); + assertThat(Arrays.asList(getCount.getValue()), everyItem(Matchers.>hasProperty("timestamp", greaterThan(timestamp)))); cacheManager1.close(); } @Test public void testCall() throws ExecutionException { + CacheManager cacheManager1 = null; + try { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) .build(); ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() .withCache("aCache1", cacheConfiguration) .withCache("aCache2", cacheConfiguration) .using(managementRegistry) @@ -268,44 +478,103 @@ public void testCall() throws ExecutionException { assertThat(result.getValue(), is(nullValue())); assertThat(cacheManager1.getCache("aCache1", Long.class, 
String.class).get(1L), is(Matchers.nullValue())); + } + finally { + if(cacheManager1 != null) cacheManager1.close(); + } - cacheManager1.close(); } @Test public void testCallOnInexistignContext() { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .build(); - - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - - CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache1", cacheConfiguration) - .withCache("aCache2", cacheConfiguration) - .using(managementRegistry) - .build(true); - - Context inexisting = Context.empty() - .with("cacheManagerName", "myCM2") - .with("cacheName", "aCache2"); - - ResultSet> results = managementRegistry.withCapability("ActionsCapability") - .call("clear") - .on(inexisting) - .build() - .execute(); - - assertThat(results.size(), equalTo(1)); - assertThat(results.getSingleResult().hasExecuted(), is(false)); - + CacheManager cacheManager1 = null; try { - results.getSingleResult().getValue(); - fail(); - } catch (Exception e) { - assertThat(e, instanceOf(NoSuchElementException.class)); + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache1", cacheConfiguration) + .withCache("aCache2", cacheConfiguration) + .using(managementRegistry) + .build(true); + + Context inexisting = Context.empty() + .with("cacheManagerName", "myCM2") + .with("cacheName", "aCache2"); + + ResultSet> results = managementRegistry.withCapability("ActionsCapability") + .call("clear") + .on(inexisting) + 
.build() + .execute(); + + assertThat(results.size(), equalTo(1)); + assertThat(results.getSingleResult().hasExecuted(), is(false)); + + try { + results.getSingleResult().getValue(); + fail(); + } catch (Exception e) { + assertThat(e, instanceOf(NoSuchElementException.class)); + } + } + finally { + if(cacheManager1 != null) cacheManager1.close(); } - cacheManager1.close(); } + @BeforeClass + public static void loadStatsUtil() throws ClassNotFoundException { + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionRate" , StatisticType.RATE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedByteSize" , StatisticType.SIZE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRate" , StatisticType.RATE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRate" , StatisticType.RATE_HISTORY)); + + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionRate", StatisticType.RATE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRate", StatisticType.RATE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedByteSize", StatisticType.SIZE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedByteSize", 
StatisticType.SIZE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MaxMappingCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitCount", StatisticType.COUNTER_HISTORY)); + + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MaxMappingCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRate", StatisticType.RATE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedByteSize", StatisticType.SIZE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionRate", StatisticType.RATE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedByteSize", StatisticType.SIZE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRate", StatisticType.RATE_HISTORY)); + + CACHE_DESCRIPTORS.add(new 
StatisticDescriptor("Cache:HitRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRatio", StatisticType.RATIO_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatio", StatisticType.RATIO_HISTORY)); + + } + + } diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java index e48f878463..3d2bed7c98 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java @@ -19,21 +19,26 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.ManagementRegistryServiceConfiguration; import org.ehcache.management.SharedManagementService; import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.Timeout; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.terracotta.management.model.call.ContextualReturn; import org.terracotta.management.model.capabilities.Capability; import 
org.terracotta.management.model.context.Context; import org.terracotta.management.model.context.ContextContainer; -import org.terracotta.management.registry.ResultSet; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.model.stats.primitive.Counter; +import org.terracotta.management.model.stats.history.CounterHistory; +import org.terracotta.management.registry.ResultSet; +import org.terracotta.management.registry.StatisticQuery; +import org.terracotta.management.registry.StatisticQuery.Builder; import java.util.ArrayList; import java.util.Arrays; @@ -41,6 +46,7 @@ import java.util.List; import java.util.NoSuchElementException; import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.hamcrest.CoreMatchers.equalTo; @@ -64,8 +70,13 @@ public class DefaultSharedManagementServiceTest { ManagementRegistryServiceConfiguration config1; ManagementRegistryServiceConfiguration config2; + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + @Before public void init() { + EhcacheStatisticsProviderConfiguration config = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) .build(); @@ -74,14 +85,14 @@ public void init() { cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() .withCache("aCache1", cacheConfiguration) .using(service) - .using(config1 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM1")) + .using(config1 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM1").addConfiguration(config)) .build(true); cacheManager2 = CacheManagerBuilder.newCacheManagerBuilder() .withCache("aCache2", cacheConfiguration) .withCache("aCache3", cacheConfiguration) 
.using(service) - .using(config2 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM2")) + .using(config2 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM2").addConfiguration(config)) .build(true); // this serie of calls make sure the registry still works after a full init / close / init loop @@ -127,26 +138,30 @@ public void testSharedContexts() { @Test public void testSharedCapabilities() { - assertEquals(2, service.getCapabilities().size()); + assertEquals(2, service.getCapabilitiesByContext().size()); - Collection capabilities1 = service.getCapabilities().get(config1.getContext()); - Collection capabilities2 = service.getCapabilities().get(config2.getContext()); + Collection capabilities1 = service.getCapabilitiesByContext().get(config1.getContext()); + Collection capabilities2 = service.getCapabilitiesByContext().get(config2.getContext()); assertThat(capabilities1, hasSize(4)); assertThat(new ArrayList(capabilities1).get(0).getName(), equalTo("ActionsCapability")); - assertThat(new ArrayList(capabilities1).get(1).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(capabilities1).get(1).getName(), equalTo("SettingsCapability")); assertThat(new ArrayList(capabilities1).get(2).getName(), equalTo("StatisticCollectorCapability")); - assertThat(new ArrayList(capabilities1).get(3).getName(), equalTo("SettingsCapability")); + assertThat(new ArrayList(capabilities1).get(3).getName(), equalTo("StatisticsCapability")); + + assertThat(capabilities2, hasSize(4)); assertThat(new ArrayList(capabilities2).get(0).getName(), equalTo("ActionsCapability")); - assertThat(new ArrayList(capabilities2).get(1).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(capabilities2).get(1).getName(), equalTo("SettingsCapability")); assertThat(new ArrayList(capabilities2).get(2).getName(), equalTo("StatisticCollectorCapability")); - assertThat(new ArrayList(capabilities2).get(3).getName(), 
equalTo("SettingsCapability")); + assertThat(new ArrayList(capabilities2).get(3).getName(), equalTo("StatisticsCapability")); } @Test public void testStats() { + String statisticName = "Cache:MissCount"; + List contextList = Arrays.asList( Context.empty() .with("cacheManagerName", "myCM1") @@ -158,15 +173,14 @@ public void testStats() { .with("cacheManagerName", "myCM2") .with("cacheName", "aCache3")); - cacheManager1.getCache("aCache1", Long.class, String.class).put(1L, "1"); - cacheManager2.getCache("aCache2", Long.class, String.class).put(2L, "2"); - cacheManager2.getCache("aCache3", Long.class, String.class).put(3L, "3"); + cacheManager1.getCache("aCache1", Long.class, String.class).get(1L); + cacheManager2.getCache("aCache2", Long.class, String.class).get(2L); + cacheManager2.getCache("aCache3", Long.class, String.class).get(3L); - ResultSet allCounters = service.withCapability("StatisticsCapability") - .queryStatistic("PutCounter") - .on(contextList) - .build() - .execute(); + Builder builder = service.withCapability("StatisticsCapability") + .queryStatistic(statisticName) + .on(contextList); + ResultSet allCounters = getResultSet(builder, contextList, CounterHistory.class, statisticName); assertThat(allCounters.size(), equalTo(3)); @@ -174,9 +188,36 @@ public void testStats() { assertThat(allCounters.getResult(contextList.get(1)).size(), equalTo(1)); assertThat(allCounters.getResult(contextList.get(2)).size(), equalTo(1)); - assertThat(allCounters.getResult(contextList.get(0)).getStatistic(Counter.class).getValue(), equalTo(1L)); - assertThat(allCounters.getResult(contextList.get(1)).getStatistic(Counter.class).getValue(), equalTo(1L)); - assertThat(allCounters.getResult(contextList.get(2)).getStatistic(Counter.class).getValue(), equalTo(1L)); + + int mostRecentSampleIndex = allCounters.getResult(contextList.get(0)).getStatistic(CounterHistory.class, statisticName).getValue().length - 1; + 
assertThat(allCounters.getResult(contextList.get(0)).getStatistic(CounterHistory.class, statisticName).getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); + + mostRecentSampleIndex = allCounters.getResult(contextList.get(1)).getStatistic(CounterHistory.class, statisticName).getValue().length - 1; + assertThat(allCounters.getResult(contextList.get(1)).getStatistic(CounterHistory.class, statisticName).getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); + + mostRecentSampleIndex = allCounters.getResult(contextList.get(2)).getStatistic(CounterHistory.class, statisticName).getValue().length - 1; + assertThat(allCounters.getResult(contextList.get(2)).getStatistic(CounterHistory.class, statisticName).getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); + + } + + private static ResultSet getResultSet(StatisticQuery.Builder builder, List contextList, Class type, String statisticsName) { + ResultSet counters = null; + + //wait till Counter history is initialized and contains values > 0. 
+ while(!Thread.currentThread().isInterrupted()) { + counters = builder.build().execute(); + + if(counters.getResult(contextList.get(0)).getStatistic(type, statisticsName).getValue().length > 0 && + counters.getResult(contextList.get(0)).getStatistic(type, statisticsName).getValue()[counters.getResult(contextList.get(0)).getStatistic(type, statisticsName).getValue().length - 1].getValue() > 0 && + counters.getResult(contextList.get(1)).getStatistic(type, statisticsName).getValue().length > 0 && + counters.getResult(contextList.get(1)).getStatistic(type, statisticsName).getValue()[counters.getResult(contextList.get(1)).getStatistic(type, statisticsName).getValue().length - 1].getValue() > 0 && + counters.getResult(contextList.get(2)).getStatistic(type, statisticsName).getValue().length > 0 && + counters.getResult(contextList.get(2)).getStatistic(type, statisticsName).getValue()[counters.getResult(contextList.get(2)).getStatistic(type, statisticsName).getValue().length - 1].getValue() > 0) { + break; + } + } + + return counters; } @Test diff --git a/management/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService b/management/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService deleted file mode 100644 index ab3bcf3c65..0000000000 --- a/management/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService +++ /dev/null @@ -1 +0,0 @@ -org.terracotta.passthrough.PassthroughConnectionService diff --git a/management/src/test/resources/ehcache-management-clustered.xml b/management/src/test/resources/ehcache-management-clustered.xml deleted file mode 100644 index e2e5577e13..0000000000 --- a/management/src/test/resources/ehcache-management-clustered.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - - - - - - - - - - - - - webapp-1 - server-node-1 - - - - 1 - 1 - 100 - 2 - - - - - - - java.lang.String - java.lang.String - - 10 - 1 - 1 - - - - diff --git a/osgi-test/build.gradle b/osgi-test/build.gradle 
index 94f4f7b1d1..9fe7987689 100644 --- a/osgi-test/build.gradle +++ b/osgi-test/build.gradle @@ -17,22 +17,39 @@ dependencies { ext { paxExamVersion = '3.5.0' - urlVersion = '1.6.0' felixVersion = '4.4.0' } testCompile project(':impl'), project(':xml'), project(':107'), - 'junit:junit:4.11', - "org.ops4j.pax.exam:pax-exam-junit4:$paxExamVersion", "org.apache.felix:org.apache.felix.framework:$felixVersion", "javax.cache:cache-api:$parent.jcacheVersion" + testCompile ("org.ops4j.pax.exam:pax-exam-junit4:$paxExamVersion") { + exclude group:'junit', module:'junit' + exclude group:'org.slf4j', module:'slf4j-api' + } testRuntime "org.slf4j:slf4j-simple:$parent.slf4jVersion", - "org.ops4j.pax.exam:pax-exam-container-native:$paxExamVersion", - "org.ops4j.pax.exam:pax-exam-link-mvn:$paxExamVersion", - "org.ops4j.pax.url:pax-url-aether:$urlVersion" + testRuntime ("org.ops4j.pax.exam:pax-exam-container-native:$paxExamVersion") { + exclude group:'org.slf4j', module:'slf4j-api' + } + testRuntime ("org.ops4j.pax.exam:pax-exam-link-mvn:$paxExamVersion") { + exclude group:'org.slf4j', module:'slf4j-api' + } +} +configurations.testRuntime { + resolutionStrategy.force 'org.apache.maven.wagon:wagon-provider-api:2.5' + resolutionStrategy.force 'org.codehaus.plexus:plexus-utils:3.0.15' + resolutionStrategy.force 'org.eclipse.aether:aether-api:0.9.0.M4' + resolutionStrategy.force 'org.eclipse.aether:aether-impl:0.9.0.M4' + resolutionStrategy.force 'org.eclipse.aether:aether-spi:0.9.0.M4' + resolutionStrategy.force 'org.eclipse.aether:aether-util:0.9.0.M4' + resolutionStrategy.force 'org.sonatype.plexus:plexus-cipher:1.7' + resolutionStrategy.force 'org.sonatype.plexus:plexus-sec-dispatcher:1.4' +} +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] } sourceSets { diff --git a/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java b/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java index 1dcb49ab7e..c8cc4aad14 100644 --- 
a/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java +++ b/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java @@ -56,6 +56,7 @@ public Option[] config() { @Test @Ignore("Needs https://github.com/jsr107/jsr107spec/issues/326 to be fixed and so will wait on javax.cache:cache-api:1.0.1 only") + @SuppressWarnings("unchecked") public void testJsr107EhcacheOsgi() throws Exception { CachingProvider cachingProvider = Caching.getCachingProvider("org.ehcache.jsr107.EhcacheCachingProvider", getClass().getClassLoader()); CacheManager cacheManager = cachingProvider.getCacheManager(getClass().getResource("/org/ehcache/osgi/ehcache-107-osgi.xml").toURI(), getClass().getClassLoader()); diff --git a/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java b/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java index d217eb1fc1..d22fce3748 100644 --- a/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java +++ b/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java @@ -74,7 +74,7 @@ public void testEhcache3AsBundle() { public void testEhcache3WithSerializationAndClientClass() { CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("myCache", newCacheConfigurationBuilder(Long.class, Person.class, heap(10)) - .add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .withClassLoader(getClass().getClassLoader()) .build()) .build(true); diff --git a/transactions/build.gradle b/transactions/build.gradle index c61ea6d5d0..efb780891a 100644 --- a/transactions/build.gradle +++ b/transactions/build.gradle @@ -17,18 +17,28 @@ group = 'org.ehcache' apply plugin: EhOsgi -apply plugin: EhDeploy apply plugin: EhPomMangle dependencies { compile project(':impl'), project(':xml') compile group: 'javax.transaction', name: 'jta', version: '1.1' testCompile 
project(path: ':core-spi-test') - compile group: 'org.codehaus.btm', name: 'btm', version: '2.1.4' + compile (group: 'org.codehaus.btm', name: 'btm', version: '2.1.4') { + exclude group:'org.slf4j', module:'slf4j-api' + } } // For EhPomMangle dependencies { pomOnlyCompile "org.ehcache:ehcache:$parent.baseVersion" pomOnlyProvided 'javax.transaction:jta:1.1', 'org.codehaus.btm:btm:2.1.4' -} \ No newline at end of file +} + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} + +project.signing { + required { project.isReleaseVersion && project.gradle.taskGraph.hasTask("uploadArchives") } + sign project.configurations.getByName('archives') +} diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java index 9b5e3f0e9e..25f4938b8a 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java @@ -36,7 +36,7 @@ * * @author Ludovic Orban */ -class SoftLockSerializer implements Serializer { +class SoftLockSerializer implements Serializer> { private final ClassLoader classLoader; @@ -45,7 +45,7 @@ class SoftLockSerializer implements Serializer { } @Override - public ByteBuffer serialize(SoftLock object) { + public ByteBuffer serialize(SoftLock object) { ByteArrayOutputStream bout = new ByteArrayOutputStream(); try { ObjectOutputStream oout = new ObjectOutputStream(bout); @@ -64,7 +64,7 @@ public ByteBuffer serialize(SoftLock object) { @SuppressWarnings("unchecked") @Override - public SoftLock read(ByteBuffer entry) throws SerializerException, ClassNotFoundException { + public SoftLock read(ByteBuffer entry) throws SerializerException, ClassNotFoundException { ByteBufferInputStream bin = new ByteBufferInputStream(entry); try { OIS ois = new OIS(bin, classLoader); @@ -85,7 +85,7 @@ public SoftLock 
read(ByteBuffer entry) throws SerializerException, ClassNotFound } @Override - public boolean equals(SoftLock object, ByteBuffer binary) throws SerializerException, ClassNotFoundException { + public boolean equals(SoftLock object, ByteBuffer binary) throws SerializerException, ClassNotFoundException { return object.equals(read(binary)); } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java index afd7b55b02..655b02b06e 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java @@ -29,10 +29,10 @@ */ class SoftLockValueCombinedSerializer implements Serializer> { - private final AtomicReference>> softLockSerializerRef; + private final AtomicReference>> softLockSerializerRef; private final Serializer valueSerializer; - SoftLockValueCombinedSerializer(AtomicReference>> softLockSerializerRef, Serializer valueSerializer) { + SoftLockValueCombinedSerializer(AtomicReference>> softLockSerializerRef, Serializer valueSerializer) { this.softLockSerializerRef = softLockSerializerRef; this.valueSerializer = valueSerializer; } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java index b38f100ebf..d6eaf8db56 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java @@ -23,6 +23,7 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.internal.store.StoreSupport; +import org.ehcache.core.spi.service.DiskResourceService; import 
org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.expiry.Duration; @@ -40,7 +41,6 @@ import org.ehcache.spi.copy.Copier; import org.ehcache.spi.copy.CopyProvider; import org.ehcache.spi.serialization.Serializer; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceDependencies; @@ -710,11 +710,11 @@ public List getConfigurationChangeListeners() return underlyingStore.getConfigurationChangeListeners(); } - private static final class SoftLockValueCombinedSerializerLifecycleHelper { - final AtomicReference softLockSerializerRef; + private static final class SoftLockValueCombinedSerializerLifecycleHelper { + final AtomicReference> softLockSerializerRef; final ClassLoader classLoader; - SoftLockValueCombinedSerializerLifecycleHelper(AtomicReference softLockSerializerRef, ClassLoader classLoader) { + SoftLockValueCombinedSerializerLifecycleHelper(AtomicReference> softLockSerializerRef, ClassLoader classLoader) { this.softLockSerializerRef = softLockSerializerRef; this.classLoader = classLoader; } @@ -730,7 +730,7 @@ public CreatedStoreRef(final Store.Provider storeProvider, final SoftLockValueCo } } - @ServiceDependencies({TimeSourceService.class, JournalProvider.class, CopyProvider.class}) + @ServiceDependencies({TimeSourceService.class, JournalProvider.class, CopyProvider.class, TransactionManagerProvider.class}) public static class Provider implements Store.Provider { private volatile ServiceProvider serviceProvider; @@ -861,7 +861,7 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o }; // get the PersistenceSpaceIdentifier if the cache is persistent, null otherwise - LocalPersistenceService.PersistenceSpaceIdentifier persistenceSpaceId = findSingletonAmongst(LocalPersistenceService.PersistenceSpaceIdentifier.class, serviceConfigs); + 
DiskResourceService.PersistenceSpaceIdentifier persistenceSpaceId = findSingletonAmongst(DiskResourceService.PersistenceSpaceIdentifier.class, (Object[]) serviceConfigs); // find the copiers Collection copierConfigs = findAmongst(DefaultCopierConfiguration.class, underlyingServiceConfigs); @@ -878,19 +878,19 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o // force-in a key copier if none is configured if (keyCopierConfig == null) { - underlyingServiceConfigs.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)); + underlyingServiceConfigs.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); } else { underlyingServiceConfigs.add(keyCopierConfig); } // force-in a value copier if none is configured, or wrap the configured one in a soft lock copier if (valueCopierConfig == null) { - underlyingServiceConfigs.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)); + underlyingServiceConfigs.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } else { CopyProvider copyProvider = serviceProvider.getService(CopyProvider.class); - Copier valueCopier = copyProvider.createValueCopier(storeConfig.getValueType(), storeConfig.getValueSerializer(), valueCopierConfig); - SoftLockValueCombinedCopier softLockValueCombinedCopier = new SoftLockValueCombinedCopier(valueCopier); - underlyingServiceConfigs.add(new DefaultCopierConfiguration((Copier) softLockValueCombinedCopier, DefaultCopierConfiguration.Type.VALUE)); + Copier valueCopier = copyProvider.createValueCopier(storeConfig.getValueType(), storeConfig.getValueSerializer(), valueCopierConfig); + Copier> softLockValueCombinedCopier = new SoftLockValueCombinedCopier(valueCopier); + underlyingServiceConfigs.add(new DefaultCopierConfiguration>(softLockValueCombinedCopier, 
DefaultCopierConfiguration.Type.VALUE)); } // lookup the required XAStore services @@ -898,13 +898,15 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o TimeSource timeSource = serviceProvider.getService(TimeSourceService.class).getTimeSource(); // create the soft lock serializer - AtomicReference>> softLockSerializerRef = new AtomicReference>>(); - SoftLockValueCombinedSerializer softLockValueCombinedSerializer = new SoftLockValueCombinedSerializer(softLockSerializerRef, storeConfig.getValueSerializer()); + AtomicReference> softLockSerializerRef = new AtomicReference>(); + SoftLockValueCombinedSerializer softLockValueCombinedSerializer = new SoftLockValueCombinedSerializer(softLockSerializerRef, storeConfig.getValueSerializer()); // create the underlying store - Store.Configuration> underlyingStoreConfig = new StoreConfigurationImpl>(storeConfig.getKeyType(), (Class) SoftLock.class, evictionAdvisor, + @SuppressWarnings("unchecked") + Class> softLockClass = (Class) SoftLock.class; + Store.Configuration> underlyingStoreConfig = new StoreConfigurationImpl>(storeConfig.getKeyType(), softLockClass, evictionAdvisor, storeConfig.getClassLoader(), expiry, storeConfig.getResourcePools(), storeConfig.getDispatcherConcurrency(), storeConfig.getKeySerializer(), softLockValueCombinedSerializer); - Store> underlyingStore = (Store) underlyingStoreProvider.createStore(underlyingStoreConfig, underlyingServiceConfigs.toArray(new ServiceConfiguration[0])); + Store> underlyingStore = underlyingStoreProvider.createStore(underlyingStoreConfig, underlyingServiceConfigs.toArray(new ServiceConfiguration[0])); // create the XA store TransactionManagerWrapper transactionManagerWrapper = transactionManagerProvider.getTransactionManagerWrapper(); @@ -912,8 +914,8 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o transactionManagerWrapper, timeSource, journal, uniqueXAResourceId); // create the softLockSerializer lifecycle helper - 
SoftLockValueCombinedSerializerLifecycleHelper helper = - new SoftLockValueCombinedSerializerLifecycleHelper((AtomicReference)softLockSerializerRef, storeConfig.getClassLoader()); + SoftLockValueCombinedSerializerLifecycleHelper helper = + new SoftLockValueCombinedSerializerLifecycleHelper(softLockSerializerRef, storeConfig.getClassLoader()); createdStores.put(store, new CreatedStoreRef(underlyingStoreProvider, helper)); return store; @@ -927,7 +929,7 @@ public void releaseStore(Store resource) { } Store.Provider underlyingStoreProvider = createdStoreRef.storeProvider; - SoftLockValueCombinedSerializerLifecycleHelper helper = createdStoreRef.lifecycleHelper; + SoftLockValueCombinedSerializerLifecycleHelper helper = createdStoreRef.lifecycleHelper; if (resource instanceof XAStore) { XAStore xaStore = (XAStore) resource; @@ -946,6 +948,7 @@ public void releaseStore(Store resource) { } @Override + @SuppressWarnings("unchecked") public void initStore(Store resource) { CreatedStoreRef createdStoreRef = createdStores.get(resource); if (createdStoreRef == null) { @@ -953,7 +956,7 @@ public void initStore(Store resource) { } Store.Provider underlyingStoreProvider = createdStoreRef.storeProvider; - SoftLockValueCombinedSerializerLifecycleHelper helper = createdStoreRef.lifecycleHelper; + SoftLockValueCombinedSerializerLifecycleHelper helper = createdStoreRef.lifecycleHelper; if (resource instanceof XAStore) { XAStore xaStore = (XAStore) resource; diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java index 688257416e..7550b17807 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java @@ -107,7 +107,8 @@ public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) 
return false; - XAValueHolder that = (XAValueHolder) other; + @SuppressWarnings("unchecked") + XAValueHolder that = (XAValueHolder) other; if (!super.equals(that)) return false; return value.equals(that.value); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java index 92a6c943ec..43df56ff50 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java @@ -17,11 +17,12 @@ package org.ehcache.transactions.xa.internal.journal; import org.ehcache.CachePersistenceException; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.spi.persistence.PersistableResourceService; +import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -29,20 +30,21 @@ /** * @author Ludovic Orban */ +@ServiceDependencies(DiskResourceService.class) public class DefaultJournalProvider implements JournalProvider { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultJournalProvider.class); - private volatile LocalPersistenceService persistenceService; + private volatile DiskResourceService diskResourceService; @Override public void start(ServiceProvider serviceProvider) { - this.persistenceService = serviceProvider.getService(LocalPersistenceService.class); + this.diskResourceService = serviceProvider.getService(DiskResourceService.class); } @Override public void stop() { - this.persistenceService = null; + this.diskResourceService = 
null; } @Override @@ -54,7 +56,7 @@ public Journal getJournal(PersistableResourceService.PersistenceSpaceIden try { LOGGER.info("Using persistent XAStore journal"); - FileBasedPersistenceContext persistenceContext = persistenceService.createPersistenceContextWithin(persistentSpaceId, "XAJournal"); + FileBasedPersistenceContext persistenceContext = diskResourceService.createPersistenceContextWithin(persistentSpaceId, "XAJournal"); return new PersistentJournal(persistenceContext.getDirectory(), keySerializer); } catch (CachePersistenceException cpe) { throw new RuntimeException(cpe); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java index efb048d954..2fe690150a 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java @@ -96,6 +96,7 @@ public void open() throws IOException { boolean valid = ois.readBoolean(); states.clear(); if (valid) { + @SuppressWarnings("unchecked") Map> readStates = (Map>) ois.readObject(); for (Map.Entry> entry : readStates.entrySet()) { SerializableEntry value = entry.getValue(); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java index a32412d032..5edb71e685 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java @@ -27,6 +27,7 @@ * * @see LookupTransactionManagerProvider */ +@ServiceFactory.RequiresConfiguration public 
class DefaultTransactionManagerProviderFactory implements ServiceFactory { /** diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java index fe283f2a82..fcbbd82636 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java @@ -57,7 +57,9 @@ public ServiceCreationConfiguration parseServiceCrea try { ClassLoader defaultClassLoader = ClassLoading.getDefaultClassLoader(); Class aClass = Class.forName(transactionManagerProviderConfigurationClassName, true, defaultClassLoader); - return new LookupTransactionManagerProviderConfiguration((Class) aClass); + @SuppressWarnings("unchecked") + Class clazz = (Class) aClass; + return new LookupTransactionManagerProviderConfiguration(clazz); } catch (Exception e) { throw new XmlConfigurationException("Error configuring XA transaction manager", e); } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java b/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java index 636e34bb64..be431d936b 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java @@ -39,7 +39,6 @@ * unless it can be considered a singleton. *

*/ -@ServiceDependencies(XAStore.Provider.class) public class LookupTransactionManagerProvider implements TransactionManagerProvider { private final TransactionManagerLookup lookup; diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java b/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java index ad42cee91d..1b7ea5217e 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java @@ -25,6 +25,7 @@ public class LookupTransactionManagerProviderConfiguration implements ServiceCre private final Class lookupClass; + @SuppressWarnings("unchecked") public LookupTransactionManagerProviderConfiguration(String className) throws ClassNotFoundException { this.lookupClass = (Class) Class.forName(className); } diff --git a/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java b/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java index b2ab065b52..d2fec36d9d 100644 --- a/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java +++ b/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java @@ -128,6 +128,7 @@ public void testNonTransactionalAccess() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testXACacheWithWriteThrough() throws Exception { // tag::testXACacheWithWriteThrough[] BitronixTransactionManager transactionManager = diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java index e05d6671d4..d4bd899042 100644 --- 
a/transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java @@ -20,7 +20,10 @@ import org.ehcache.transactions.xa.internal.journal.Journal; import org.ehcache.transactions.xa.utils.TestXid; import org.hamcrest.Matchers; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import javax.transaction.xa.XAException; import javax.transaction.xa.XAResource; @@ -47,13 +50,22 @@ */ public class EhcacheXAResourceTest { + @Mock + private Store> underlyingStore; + @Mock + private Journal journal; + @Mock + private XATransactionContextFactory xaTransactionContextFactory; + @Mock + private XATransactionContext xaTransactionContext; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + @Test public void testStartEndWorks() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -71,11 +83,6 @@ public void testStartEndWorks() throws Exception { @Test public void testTwoNonEndedStartsFails() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, 
journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -92,10 +99,6 @@ public void testTwoNonEndedStartsFails() throws Exception { @Test public void testEndWithoutStartFails() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); try { @@ -108,11 +111,6 @@ public void testEndWithoutStartFails() throws Exception { @Test public void testJoinWorks() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -127,10 +125,6 @@ public void testJoinWorks() throws Exception { @Test public void testRecoverReportsAbortedTx() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.recover()).thenReturn(Collections.singletonMap(new TransactionId(new TestXid(0, 0)), (Collection) Arrays.asList(1L, 2L, 3L))); @@ -142,10 +136,6 @@ public void 
testRecoverReportsAbortedTx() throws Exception { @Test public void testRecoverIgnoresInFlightTx() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.recover()).thenReturn(Collections.singletonMap(new TransactionId(new TestXid(0, 0)), (Collection) Arrays.asList(1L, 2L, 3L))); @@ -157,10 +147,6 @@ public void testRecoverIgnoresInFlightTx() throws Exception { @Test public void testCannotPrepareUnknownXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); try { @@ -173,11 +159,6 @@ public void testCannotPrepareUnknownXid() throws Exception { @Test public void testCannotPrepareNonEndedXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -193,11 +174,6 @@ public void testCannotPrepareNonEndedXid() throws Exception { @Test public void testPrepareOk() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory 
xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -211,11 +187,6 @@ public void testPrepareOk() throws Exception { @Test public void testPrepareReadOnly() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -229,11 +200,6 @@ public void testPrepareReadOnly() throws Exception { @Test public void testCannotCommitUnknownXidInFlight() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(false); @@ -250,10 +216,6 @@ public void testCannotCommitUnknownXidInFlight() throws Exception { @Test public void testCannotCommitUnknownXidRecovered() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new 
EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(false); @@ -268,10 +230,6 @@ public void testCannotCommitUnknownXidRecovered() throws Exception { @Test public void testCannotCommit1PcUnknownXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); try { @@ -284,11 +242,6 @@ public void testCannotCommit1PcUnknownXid() throws Exception { @Test public void testCannotCommit1PcNonEndedXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -304,11 +257,6 @@ public void testCannotCommit1PcNonEndedXid() throws Exception { @Test public void testCannotCommitNonPreparedXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -323,11 
+271,6 @@ public void testCannotCommitNonPreparedXid() throws Exception { @Test public void testCannotCommit1PcPreparedXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -342,11 +285,6 @@ public void testCannotCommit1PcPreparedXid() throws Exception { @Test public void testCommit() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -357,11 +295,6 @@ public void testCommit() throws Exception { @Test public void testCommit1Pc() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -372,11 +305,6 @@ public void testCommit1Pc() throws Exception { @Test public void 
testCannotRollbackUnknownXidInFlight() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -392,10 +320,6 @@ public void testCannotRollbackUnknownXidInFlight() throws Exception { @Test public void testCannotRollbackUnknownXidRecovered() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(false); @@ -410,11 +334,6 @@ public void testCannotRollbackUnknownXidRecovered() throws Exception { @Test public void testCannotRollbackNonEndedXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -430,11 +349,6 @@ public void testCannotRollbackNonEndedXid() throws Exception { @Test public void testRollback() throws Exception { - Store> underlyingStore = 
mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -445,10 +359,6 @@ public void testRollback() throws Exception { @Test public void testForgetUnknownXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(false); @@ -463,10 +373,6 @@ public void testForgetUnknownXid() throws Exception { @Test public void testForgetInDoubtXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); @@ -481,10 +387,6 @@ public void testForgetInDoubtXid() throws Exception { @Test public void testForget() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isHeuristicallyTerminated(eq(new TransactionId(new TestXid(0, 
0))))).thenReturn(true); @@ -496,11 +398,6 @@ public void testForget() throws Exception { @Test public void testTimeoutStart() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -519,11 +416,6 @@ public void testTimeoutStart() throws Exception { @Test public void testTimeoutEndSuccess() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -545,11 +437,6 @@ public void testTimeoutEndSuccess() throws Exception { @Test public void testTimeoutEndFail() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new 
TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -570,12 +457,8 @@ public void testTimeoutEndFail() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testPrepareTimeout() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -593,11 +476,6 @@ public void testPrepareTimeout() throws Exception { @Test public void testCommit1PcTimeout() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -615,10 +493,6 @@ public void testCommit1PcTimeout() throws Exception { @Test public void testRecoveryCommitOnePhaseFails() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.recover()).thenReturn(Collections.singletonMap(new TransactionId(new TestXid(0, 0)), (Collection) Arrays.asList(1L, 2L, 3L))); @@ 
-639,10 +513,6 @@ public void testRecoveryCommitOnePhaseFails() throws Exception { @Test public void testRecoveryCommit() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.recover()).thenReturn(Collections.singletonMap(new TransactionId(new TestXid(0, 0)), (Collection) Arrays.asList(1L, 2L, 3L))); @@ -662,10 +532,6 @@ public void testRecoveryCommit() throws Exception { @Test public void testRecoveryRollback() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java index acdc433304..04d88dd867 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java @@ -19,6 +19,7 @@ import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.service.ServiceConfiguration; import org.junit.Test; import java.util.HashSet; @@ -36,7 +37,8 @@ public class UnSupportedResourceTypeTest { public void testUnSupportedResourceType() { XAStore.Provider provider = new XAStore.Provider(); - Store.Configuration configuration = 
mock(Store.Configuration.class); + @SuppressWarnings("unchecked") + Store.Configuration configuration = mock(Store.Configuration.class); ResourcePools resourcePools = mock(ResourcePools.class); Set> resourceTypes = new HashSet>(); @@ -46,7 +48,7 @@ public void testUnSupportedResourceType() { when(resourcePools.getResourceTypeSet()).thenReturn(resourceTypes); try { - provider.createStore(configuration, null); + provider.createStore(configuration, (ServiceConfiguration) null); fail("IllegalStateException expected"); } catch (IllegalStateException e) { diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java index af8b4fe871..df914d04b0 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java @@ -27,6 +27,7 @@ import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; import org.ehcache.expiry.Duration; import org.ehcache.expiry.Expirations; @@ -38,7 +39,6 @@ import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.impl.internal.spi.copy.DefaultCopyProvider; -import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.MemorySizeParser; import org.ehcache.impl.internal.store.offheap.OffHeapStore; @@ -57,12 +57,10 @@ import org.ehcache.transactions.xa.internal.journal.TransientJournal; import org.ehcache.transactions.xa.internal.txmgr.NullXAResourceRegistry; import org.ehcache.transactions.xa.txmgr.TransactionManagerWrapper; -import 
org.ehcache.transactions.xa.txmgr.btm.BitronixTransactionManagerLookup; -import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProvider; -import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerProvider; import org.ehcache.transactions.xa.utils.JavaSerializer; import org.ehcache.transactions.xa.utils.TestXid; +import org.junit.Before; import org.junit.Test; import java.util.Arrays; @@ -95,6 +93,9 @@ import javax.transaction.xa.XAResource; import static java.util.Collections.emptySet; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.expiry.Duration.of; +import static org.ehcache.expiry.Expirations.timeToLiveExpiration; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -109,7 +110,38 @@ */ public class XAStoreTest { + @SuppressWarnings("unchecked") + private final Class> valueClass = (Class) SoftLock.class; private final TestTransactionManager testTransactionManager = new TestTransactionManager(); + private TransactionManagerWrapper transactionManagerWrapper; + private OnHeapStore> onHeapStore; + private Journal journal; + private TestTimeSource testTimeSource; + private ClassLoader classLoader; + private Serializer keySerializer; + private Serializer> valueSerializer; + private StoreEventDispatcher> eventDispatcher; + private final Expiry expiry = timeToLiveExpiration(of(1, TimeUnit.SECONDS)); + private Copier keyCopier; + private Copier> valueCopier; + + @Before + public void setUp() { + transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); + classLoader = ClassLoader.getSystemClassLoader(); + keySerializer = new JavaSerializer(classLoader); + valueSerializer = new JavaSerializer>(classLoader); + CopyProvider copyProvider = new 
DefaultCopyProvider(new DefaultCopyProviderConfiguration()); + keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); + valueCopier = copyProvider.createValueCopier(valueClass, valueSerializer); + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, + null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), + 0, keySerializer, valueSerializer); + testTimeSource = new TestTimeSource(); + eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); + onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + journal = new TransientJournal(); + } @Test public void testXAStoreProviderFailsToRankWhenNoTMProviderConfigured() throws Exception { @@ -137,19 +169,6 @@ public Collection getServicesOfType(Class serviceType) @Test public void testSimpleGetPutRemove() throws Exception { String uniqueXAResourceId = "testSimpleGetPutRemove"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new 
OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -204,20 +223,6 @@ public void testSimpleGetPutRemove() throws Exception { @Test public void testConflictingGetPutRemove() throws Exception { String uniqueXAResourceId = "testConflictingGetPutRemove"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); final AtomicReference exception = new AtomicReference(); @@ -335,20 +340,6 @@ public void run() { @Test public void testIterate() throws Exception { String 
uniqueXAResourceId = "testIterate"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); testTransactionManager.begin(); @@ -438,20 +429,6 @@ public void testIterate() throws Exception { @Test public void testPutIfAbsent() throws Exception { String uniqueXAResourceId = "testPutIfAbsent"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = 
copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); final AtomicReference exception = new AtomicReference(); @@ -501,20 +478,6 @@ public Object call() throws Exception { @Test public void testRemove2Args() throws Exception { String uniqueXAResourceId = "testRemove2Args"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> 
onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); final AtomicReference exception = new AtomicReference(); @@ -600,20 +563,6 @@ public Object call() throws Exception { @Test public void testReplace2Args() throws Exception { String uniqueXAResourceId = "testReplace2Args"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); final AtomicReference exception = new AtomicReference(); @@ -698,20 +647,6 @@ public Object call() throws 
Exception { @Test public void testReplace3Args() throws Exception { String uniqueXAResourceId = "testReplace3Args"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); final AtomicReference exception = new AtomicReference(); @@ -794,23 +729,10 @@ public Object call() throws Exception { @Test public void testCompute() throws Exception { String uniqueXAResourceId = "testCompute"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new 
DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, - classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); Journal journal = new TransientJournal(); @@ -1034,23 +956,10 @@ public String apply(Long aLong, String s) { @Test public void testComputeIfAbsent() throws Exception { String uniqueXAResourceId = "testComputeIfAbsent"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new 
JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, - classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); Journal journal = new TransientJournal(); @@ -1112,24 +1021,14 @@ public String apply(Long aLong) { @Test public void testExpiry() throws Exception { String uniqueXAResourceId = "testExpiry"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new 
NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Expiry expiry = Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS)); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, - classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, + null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, 
testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); Journal journal = new TransientJournal(); @@ -1154,13 +1053,6 @@ public void testExpiry() throws Exception { @Test public void testExpiryCreateException() throws Exception { String uniqueXAResourceId = "testExpiryCreateException"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); Expiry expiry = new Expiry() { @Override @@ -1178,19 +1070,16 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o throw new AssertionError(); } }; - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, 
valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, tieredStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1203,14 +1092,6 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o @Test public void testExpiryAccessException() throws Exception { String uniqueXAResourceId = "testExpiryAccessException"; - final TestTimeSource testTimeSource = new TestTimeSource(); - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); Expiry expiry = new Expiry() { @Override @@ -1231,18 +1112,16 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o return Duration.INFINITE; } }; - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + 
Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, tieredStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1263,14 +1142,6 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o @Test public void testExpiryUpdateException() throws Exception{ String uniqueXAResourceId = "testExpiryUpdateException"; - final TestTimeSource testTimeSource = new TestTimeSource(); - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = 
ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); Expiry expiry = new Expiry() { @Override @@ -1291,18 +1162,16 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o return Duration.INFINITE; } }; - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new 
TieredStore>(onHeapStore, offHeapStore); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, tieredStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1321,27 +1190,17 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o @Test public void testBulkCompute() throws Exception { String uniqueXAResourceId = "testBulkCompute"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); Expiry expiry = Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS)); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> 
offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, tieredStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1413,27 +1272,17 @@ public void testBulkCompute() throws Exception { @Test public void testBulkComputeIfAbsent() throws Exception { String uniqueXAResourceId = "testBulkComputeIfAbsent"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); Expiry expiry = Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS)); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - 
TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, tieredStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1493,14 +1342,6 @@ public void testBulkComputeIfAbsent() throws Exception { @Test public void testCustomEvictionAdvisor() throws Exception { String uniqueXAResourceId = "testCustomEvictionAdvisor"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier 
keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - final AtomicBoolean invoked = new AtomicBoolean(); EvictionAdvisor evictionAdvisor = new EvictionAdvisor() { @@ -1510,14 +1351,12 @@ public boolean adviseAgainstEviction(Long key, SoftLock value) { return false; } }; - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, evictionAdvisor, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .build(), 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1540,13 +1379,11 @@ public boolean adviseAgainstEviction(Long key, SoftLock value) { public void testRank() throws Exception { XAStore.Provider provider = new XAStore.Provider(); XAStoreConfiguration configuration = new XAStoreConfiguration("testXAResourceId"); - ServiceLocator serviceLocator = new ServiceLocator( - provider, - new TieredStore.Provider(), - new OnHeapStore.Provider(), - new OffHeapStore.Provider(), - new OffHeapDiskStore.Provider(), - mock(TransactionManagerProvider.class)); + ServiceLocator serviceLocator = dependencySet() + .with(provider) + .with(Store.Provider.class) + .with(mock(DiskResourceService.class)) + 
.with(mock(TransactionManagerProvider.class)).build(); serviceLocator.startAllServices(); diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java index ce46db5a07..9527aad8dc 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java @@ -30,7 +30,10 @@ import org.hamcrest.Description; import org.hamcrest.Matcher; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -64,6 +67,16 @@ */ public class XATransactionContextTest { + @Mock + private Store> underlyingStore; + @Mock + private Journal journal; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + @Test public void testSimpleCommands() throws Exception { TestTimeSource timeSource = new TestTimeSource(); @@ -196,8 +209,6 @@ public void testHasTimedOut() throws Exception { @Test public void testPrepareReadOnly() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -210,9 +221,8 @@ public void testPrepareReadOnly() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testPrepare() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new 
TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -256,8 +266,6 @@ public Object answer(InvocationOnMock invocation) throws Throwable { @Test public void testCommitNotPreparedInFlightThrows() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -265,6 +273,7 @@ public void testCommitNotPreparedInFlightThrows() throws Exception { xaTransactionContext.addCommand(1L, new StorePutCommand("one", new XAValueHolder("un", timeSource.getTimeMillis()))); xaTransactionContext.addCommand(2L, new StorePutCommand("two", new XAValueHolder("deux", timeSource.getTimeMillis()))); + @SuppressWarnings("unchecked") Store.ValueHolder> mockValueHolder = mock(Store.ValueHolder.class); when(mockValueHolder.value()).thenReturn(new SoftLock(null, "two", null)); when(underlyingStore.get(eq(2L))).thenReturn(mockValueHolder); @@ -278,9 +287,8 @@ public void testCommitNotPreparedInFlightThrows() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testCommit() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -320,8 +328,6 @@ public void testCommit() throws Exception { @Test public void testCommitInOnePhasePreparedThrows() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new 
TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -337,9 +343,8 @@ public void testCommitInOnePhasePreparedThrows() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testCommitInOnePhase() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -452,8 +457,6 @@ public Object answer(InvocationOnMock invocation) throws Throwable { @Test public void testRollbackPhase1() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -467,9 +470,8 @@ public void testRollbackPhase1() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testRollbackPhase2() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -512,8 +514,6 @@ public SoftLock value() { @Test public void testCommitInOnePhaseTimeout() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -533,8 
+533,6 @@ public void testCommitInOnePhaseTimeout() throws Exception { @Test public void testPrepareTimeout() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -553,9 +551,8 @@ public void testPrepareTimeout() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testCommitConflictsEvicts() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -595,9 +592,8 @@ public SoftLock value() { } @Test + @SuppressWarnings("unchecked") public void testPrepareConflictsEvicts() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -616,9 +612,8 @@ public void testPrepareConflictsEvicts() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testRollbackConflictsEvicts() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); diff --git 
a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java index e4bf078b95..8d6effd3e1 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java @@ -45,6 +45,7 @@ public void testSerialization() throws Exception { outputStream.writeObject(valueHolder); outputStream.close(); + @SuppressWarnings("unchecked") XAValueHolder result = (XAValueHolder) new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray())).readObject(); assertThat(result.getId(), is(valueHolder.getId())); @@ -54,4 +55,4 @@ public void testSerialization() throws Exception { assertThat(result.value(), is(valueHolder.value())); assertThat(result.hits(), is(valueHolder.hits())); } -} \ No newline at end of file +} diff --git a/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java b/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java index 13ce28d981..e7af8e9db2 100644 --- a/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java +++ b/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java @@ -52,6 +52,9 @@ import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.core.internal.util.ClassLoading; import org.w3c.dom.Element; +import org.w3c.dom.NamedNodeMap; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; @@ -78,8 +81,12 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Properties; import java.util.Set; +import java.util.Stack; import java.util.concurrent.TimeUnit; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.ehcache.xml.model.ThreadPoolReferenceType; import org.ehcache.xml.model.ThreadPoolsType; @@ -92,6 +99,7 @@ */ class 
ConfigurationParser { + private static final Pattern SYSPROP = Pattern.compile("\\$\\{([^}]+)\\}"); private static final SchemaFactory XSD_SCHEMA_FACTORY = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); private static final URL CORE_SCHEMA_URL = XmlConfiguration.class.getResource("/ehcache-core.xsd"); @@ -105,6 +113,23 @@ class ConfigurationParser { private final Map resourceXmlParsers = new HashMap(); private final ConfigType config; + static String replaceProperties(String originalValue, final Properties properties) { + Matcher matcher = SYSPROP.matcher(originalValue); + + StringBuffer sb = new StringBuffer(); + while (matcher.find()) { + final String property = matcher.group(1); + final String value = properties.getProperty(property); + if (value == null) { + throw new IllegalStateException(String.format("Replacement for ${%s} not found!", property)); + } + matcher.appendReplacement(sb, Matcher.quoteReplacement(value)); + } + matcher.appendTail(sb); + final String resolvedValue = sb.toString(); + return resolvedValue.equals(originalValue) ? 
null : resolvedValue; + } + public ConfigurationParser(String xml) throws IOException, SAXException, JAXBException, ParserConfigurationException { Collection schemaSources = new ArrayList(); schemaSources.add(new StreamSource(CORE_SCHEMA_URL.openStream())); @@ -132,6 +157,9 @@ public ConfigurationParser(String xml) throws IOException, SAXException, JAXBExc DocumentBuilder domBuilder = factory.newDocumentBuilder(); domBuilder.setErrorHandler(new FatalErrorHandler()); Element dom = domBuilder.parse(xml).getDocumentElement(); + + substituteSystemProperties(dom); + if (!CORE_SCHEMA_ROOT_ELEMENT.equals(dom.getLocalName()) || !CORE_SCHEMA_NAMESPACE.equals(dom.getNamespaceURI())) { throw new XmlConfigurationException("Expecting {" + CORE_SCHEMA_NAMESPACE + "}" + CORE_SCHEMA_ROOT_ELEMENT + " element; found {" + dom.getNamespaceURI() + "}" + dom.getLocalName()); @@ -143,6 +171,37 @@ public ConfigurationParser(String xml) throws IOException, SAXException, JAXBExc this.config = unmarshaller.unmarshal(dom, configTypeClass).getValue(); } + private void substituteSystemProperties(final Element dom) { + final Properties properties = System.getProperties(); + Stack nodeLists = new Stack(); + nodeLists.push(dom.getChildNodes()); + while (!nodeLists.isEmpty()) { + NodeList nodeList = nodeLists.pop(); + for (int i = 0; i < nodeList.getLength(); ++i) { + Node currentNode = nodeList.item(i); + if (currentNode.hasChildNodes()) { + nodeLists.push(currentNode.getChildNodes()); + } + final NamedNodeMap attributes = currentNode.getAttributes(); + if (attributes != null) { + for (int j = 0; j < attributes.getLength(); ++j) { + final Node attributeNode = attributes.item(j); + final String newValue = replaceProperties(attributeNode.getNodeValue(), properties); + if (newValue != null) { + attributeNode.setNodeValue(newValue); + } + } + } + if (currentNode.getNodeType() == Node.TEXT_NODE) { + final String newValue = replaceProperties(currentNode.getNodeValue(), properties); + if (newValue != 
null) { + currentNode.setNodeValue(newValue); + } + } + } + } + } + public Iterable getServiceElements() { return config.getService(); } diff --git a/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java b/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java index cad0290dba..facfba4500 100644 --- a/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java +++ b/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java @@ -325,6 +325,7 @@ private void parseConfiguration() templates.putAll(configurationParser.getTemplates()); } + @SuppressWarnings("unchecked") private Expiry getExpiry(ClassLoader cacheClassLoader, ConfigurationParser.Expiry parsedExpiry) throws ClassNotFoundException, InstantiationException, IllegalAccessException { final Expiry expiry; @@ -459,6 +460,7 @@ public CacheConfigurationBuilder newCacheConfigurationBuilderFromTe return internalCacheConfigurationBuilderFromTemplate(name, keyType, valueType, resourcePoolsBuilder.build()); } + @SuppressWarnings("unchecked") private CacheConfigurationBuilder internalCacheConfigurationBuilderFromTemplate(final String name, final Class keyType, final Class valueType, @@ -554,6 +556,7 @@ private CacheConfigurationBuilder handleListenersConfig(Configurati } if (listenersConfig.listeners() != null) { for (ConfigurationParser.Listener listener : listenersConfig.listeners()) { + @SuppressWarnings("unchecked") final Class> cacheEventListenerClass = (Class>)getClassForName(listener.className(), defaultClassLoader); final List eventListToFireOn = listener.fireOn(); Set eventSetToFireOn = new HashSet(); diff --git a/xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java b/xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java index 0ec20efd59..cab91b7183 100644 --- a/xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java +++ b/xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java @@ -24,9 +24,9 @@ */ public class TestCacheEventListener 
implements CacheEventListener { - public static CacheEvent FIRED_EVENT; + public static CacheEvent FIRED_EVENT; @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { FIRED_EVENT = event; } -} \ No newline at end of file +} diff --git a/xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java b/xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java index 4b4651cbcd..a041328b4b 100644 --- a/xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java +++ b/xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java @@ -24,9 +24,9 @@ */ public class TestSecondCacheEventListener implements CacheEventListener { - public static CacheEvent SECOND_LISTENER_FIRED_EVENT; + public static CacheEvent SECOND_LISTENER_FIRED_EVENT; @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { SECOND_LISTENER_FIRED_EVENT = event; } -} \ No newline at end of file +} diff --git a/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java b/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java index 5376affb75..fcafefabc7 100644 --- a/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java +++ b/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java @@ -15,6 +15,7 @@ */ package com.pany.ehcache.serializer; +import org.ehcache.impl.serialization.TransientStateRepository; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.impl.serialization.CompactJavaSerializer; import org.ehcache.spi.serialization.Serializer; @@ -26,7 +27,9 @@ public class TestSerializer implements Serializer { private final Serializer serializer; public TestSerializer(ClassLoader classLoader) { - serializer = new CompactJavaSerializer(classLoader); + CompactJavaSerializer compactJavaSerializer = new CompactJavaSerializer(classLoader); + compactJavaSerializer.init(new 
TransientStateRepository()); + serializer = compactJavaSerializer; } @Override diff --git a/xml/src/test/java/org/ehcache/docs/GettingStarted.java b/xml/src/test/java/org/ehcache/docs/GettingStarted.java index 2ebce8a713..62632740b2 100644 --- a/xml/src/test/java/org/ehcache/docs/GettingStarted.java +++ b/xml/src/test/java/org/ehcache/docs/GettingStarted.java @@ -36,6 +36,7 @@ public void xmlConfigSample() throws Exception { final URL myUrl = getClass().getResource("/configs/docs/getting-started.xml"); // <1> XmlConfiguration xmlConfig = new XmlConfiguration(myUrl); // <2> CacheManager myCacheManager = CacheManagerBuilder.newCacheManager(xmlConfig); // <3> + myCacheManager.init(); // <4> // end::xmlConfig[] } diff --git a/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java b/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java index 209728a190..90e86a9afd 100644 --- a/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java +++ b/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java @@ -22,6 +22,11 @@ import org.ehcache.config.ResourceUnit; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.expiry.Duration; +import org.ehcache.expiry.Expirations; +import org.ehcache.expiry.Expiry; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; import org.ehcache.impl.config.event.DefaultCacheEventListenerConfiguration; @@ -31,20 +36,15 @@ import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import 
org.ehcache.expiry.Expiry; -import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; +import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.spi.copy.Copier; import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; import org.ehcache.spi.loaderwriter.WriteBehindConfiguration.BatchingConfiguration; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.internal.util.ClassLoading; import org.ehcache.xml.exceptions.XmlConfigurationException; import org.hamcrest.CoreMatchers; import org.hamcrest.Matcher; @@ -371,11 +371,11 @@ public void testDefaultSerializerConfiguration() throws Exception { assertThat(configuration, instanceOf(DefaultSerializationProviderConfiguration.class)); DefaultSerializationProviderConfiguration factoryConfiguration = (DefaultSerializationProviderConfiguration) configuration; - assertThat(factoryConfiguration.getTransientSerializers().size(), is(4)); - assertThat(factoryConfiguration.getTransientSerializers().get(CharSequence.class), Matchers.>equalTo(TestSerializer.class)); - assertThat(factoryConfiguration.getTransientSerializers().get(Number.class), Matchers.>equalTo(TestSerializer2.class)); - assertThat(factoryConfiguration.getTransientSerializers().get(Long.class), Matchers.>equalTo(TestSerializer3.class)); - assertThat(factoryConfiguration.getTransientSerializers().get(Integer.class), Matchers.>equalTo(TestSerializer4.class)); + assertThat(factoryConfiguration.getDefaultSerializers().size(), is(4)); + assertThat(factoryConfiguration.getDefaultSerializers().get(CharSequence.class), Matchers.>equalTo(TestSerializer.class)); + assertThat(factoryConfiguration.getDefaultSerializers().get(Number.class), Matchers.>equalTo(TestSerializer2.class)); + 
assertThat(factoryConfiguration.getDefaultSerializers().get(Long.class), Matchers.>equalTo(TestSerializer3.class)); + assertThat(factoryConfiguration.getDefaultSerializers().get(Integer.class), Matchers.>equalTo(TestSerializer4.class)); List> orderedServiceConfigurations = new ArrayList>(xmlConfig.getCacheConfigurations().get("baz").getServiceConfigurations()); @@ -677,6 +677,30 @@ public void testCustomResource() throws Exception { } } + @Test + public void testSysPropReplace() { + System.getProperties().setProperty("ehcache.match", Number.class.getName()); + XmlConfiguration xmlConfig = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/systemprops.xml")); + + assertThat(xmlConfig.getCacheConfigurations().get("bar").getKeyType(), sameInstance((Class)Number.class)); + + DefaultPersistenceConfiguration persistenceConfiguration = (DefaultPersistenceConfiguration)xmlConfig.getServiceCreationConfigurations().iterator().next(); + assertThat(persistenceConfiguration.getRootDirectory(), is(new File(System.getProperty("user.home") + "/ehcache"))); + } + + @Test + public void testSysPropReplaceRegExp() { + assertThat(ConfigurationParser.replaceProperties("foo${file.separator}", System.getProperties()), equalTo("foo" + File.separator)); + assertThat(ConfigurationParser.replaceProperties("${file.separator}foo${file.separator}", System.getProperties()), equalTo(File.separator + "foo" + File.separator)); + try { + ConfigurationParser.replaceProperties("${bar}foo", System.getProperties()); + fail("Should have thrown!"); + } catch (IllegalStateException e) { + assertThat(e.getMessage().contains("${bar}"), is(true)); + } + assertThat(ConfigurationParser.replaceProperties("foo", System.getProperties()), nullValue()); + } + private void checkListenerConfigurationExists(Collection configuration) { int count = 0; for (Object o : configuration) { diff --git a/xml/src/test/resources/configs/systemprops.xml b/xml/src/test/resources/configs/systemprops.xml new file 
mode 100644 index 0000000000..bf18c771d0 --- /dev/null +++ b/xml/src/test/resources/configs/systemprops.xml @@ -0,0 +1,35 @@ + + + + + + + + + + + 5 + + + + ${ehcache.match} + 5 + +