From 28c4fc001c98a29e706703ba586a135d73617516 Mon Sep 17 00:00:00 2001
From: Louis Jacomet
Date: Fri, 9 Sep 2016 11:29:19 +0200
Subject: [PATCH 001/218] :snowflake: #1427 Bump version to 3.2.0-SNAPSHOT and update README

---
 README.adoc  | 9 +++++----
 build.gradle | 2 +-
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/README.adoc b/README.adoc
index 01b173a7ce..0f77cfb325 100644
--- a/README.adoc
+++ b/README.adoc
@@ -12,16 +12,17 @@ For samples, documentation, and usage information, please see http://ehcache.org
 
 == Current release
 
-We released 3.1.1 on July 18th 2016.
+We released 3.1.2 on September 9th 2016.
 
-The https://github.com/ehcache/ehcache3/releases/tag/v3.1.1[release notes] contain the links to the artifacts and the documentation to help you get started.
+The https://github.com/ehcache/ehcache3/releases/tag/v3.1.2[release notes] contain the links to the artifacts and the documentation to help you get started.
 
 You should consider upgrading to 3.1.x as it does all 3.0.x does and more with a fully compatible API.
 The only thing to note is that transactional support has been moved to a separate jar.
 
 == Current development & next release
 
-We are now working on the missing features of the clustering tier of Ehcache 3 which will be included in upcoming 3.1.x releases.
-We may still do a last 3.0.x release to include all fixes that have been made on it, but this is now less a priority.
+We are now working on the missing features of the clustering tier of Ehcache 3 which will be included in upcoming 3.2.x releases.
+We may still do 3.1.x release to include all fixes that have been made on it, but this is now less a priority.
+There is no longer any plan for a 3.0.x release.
 
 See the https://github.com/ehcache/ehcache3/milestones[milestones on GitHub] for more details on the current status.
 
diff --git a/build.gradle b/build.gradle
index b850391e31..70a2ad6539 100644
--- a/build.gradle
+++ b/build.gradle
@@ -16,7 +16,7 @@ import scripts.*
 
 ext {
-  baseVersion = '3.1.2-SNAPSHOT'
+  baseVersion = '3.2.0-SNAPSHOT'
 
   // Third parties
   offheapVersion = '2.2.2'
 

From 41f55674245ba9d35033bc23ffef1172c39382c2 Mon Sep 17 00:00:00 2001
From: Mathieu Carbou
Date: Wed, 7 Sep 2016 16:12:46 -0400
Subject: [PATCH 002/218] :heavy_plus_sign: ClusteringService.isConnected()

The clustered management scheduled tasks crash several times at cache manager
close because they are not aware of the closing of the underlying connection.
This addition enables dependent services to check the connection status.
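
An illustrative sketch, not part of the patch: a service that depends on the clustering service can use the new isConnected() method to skip its work once the cache manager has closed the underlying connection, instead of failing on a dead connection. The ScheduledStatisticsTask class and its pushStatistics() body are hypothetical placeholders; only ClusteringService.isConnected() comes from this change.

import org.ehcache.clustered.client.service.ClusteringService;

// Hypothetical dependent task, e.g. one scheduled by a management/statistics collector.
class ScheduledStatisticsTask implements Runnable {

  private final ClusteringService clusteringService;

  ScheduledStatisticsTask(ClusteringService clusteringService) {
    this.clusteringService = clusteringService;
  }

  @Override
  public void run() {
    // New guard made possible by this patch: bail out instead of crashing when
    // the cache manager has already closed the cluster connection.
    if (!clusteringService.isConnected()) {
      return;
    }
    pushStatistics();
  }

  private void pushStatistics() {
    // hypothetical work that needs the live cluster connection
  }
}

On the service side, the patch also makes clusterConnection volatile and funnels all closing through a single closeConnection() helper, so the flag read above is consistent across threads.
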
--- .../service/DefaultClusteringService.java | 44 +++++++++---------- .../client/service/ClusteringService.java | 5 +++ .../service/DefaultClusteringServiceTest.java | 4 ++ 3 files changed, 31 insertions(+), 22 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 77f46ff091..694e3500bc 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -81,7 +81,7 @@ class DefaultClusteringService implements ClusteringService, EntityService { private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap(); private final EhcacheClientEntity.Timeouts operationTimeouts; - private Connection clusterConnection; + private volatile Connection clusterConnection; private EhcacheClientEntityFactory entityFactory; private EhcacheClientEntity entity; @@ -125,7 +125,7 @@ public ClientEntityFactory newClientEntityFactory(St return new AbstractClientEntityFactory(entityIdentifier, entityType, entityVersion, configuration) { @Override protected Connection getConnection() { - if (clusterConnection == null) { + if (!isConnected()) { throw new IllegalStateException(getClass().getSimpleName() + " not started."); } return clusterConnection; @@ -133,6 +133,11 @@ protected Connection getConnection() { }; } + @Override + public boolean isConnected() { + return clusterConnection != null; + } + @Override public void start(final ServiceProvider serviceProvider) { initClusterConnection(); @@ -153,12 +158,7 @@ public void start(final ServiceProvider serviceProvider) { } } catch (RuntimeException e) { entityFactory = null; - try { - clusterConnection.close(); - clusterConnection = null; - } catch (IOException ex) { - LOGGER.warn("Error closing cluster connection: " + ex); - } + closeConnection(); throw e; } } @@ -211,12 +211,7 @@ public void startForMaintenance(ServiceProvider serviceProv if (!entityFactory.acquireLeadership(entityIdentifier)) { entityFactory = null; - try { - clusterConnection.close(); - clusterConnection = null; - } catch (IOException e) { - LOGGER.warn("Error closing cluster connection: " + e); - } + closeConnection(); throw new IllegalStateException("Couldn't acquire cluster-wide maintenance lease"); } inMaintenance = true; @@ -238,14 +233,7 @@ public void stop() { entity = null; - try { - if (clusterConnection != null) { - clusterConnection.close(); - clusterConnection = null; - } - } catch (IOException ex) { - throw new RuntimeException(ex); - } + closeConnection(); } @Override @@ -444,6 +432,18 @@ public void releaseServerStoreProxy(ServerStoreProxy storeProxy) { } } + private void closeConnection() { + Connection conn = clusterConnection; + clusterConnection = null; + if(conn != null) { + try { + conn.close(); + } catch (IOException e) { + LOGGER.warn("Error closing cluster connection: " + e); + } + } + } + /** * Supplies the identifier to use for identifying a client-side cache to its server counterparts. 
*/ diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java index bb89c24456..b0cc5ed2f6 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/service/ClusteringService.java @@ -31,6 +31,11 @@ public interface ClusteringService extends PersistableResourceService { ClusteringServiceConfiguration getConfiguration(); + /** + * @return true if a connection to a cluster exists + */ + boolean isConnected(); + /** * Gets a {@link ServerStoreProxy} though which a server-resident {@code ServerStore} is accessed. * diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java index f33cf67af7..b1169a3c72 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java @@ -216,7 +216,9 @@ public void testStartStopAutoCreate() throws Exception { .autoCreate() .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); + assertThat(service.isConnected(), is(false)); service.start(null); + assertThat(service.isConnected(), is(true)); assertThat(UnitTestConnectionService.getConnectionProperties(clusterUri).size(), is(1)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -326,7 +328,9 @@ public void testStartForMaintenanceAutoStart() throws Exception { .autoCreate() .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); + assertThat(service.isConnected(), is(false)); service.startForMaintenance(null); + assertThat(service.isConnected(), is(true)); assertThat(UnitTestConnectionService.getConnectionProperties(clusterUri).size(), is(1)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); From 16a484620b248017a8bddecfe0ca4d308ac61216 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Wed, 7 Sep 2016 16:21:14 -0400 Subject: [PATCH 003/218] :art: Moving ClusteredManagementServiceTest from management to clustered --- build.gradle | 2 +- clustered/integration-test/build.gradle | 8 ++ .../AbstractClusteringManagementTest.java | 108 +++++++--------- .../ClusteringManagementServiceTest.java | 86 ++++--------- .../EhcacheManagerToStringTest.java | 120 +++++------------- .../test/resources/clusteredConfiguration.txt | 4 +- .../test/resources/simpleConfiguration.txt | 6 +- management/build.gradle | 26 +--- .../DefaultClusteringManagementService.java | 9 +- .../registry/DefaultCollectorService.java | 10 +- .../registry/DefaultCollectorServiceTest.java | 4 +- ...rg.terracotta.connection.ConnectionService | 1 - .../ehcache-management-clustered.xml | 46 ------- 13 files changed, 138 insertions(+), 292 deletions(-) rename {management/src/test/java/org/ehcache/management/cluster => clustered/integration-test/src/test/java/org/ehcache/clustered/management}/AbstractClusteringManagementTest.java (58%) rename {management/src/test/java/org/ehcache/management/cluster => clustered/integration-test/src/test/java/org/ehcache/clustered/management}/ClusteringManagementServiceTest.java (78%) rename 
{management/src/test/java/org/ehcache/core => clustered/integration-test/src/test/java/org/ehcache/clustered/management}/EhcacheManagerToStringTest.java (56%) rename {management => clustered/integration-test}/src/test/resources/clusteredConfiguration.txt (90%) rename {management => clustered/integration-test}/src/test/resources/simpleConfiguration.txt (81%) delete mode 100644 management/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService delete mode 100644 management/src/test/resources/ehcache-management-clustered.xml diff --git a/build.gradle b/build.gradle index 70a2ad6539..f9282a4797 100644 --- a/build.gradle +++ b/build.gradle @@ -26,7 +26,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.6.beta5' + terracottaPlatformVersion = '5.0.6.beta6' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.6.beta' terracottaCoreVersion = '5.0.6-beta2' diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index cf833e2461..1537eeed1f 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -17,10 +17,16 @@ dependencies { testCompile project(':dist') testCompile project(':clustered:clustered-dist') + testCompile project(':management') + testCompile "org.terracotta.management:management-entity-client:$parent.managementVersion" testCompile group:'org.terracotta', name:'galvan-support', version: galvanVersion testCompile group:'com.google.code.tempus-fugit', name:'tempus-fugit', version:'1.1' testCompile group: 'javax.cache', name: 'cache-api', version: jcacheVersion + + testCompile "org.terracotta.management:management-entity-server:$parent.managementVersion:plugin" + testCompile "org.terracotta.management:monitoring-service:$parent.managementVersion:plugin" + testCompile "org.terracotta.management:monitoring-service-entity:$parent.managementVersion:plugin" } task unzipKit(type: Copy) { @@ -42,9 +48,11 @@ test { dependsOn unzipKit executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'java') environment 'JAVA_HOME', MavenToolchain.javaHome(JavaVersion.VERSION_1_8) + // If you want to see all mutations of the voltron monitoring tree, add to JAVA_OPTS: -Dorg.terracotta.management.service.monitoring.VoltronMonitoringService.DEBUG=true environment 'JAVA_OPTS', '-Dcom.tc.l2.lockmanager.greedy.locks.enabled=false' //If this directory does not exist, tests will fail with a cryptic assert failure systemProperty 'kitInstallationPath', "$unzipKit.destinationDir/${project(':clustered:clustered-dist').archivesBaseName}-$project.version-kit" + systemProperty 'managementPlugins', ["management-model", "management-entity-server", "monitoring-service", "monitoring-service-entity"].collect { String artifact -> project.configurations.testCompile.find { it.name.startsWith("$artifact-$parent.managementVersion") } }.join(':') // Uncomment to include client logging in console output // testLogging.showStandardStreams = true } diff --git a/management/src/test/java/org/ehcache/management/cluster/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java similarity index 58% rename from management/src/test/java/org/ehcache/management/cluster/AbstractClusteringManagementTest.java rename to clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 2aab98bbf8..225c589d08 100644 --- 
a/management/src/test/java/org/ehcache/management/cluster/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -13,43 +13,33 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.ehcache.management.cluster; +package org.ehcache.clustered.management; -import org.ehcache.clustered.client.internal.EhcacheClientEntityService; -import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; -import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; -import org.ehcache.clustered.server.EhcacheServerEntityService; import org.junit.After; -import org.junit.AfterClass; +import org.junit.Assert; import org.junit.BeforeClass; +import org.junit.ClassRule; import org.terracotta.connection.Connection; import org.terracotta.connection.ConnectionFactory; import org.terracotta.management.entity.management.ManagementAgentConfig; import org.terracotta.management.entity.management.client.ContextualReturnListener; -import org.terracotta.management.entity.management.client.ManagementAgentEntityClientService; import org.terracotta.management.entity.management.client.ManagementAgentEntityFactory; import org.terracotta.management.entity.management.client.ManagementAgentService; -import org.terracotta.management.entity.management.server.ManagementAgentEntityServerService; import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntity; -import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntityClientService; import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntityFactory; -import org.terracotta.management.entity.monitoring.server.MonitoringServiceEntityServerService; import org.terracotta.management.model.call.ContextualReturn; import org.terracotta.management.model.call.Parameter; import org.terracotta.management.model.cluster.ClientIdentifier; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; -import org.terracotta.offheapresource.OffHeapResourcesProvider; -import org.terracotta.offheapresource.config.OffheapResourcesType; -import org.terracotta.offheapresource.config.ResourceType; -import org.terracotta.passthrough.PassthroughClusterControl; -import org.terracotta.passthrough.PassthroughServer; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; +import java.io.File; import java.io.Serializable; -import java.math.BigInteger; -import java.net.URI; +import java.util.ArrayList; import java.util.Collection; +import java.util.List; import java.util.Properties; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CountDownLatch; @@ -66,53 +56,31 @@ public abstract class AbstractClusteringManagementTest { + private static final String RESOURCE_CONFIG = + "" + + "" + + "64" + + "" + + "\n"; + protected static MonitoringServiceEntity consumer; - private static PassthroughClusterControl stripeControl; + @ClassRule + public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, getManagementPlugins(), "", RESOURCE_CONFIG, ""); @BeforeClass public static void beforeClass() throws Exception { - PassthroughServer activeServer = new PassthroughServer(); - activeServer.setServerName("server-1"); - 
activeServer.setBindPort(9510); - activeServer.setGroupPort(9610); - - // management agent entity - activeServer.registerServerEntityService(new ManagementAgentEntityServerService()); - activeServer.registerClientEntityService(new ManagementAgentEntityClientService()); - - // ehcache entity - activeServer.registerServerEntityService(new EhcacheServerEntityService()); - activeServer.registerClientEntityService(new EhcacheClientEntityService()); - - // RW lock entity (required by ehcache) - activeServer.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); - activeServer.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - - activeServer.registerServerEntityService(new MonitoringServiceEntityServerService()); - activeServer.registerClientEntityService(new MonitoringServiceEntityClientService()); - - // off-heap service - OffheapResourcesType offheapResourcesType = new OffheapResourcesType(); - ResourceType resourceType = new ResourceType(); - resourceType.setName("primary-server-resource"); - resourceType.setUnit(org.terracotta.offheapresource.config.MemoryUnit.MB); - resourceType.setValue(BigInteger.TEN); - offheapResourcesType.getResource().add(resourceType); - activeServer.registerServiceProvider(new OffHeapResourcesProvider(), new OffHeapResourcesConfiguration(offheapResourcesType)); - - stripeControl = new PassthroughClusterControl("server-1", activeServer); - - consumer = new MonitoringServiceEntityFactory(ConnectionFactory.connect(URI.create("passthrough://server-1:9510/cluster-1"), new Properties())).retrieveOrCreate("MonitoringConsumerEntity"); + CLUSTER.getClusterControl().waitForActive(); + + consumer = new MonitoringServiceEntityFactory(ConnectionFactory.connect(CLUSTER.getConnectionURI(), new Properties())).retrieveOrCreate("MonitoringConsumerEntity"); + // buffer for client-side notifications consumer.createBestEffortBuffer("client-notifications", 1024, Serializable[].class); + // buffer for client-side stats consumer.createBestEffortBuffer("client-statistics", 1024, Serializable[].class); - } - - @AfterClass - public static void afterClass() throws Exception { - if (stripeControl != null) { - stripeControl.tearDown(); - } + // buffer for platform topology changes + consumer.createBestEffortBuffer("platform-notifications", 1024, Serializable[].class); + // buffer for entity notifications + consumer.createBestEffortBuffer("entity-notifications", 1024, Serializable[].class); } @After @@ -126,8 +94,9 @@ protected final void clear() { } protected static void sendManagementCallToCollectStats(String... statNames) throws Exception { - try (Connection managementConsole = ConnectionFactory.connect(URI.create("passthrough://server-1:9510/"), new Properties())) { - ManagementAgentService agent = new ManagementAgentService(new ManagementAgentEntityFactory(managementConsole).retrieveOrCreate(new ManagementAgentConfig())); + Connection managementConnection = CLUSTER.newConnection(); + try { + ManagementAgentService agent = new ManagementAgentService(new ManagementAgentEntityFactory(managementConnection).retrieveOrCreate(new ManagementAgentConfig())); assertThat(agent.getManageableClients().size(), equalTo(2)); @@ -143,15 +112,15 @@ protected static void sendManagementCallToCollectStats(String... 
statNames) thro assertThat(client, is(notNullValue())); final ClientIdentifier ehcacheClientIdentifier = client; - CountDownLatch callCompleted = new CountDownLatch(1); - AtomicReference managementCallId = new AtomicReference<>(); - BlockingQueue> returns = new LinkedBlockingQueue<>(); + final CountDownLatch callCompleted = new CountDownLatch(1); + final AtomicReference managementCallId = new AtomicReference(); + final BlockingQueue> returns = new LinkedBlockingQueue>(); agent.setContextualReturnListener(new ContextualReturnListener() { @Override public void onContextualReturn(ClientIdentifier from, String id, ContextualReturn aReturn) { try { - assertEquals(ehcacheClientIdentifier, from); + Assert.assertEquals(ehcacheClientIdentifier, from); // make sure the call completed callCompleted.await(10, TimeUnit.SECONDS); assertEquals(managementCallId.get(), id); @@ -176,6 +145,8 @@ public void onContextualReturn(ClientIdentifier from, String id, ContextualRetur // ensure the call is made returns.take(); + } finally { + managementConnection.close(); } } @@ -186,4 +157,13 @@ protected static ContextualStatistics[] waitForNextStats() { return (ContextualStatistics[]) serializables[1]; } + private static List getManagementPlugins() { + String[] paths = System.getProperty("managementPlugins").split(":"); + List plugins = new ArrayList(paths.length); + for (String path : paths) { + plugins.add(new File(path)); + } + return plugins; + } + } diff --git a/management/src/test/java/org/ehcache/management/cluster/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java similarity index 78% rename from management/src/test/java/org/ehcache/management/cluster/ClusteringManagementServiceTest.java rename to clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 34627247d9..75386e89a4 100644 --- a/management/src/test/java/org/ehcache/management/cluster/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -13,12 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.ehcache.management.cluster; +package org.ehcache.clustered.management; import org.ehcache.Cache; import org.ehcache.CacheManager; import org.ehcache.Status; -import org.ehcache.ValueSupplier; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.config.builders.CacheConfigurationBuilder; @@ -27,14 +26,11 @@ import org.ehcache.config.units.MemoryUnit; import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; -import org.ehcache.xml.XmlConfiguration; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; import org.terracotta.management.entity.management.ManagementAgentConfig; import org.terracotta.management.entity.management.client.ManagementAgentEntityFactory; import org.terracotta.management.model.capabilities.Capability; @@ -45,11 +41,10 @@ import org.terracotta.management.model.stats.primitive.Counter; import java.io.Serializable; -import java.net.URI; import java.util.Arrays; -import java.util.Collection; import java.util.TreeSet; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; @@ -59,68 +54,41 @@ import static org.hamcrest.collection.IsCollectionWithSize.hasSize; import static org.junit.Assert.assertThat; -@RunWith(Parameterized.class) public class ClusteringManagementServiceTest extends AbstractClusteringManagementTest { - @Parameterized.Parameters - public static Collection data() { - return Arrays.asList(new Object[][]{ - { - new ValueSupplier() { - @Override - public CacheManager value() { - return CacheManagerBuilder.newCacheManagerBuilder() - // cluster config - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("passthrough://server-1:9510/my-server-entity-1")) - .autoCreate() - .defaultServerResource("primary-server-resource")) - // management config - .using(new DefaultManagementRegistryConfiguration() - .addTags("webapp-1", "server-node-1") - .setCacheManagerAlias("my-super-cache-manager") - .addConfiguration(new EhcacheStatisticsProviderConfiguration( - 1, TimeUnit.MINUTES, - 100, 1, TimeUnit.SECONDS, - 2, TimeUnit.SECONDS))) // TTD reduce to 2 seconds so that the stat collector runs faster - // cache config - .withCache("cache-1", CacheConfigurationBuilder.newCacheConfigurationBuilder( - String.class, String.class, - newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) - .offheap(1, MemoryUnit.MB) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) - .build()) - .build(true); - } - } - }, { - new ValueSupplier() { - @Override - public CacheManager value() { - CacheManager cacheManager = CacheManagerBuilder.newCacheManager(new XmlConfiguration(getClass().getResource("/ehcache-management-clustered.xml"))); - cacheManager.init(); - return cacheManager; - } - } - }}); - } + private static AtomicInteger N = new AtomicInteger(); @Rule - public final Timeout globalTimeout = new Timeout(10000); - - private final ValueSupplier cacheManagerValueSupplier; + public final Timeout globalTimeout = Timeout.seconds(60); private CacheManager cacheManager; 
private String clientIdentifier; private long consumerId; - public ClusteringManagementServiceTest(ValueSupplier cacheManagerValueSupplier) { - this.cacheManagerValueSupplier = cacheManagerValueSupplier; - } - @Before public void init() throws Exception { - this.cacheManager = cacheManagerValueSupplier.value(); + this.cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + // cluster config + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-" + N.incrementAndGet())) + .autoCreate() + .defaultServerResource("primary-server-resource")) + // management config + .using(new DefaultManagementRegistryConfiguration() + .addTags("webapp-1", "server-node-1") + .setCacheManagerAlias("my-super-cache-manager") + .addConfiguration(new EhcacheStatisticsProviderConfiguration( + 1, TimeUnit.MINUTES, + 100, 1, TimeUnit.SECONDS, + 2, TimeUnit.SECONDS))) // TTD reduce to 2 seconds so that the stat collector runs faster + // cache config + .withCache("cache-1", CacheConfigurationBuilder.newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) + .build()) + .build(true); // ensure the CM is running and get its client id assertThat(cacheManager.getStatus(), equalTo(Status.AVAILABLE)); @@ -184,7 +152,7 @@ public void test_notifs_on_add_cache() throws Exception { ContextContainer contextContainer = consumer.getValueForNode(consumerId, new String[]{"management", "clients", clientIdentifier, "registry", "contextContainer"}, ContextContainer.class); assertThat(contextContainer.getSubContexts(), hasSize(2)); - Collection cNames = new TreeSet(); + TreeSet cNames = new TreeSet(); for (ContextContainer container : contextContainer.getSubContexts()) { cNames.add(container.getValue()); } diff --git a/management/src/test/java/org/ehcache/core/EhcacheManagerToStringTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java similarity index 56% rename from management/src/test/java/org/ehcache/core/EhcacheManagerToStringTest.java rename to clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java index 7126a17d34..71fe1e342a 100644 --- a/management/src/test/java/org/ehcache/core/EhcacheManagerToStringTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java @@ -14,43 +14,28 @@ * limitations under the License. 
*/ -package org.ehcache.core; +package org.ehcache.clustered.management; import org.ehcache.CacheManager; import org.ehcache.Status; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.clustered.client.internal.EhcacheClientEntityService; -import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; -import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; -import org.ehcache.clustered.server.EhcacheServerEntityService; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.WriteBehindConfigurationBuilder; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.HumanReadable; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.junit.AfterClass; import org.junit.Assert; -import org.junit.BeforeClass; import org.junit.Test; -import org.terracotta.management.entity.management.client.ManagementAgentEntityClientService; -import org.terracotta.management.entity.management.server.ManagementAgentEntityServerService; -import org.terracotta.management.service.monitoring.IMonitoringConsumer; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; -import org.terracotta.offheapresource.OffHeapResourcesProvider; -import org.terracotta.offheapresource.config.OffheapResourcesType; -import org.terracotta.offheapresource.config.ResourceType; -import org.terracotta.passthrough.PassthroughClusterControl; -import org.terracotta.passthrough.PassthroughServer; import java.io.File; import java.io.FileNotFoundException; -import java.math.BigInteger; import java.net.URI; import java.util.Map; import java.util.Scanner; @@ -59,11 +44,9 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.junit.Assert.*; -public class EhcacheManagerToStringTest { - - static IMonitoringConsumer consumer; - static PassthroughClusterControl stripeControl; +public class EhcacheManagerToStringTest extends AbstractClusteringManagementTest { @Test public void simpleOnHeapToString() throws Exception { @@ -93,23 +76,28 @@ public boolean adviseAgainstEviction(String key, String value) { .build()) .build(true); - String actual = ((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString(); - String expected = read("/simpleConfiguration.txt"); - - // only testing part of the string, to avoid collections ordering clashes - Assert.assertThat( - actual.substring(actual.indexOf("resourcePools")).replace(" ", "").replace("\n", "").replaceAll("\\\\|/", "|"), - equalTo( - expected.substring(expected.indexOf("resourcePools")).replace(" ", "").replace("\n", "").replaceAll("\\\\|/", "|") - ) - ); + try { + String actual = ((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString(); + String expected = read("/simpleConfiguration.txt"); + + // only testing part of the string, to avoid collections ordering clashes + assertThat( + 
actual.substring(actual.indexOf("resourcePools")).replace(" ", "").replace("\n", "").replaceAll("\\\\|/", "|"), + equalTo( + expected.substring(expected.indexOf("resourcePools")).replace(" ", "").replace("\n", "").replaceAll("\\\\|/", "|") + ) + ); + } finally { + cacheManager.close(); + } } @Test public void clusteredToString() throws Exception { + URI uri = CLUSTER.getConnectionURI().resolve("/my-server-entity-1"); CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() // cluster config - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("passthrough://server-1:9510/my-server-entity-1")) + .with(ClusteringServiceConfigurationBuilder.cluster(uri) .autoCreate() .defaultServerResource("primary-server-resource")) // management config @@ -130,64 +118,26 @@ public void clusteredToString() throws Exception { .build()) .build(true); - String actual = ((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString(); - String expected = read("/clusteredConfiguration.txt"); - - System.out.println(actual); - - // only testing part of the string, to avoid collections ordering clashes - Assert.assertThat( - actual.substring(actual.indexOf("resourcePools")).replace(" ", "").replace("\n", ""), - equalTo( - expected.substring(expected.indexOf("resourcePools")).replace(" ", "").replace("\n", "") - ) - ); - - Assert.assertThat(actual.indexOf("serviceConfigurations: None"), greaterThan(1)); - Assert.assertThat(actual.indexOf("evictionAdvisor: None"), greaterThan(1)); - - if (cacheManager != null && cacheManager.getStatus() == Status.AVAILABLE) { + try { + String actual = ((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString(); + String expected = read("/clusteredConfiguration.txt"); + + // only testing part of the string, to avoid collections ordering clashes + assertThat( + actual.substring(actual.indexOf("resourcePools")).replace(" ", "").replace("\n", ""), + equalTo( + expected.substring(expected.indexOf("resourcePools")).replace(" ", "").replace("\n", "").replace("server-1:9510", uri.getAuthority()) + ) + ); + + assertThat(actual.indexOf("serviceConfigurations: None"), greaterThan(1)); + assertThat(actual.indexOf("evictionAdvisor: None"), greaterThan(1)); + } finally { cacheManager.close(); } } - @BeforeClass - public static void beforeClass() throws Exception { - PassthroughServer activeServer = new PassthroughServer(); - activeServer.setServerName("server-1"); - activeServer.setBindPort(9510); - activeServer.setGroupPort(9610); - - // management agent entity - activeServer.registerServerEntityService(new ManagementAgentEntityServerService()); - activeServer.registerClientEntityService(new ManagementAgentEntityClientService()); - - // ehcache entity - activeServer.registerServerEntityService(new EhcacheServerEntityService()); - activeServer.registerClientEntityService(new EhcacheClientEntityService()); - - // RW lock entity (required by ehcache) - activeServer.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); - activeServer.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - - // off-heap service - OffheapResourcesType offheapResourcesType = new OffheapResourcesType(); - ResourceType resourceType = new ResourceType(); - resourceType.setName("primary-server-resource"); - resourceType.setUnit(org.terracotta.offheapresource.config.MemoryUnit.MB); - resourceType.setValue(BigInteger.TEN); - offheapResourcesType.getResource().add(resourceType); - activeServer.registerServiceProvider(new 
OffHeapResourcesProvider(), new OffHeapResourcesConfiguration(offheapResourcesType)); - - stripeControl = new PassthroughClusterControl("server-1", activeServer); - } - - @AfterClass - public static void afterClass() throws Exception { - stripeControl.tearDown(); - } - public static class SampleLoaderWriter implements CacheLoaderWriter { @Override diff --git a/management/src/test/resources/clusteredConfiguration.txt b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt similarity index 90% rename from management/src/test/resources/clusteredConfiguration.txt rename to clustered/integration-test/src/test/resources/clusteredConfiguration.txt index 7e5a0f0cb4..b7ce815907 100644 --- a/management/src/test/resources/clusteredConfiguration.txt +++ b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt @@ -18,7 +18,7 @@ caches: tierHeight: 10 services: - org.ehcache.clustered.client.config.ClusteringServiceConfiguration: - clusterUri: passthrough://server-1:9510/my-server-entity-1 + clusterUri: terracotta://server-1:9510/my-server-entity-1 readOperationTimeout: TimeoutDuration{5 SECONDS} autoCreate: true - - org.ehcache.management.registry.DefaultManagementRegistryConfiguration \ No newline at end of file + - org.ehcache.management.registry.DefaultManagementRegistryConfiguration diff --git a/management/src/test/resources/simpleConfiguration.txt b/clustered/integration-test/src/test/resources/simpleConfiguration.txt similarity index 81% rename from management/src/test/resources/simpleConfiguration.txt rename to clustered/integration-test/src/test/resources/simpleConfiguration.txt index 449a883f16..09765c1a48 100644 --- a/management/src/test/resources/simpleConfiguration.txt +++ b/clustered/integration-test/src/test/resources/simpleConfiguration.txt @@ -5,7 +5,7 @@ caches: serviceConfigurations: - org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration - org.ehcache.impl.config.loaderwriter.writebehind.DefaultWriteBehindConfiguration - evictionAdvisor: org.ehcache.core.EhcacheManagerToStringTest$1 + evictionAdvisor: org.ehcache.clustered.management.EhcacheManagerToStringTest$1 expiry: NoExpiry resourcePools: pools: @@ -15,9 +15,9 @@ caches: offheap: size: 1 MB tierHeight: 1000 - disk: + disk: size: 2 MB (persistent) tierHeight: 100 services: - org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration: - rootDirectory: build/tmp/EhcacheManagerToStringTest \ No newline at end of file + rootDirectory: build/tmp/EhcacheManagerToStringTest diff --git a/management/build.gradle b/management/build.gradle index aaf037ec6f..141fd02156 100644 --- a/management/build.gradle +++ b/management/build.gradle @@ -17,34 +17,18 @@ apply plugin: EhDeploy dependencies { + // optional: if we want xml config compileOnly project(':xml') + + // optional: if we want to use the clustered management layer compileOnly project(':clustered:client') + compileOnly "org.terracotta.management:management-entity-client:$parent.managementVersion" compile project(':api') compile project(':core') compile project(':impl') - compile "org.terracotta.management:management-entity-client:$parent.managementVersion" + compile "org.terracotta.management:management-registry:$parent.managementVersion" testCompile project(':xml') - testCompile project(':clustered:client') - testCompile project(':clustered:server') - testCompile "org.terracotta:entity-test-lib:$parent.entityTestLibVersion" - testCompile "org.terracotta:passthrough-server:$parent.entityTestLibVersion" - testCompile 
"org.terracotta.management:monitoring-service:$parent.managementVersion" - testCompile "org.terracotta.management:monitoring-service-entity:$parent.managementVersion" - testCompile "org.terracotta.management:management-entity-server:$parent.managementVersion" - testCompile "org.terracotta.entities:clustered-map-server:$parent.terracottaPlatformVersion" testCompile "com.fasterxml.jackson.core:jackson-databind:2.7.5" } - -compileTestJava { - sourceCompatibility = 1.8 - targetCompatibility = 1.8 - options.fork = true; - options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') -} - -test { - executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'java') - environment 'JAVA_HOME', MavenToolchain.javaHome(JavaVersion.VERSION_1_8) -} diff --git a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java b/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java index 0313b9303f..d208300d4e 100644 --- a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java +++ b/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java @@ -18,6 +18,7 @@ import org.ehcache.Cache; import org.ehcache.Status; import org.ehcache.clustered.client.service.ClientEntityFactory; +import org.ehcache.clustered.client.service.ClusteringService; import org.ehcache.clustered.client.service.EntityService; import org.ehcache.core.events.CacheManagerListener; import org.ehcache.core.spi.service.CacheManagerProviderService; @@ -48,7 +49,7 @@ import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; -@ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class, TimeSourceService.class, ManagementRegistryService.class, EntityService.class}) +@ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class, TimeSourceService.class, ManagementRegistryService.class, EntityService.class, ClusteringService.class}) public class DefaultClusteringManagementService implements ClusteringManagementService, CacheManagerListener, CollectorService.Collector { private final ClusteringManagementServiceConfiguration configuration; @@ -59,6 +60,7 @@ public class DefaultClusteringManagementService implements ClusteringManagementS private volatile ClientEntityFactory managementAgentEntityFactory; private volatile InternalCacheManager cacheManager; private volatile ExecutorService managementCallExecutor; + private volatile ClusteringService clusteringService; public DefaultClusteringManagementService() { this(new DefaultClusteringManagementServiceConfiguration()); @@ -70,6 +72,7 @@ public DefaultClusteringManagementService(ClusteringManagementServiceConfigurati @Override public void start(ServiceProvider serviceProvider) { + this.clusteringService = serviceProvider.getService(ClusteringService.class); this.managementRegistryService = serviceProvider.getService(ManagementRegistryService.class); this.cacheManager = serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); // get an ordered executor to keep ordering of management call requests @@ -163,7 +166,7 @@ public void stateTransition(Status from, Status to) { @Override public void onNotification(ContextualNotification notification) { ManagementAgentService service = managementAgentService; - if (service != null) { + if (service != null && clusteringService.isConnected()) { service.pushNotification(notification); } } @@ 
-171,7 +174,7 @@ public void onNotification(ContextualNotification notification) { @Override public void onStatistics(Collection statistics) { ManagementAgentService service = managementAgentService; - if (service != null) { + if (service != null && clusteringService.isConnected()) { service.pushStatistics(statistics); } } diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java b/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java index 2f11eb552d..deca1b2e32 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java +++ b/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java @@ -65,7 +65,7 @@ private enum EhcacheNotification { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultCollectorService.class); - private ScheduledFuture task; + private volatile ScheduledFuture task; private final ConcurrentMap selectedStatsPerCapability = new ConcurrentHashMap(); private final Collector collector; @@ -174,8 +174,7 @@ public synchronized void startStatisticCollector() { @Override public void run() { try { - // always check if the cache manager is still available - if (!selectedStatsPerCapability.isEmpty()) { + if (task != null && !selectedStatsPerCapability.isEmpty()) { // create the full context list from current caches Collection cacheContexts = new ArrayList(); @@ -196,7 +195,7 @@ public void run() { // next time, only poll history from this time lastPoll.set(timeSource.getTimeMillis()); - if (!statistics.isEmpty()) { + if (task != null && !statistics.isEmpty()) { collector.onStatistics(statistics); } } @@ -211,8 +210,9 @@ public void run() { @Override public synchronized void stopStatisticCollector() { if (task != null) { - task.cancel(false); + ScheduledFuture _task = task; task = null; + _task.cancel(false); } } diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java index 8867bc2e4d..5ce43ef388 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java @@ -56,7 +56,7 @@ public class DefaultCollectorServiceTest { @Test public void updateCollectedStatisticsTest__should_not_add_stats_when_selection_empty() throws Exception { DefaultCollectorService defaultCollectorService = new DefaultCollectorService(); - defaultCollectorService.updateCollectedStatistics("PifCapability", new ArrayList<>()); + defaultCollectorService.updateCollectedStatistics("PifCapability", new ArrayList()); assertThat(defaultCollectorService.getSelectedStatsPerCapability().size(), equalTo(0)); } @@ -73,7 +73,7 @@ public void updateCollectedStatisticsTest__add_stats_and_then_clear_them() throw assertThat(defaultCollectorService.getSelectedStatsPerCapability().size(), equalTo(1)); - defaultCollectorService.updateCollectedStatistics("PifCapability", new ArrayList<>()); + defaultCollectorService.updateCollectedStatistics("PifCapability", new ArrayList()); assertThat(defaultCollectorService.getSelectedStatsPerCapability().size(), equalTo(0)); } diff --git a/management/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService b/management/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService deleted file mode 100644 index ab3bcf3c65..0000000000 
--- a/management/src/test/resources/META-INF/services/org.terracotta.connection.ConnectionService +++ /dev/null @@ -1 +0,0 @@ -org.terracotta.passthrough.PassthroughConnectionService diff --git a/management/src/test/resources/ehcache-management-clustered.xml b/management/src/test/resources/ehcache-management-clustered.xml deleted file mode 100644 index e2e5577e13..0000000000 --- a/management/src/test/resources/ehcache-management-clustered.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - - - - - - - - - - - - - webapp-1 - server-node-1 - - - - 1 - 1 - 100 - 2 - - - - - - - java.lang.String - java.lang.String - - 10 - 1 - 1 - - - - From d6065124a6482a0d287daa794d2e70c93a6a650a Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Thu, 8 Sep 2016 15:52:25 -0400 Subject: [PATCH 004/218] :arrow_up: Compat' with tc-platform 5.0.6.beta7 (Terracotta-OSS/terracotta-platform#147) --- build.gradle | 2 +- clustered/integration-test/build.gradle | 2 +- .../AbstractClusteringManagementTest.java | 20 +++++++++--------- .../ClusteringManagementServiceTest.java | 21 ++++++++++--------- 4 files changed, 23 insertions(+), 22 deletions(-) diff --git a/build.gradle b/build.gradle index f9282a4797..bf95e3abd3 100644 --- a/build.gradle +++ b/build.gradle @@ -26,7 +26,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.6.beta6' + terracottaPlatformVersion = '5.0.6.beta7' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.6.beta' terracottaCoreVersion = '5.0.6-beta2' diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index 1537eeed1f..ad9f462418 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -52,7 +52,7 @@ test { environment 'JAVA_OPTS', '-Dcom.tc.l2.lockmanager.greedy.locks.enabled=false' //If this directory does not exist, tests will fail with a cryptic assert failure systemProperty 'kitInstallationPath', "$unzipKit.destinationDir/${project(':clustered:clustered-dist').archivesBaseName}-$project.version-kit" - systemProperty 'managementPlugins', ["management-model", "management-entity-server", "monitoring-service", "monitoring-service-entity"].collect { String artifact -> project.configurations.testCompile.find { it.name.startsWith("$artifact-$parent.managementVersion") } }.join(':') + systemProperty 'managementPlugins', ["management-model", "management-registry", "sequence-generator", "management-entity-server", "monitoring-service", "monitoring-service-entity"].collect { String artifact -> project.configurations.testCompile.find { it.name.startsWith("$artifact-$parent.managementVersion") } }.join(':') // Uncomment to include client logging in console output // testLogging.showStandardStreams = true } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 225c589d08..0e661a5a86 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -31,12 +31,12 @@ import org.terracotta.management.model.call.Parameter; import org.terracotta.management.model.cluster.ClientIdentifier; import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.message.Message; import 
org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; import java.io.File; -import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -74,13 +74,13 @@ public static void beforeClass() throws Exception { consumer = new MonitoringServiceEntityFactory(ConnectionFactory.connect(CLUSTER.getConnectionURI(), new Properties())).retrieveOrCreate("MonitoringConsumerEntity"); // buffer for client-side notifications - consumer.createBestEffortBuffer("client-notifications", 1024, Serializable[].class); + consumer.createBestEffortBuffer("client-notifications", 1024, Message.class); // buffer for client-side stats - consumer.createBestEffortBuffer("client-statistics", 1024, Serializable[].class); + consumer.createBestEffortBuffer("client-statistics", 1024, Message.class); // buffer for platform topology changes - consumer.createBestEffortBuffer("platform-notifications", 1024, Serializable[].class); + consumer.createBestEffortBuffer("platform-notifications", 1024, Message.class); // buffer for entity notifications - consumer.createBestEffortBuffer("entity-notifications", 1024, Serializable[].class); + consumer.createBestEffortBuffer("entity-notifications", 1024, Message.class); } @After @@ -89,8 +89,8 @@ public final void clearBuffers() throws Exception { } protected final void clear() { - while (consumer.readBuffer("client-notifications", Serializable[].class) != null) ; - while (consumer.readBuffer("client-statistics", Serializable[].class) != null) ; + while (consumer.readBuffer("client-notifications", Message.class) != null) ; + while (consumer.readBuffer("client-statistics", Message.class) != null) ; } protected static void sendManagementCallToCollectStats(String... 
statNames) throws Exception { @@ -152,9 +152,9 @@ public void onContextualReturn(ClientIdentifier from, String id, ContextualRetur protected static ContextualStatistics[] waitForNextStats() { // uses the monitoring consumre entity to get the content of the stat buffer when some stats are collected - Serializable[] serializables; - while ((serializables = consumer.readBuffer("client-statistics", Serializable[].class)) == null) { Thread.yield(); } - return (ContextualStatistics[]) serializables[1]; + Message message; + while ((message = consumer.readBuffer("client-statistics", Message.class)) == null) { Thread.yield(); } + return message.unwrap(ContextualStatistics[].class); } private static List getManagementPlugins() { diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 75386e89a4..89196d12c7 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -35,6 +35,7 @@ import org.terracotta.management.entity.management.client.ManagementAgentEntityFactory; import org.terracotta.management.model.capabilities.Capability; import org.terracotta.management.model.context.ContextContainer; +import org.terracotta.management.model.message.Message; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.history.CounterHistory; @@ -132,8 +133,8 @@ public void test_capabilities_exposed() throws Exception { @Test public void test_notifs_sent_at_CM_init() throws Exception { - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_TAGS_UPDATED")); + assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); + assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_TAGS_UPDATED")); assertThat(consumer.readBuffer("client-notifications", Serializable[].class), is(nullValue())); } @@ -158,10 +159,10 @@ public void test_notifs_on_add_cache() throws Exception { } assertThat(cNames, equalTo(new TreeSet(Arrays.asList("cache-1", "cache-2")))); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CACHE_ADDED")); - assertThat(consumer.readBuffer("client-notifications", Serializable[].class), is(nullValue())); + assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); + assertThat(consumer.readBuffer("client-notifications", 
Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); + assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CACHE_ADDED")); + assertThat(consumer.readBuffer("client-notifications", Message.class), is(nullValue())); } @Test @@ -170,10 +171,10 @@ public void test_notifs_on_remove_cache() throws Exception { cacheManager.removeCache("cache-2"); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(((ContextualNotification) consumer.readBuffer("client-notifications", Serializable[].class)[1]).getType(), equalTo("CACHE_REMOVED")); - assertThat(consumer.readBuffer("client-notifications", Serializable[].class), is(nullValue())); + assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); + assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); + assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CACHE_REMOVED")); + assertThat(consumer.readBuffer("client-notifications", Message.class), is(nullValue())); } @Test From 8f3b111ac03b2873fd75bcfda85bfcfe7ebc8089 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Wed, 7 Sep 2016 00:03:40 +0530 Subject: [PATCH 005/218] Closes #1411 Stateful contract for serializers --- .../ehcache/spi/serialization/Serializer.java | 11 +- .../spi/serialization/StatefulSerializer.java | 47 +++ .../service/ClusteredStateRepository.java | 1 - .../client/internal/store/ClusteredStore.java | 30 +- .../operations/codecs/OperationsCodec.java | 8 + .../asciidoc/user/serializers-copiers.adoc | 42 ++- ...ultSerializationProviderConfiguration.java | 41 +-- .../DefaultSerializationProvider.java | 50 +-- .../internal/store/disk/OffHeapDiskStore.java | 40 ++- .../impl/internal/store/heap/OnHeapStore.java | 19 +- .../internal/store/offheap/OffHeapStore.java | 17 +- .../serialization/ByteArraySerializer.java | 16 - .../impl/serialization/CharSerializer.java | 16 - .../serialization/CompactJavaSerializer.java | 21 +- .../impl/serialization/DoubleSerializer.java | 16 - .../impl/serialization/FloatSerializer.java | 16 - .../impl/serialization/IntegerSerializer.java | 16 - .../impl/serialization/LongSerializer.java | 16 - .../serialization/PlainJavaSerializer.java | 5 - .../impl/serialization/StringSerializer.java | 15 - .../TransientStateRepository.java | 2 +- ...erializationProviderConfigurationTest.java | 116 ++++++- .../DefaultSerializationProviderTest.java | 311 +++++++++++++++++- .../disk/OffHeapDiskStoreProviderTest.java | 3 +- .../ehcache/integration/SerializersTest.java | 181 ++++++++++ 25 files changed, 820 insertions(+), 236 deletions(-) create mode 100644 api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java create mode 100644 integration-test/src/test/java/org/ehcache/integration/SerializersTest.java diff --git a/api/src/main/java/org/ehcache/spi/serialization/Serializer.java b/api/src/main/java/org/ehcache/spi/serialization/Serializer.java index 96b42c33ca..90beca481f 100644 --- 
a/api/src/main/java/org/ehcache/spi/serialization/Serializer.java
+++ b/api/src/main/java/org/ehcache/spi/serialization/Serializer.java
@@ -23,15 +23,8 @@
  * Implementations must be thread-safe.
  *
- * When used within the default serialization provider, there are additional requirements.
- * The implementations must define either or both of the two constructors:
- *
- *   Serializer(ClassLoader loader)
- *   This constructor is used to initialize the serializer for transient caches.
- *
- *   Serializer(ClassLoader loader, org.ehcache.spi.persistence.StateRepository stateRepository)
- *   This constructor is used to initialize the serializer for persistent caches and allows them to store any relevant
- *   state in the provided repository.
+ * When used within the default serialization provider, there is an additional requirement.
+ * The implementations must define a constructor that takes in a {@code ClassLoader}.
  * The {@code ClassLoader} value may be {@code null}. If not {@code null}, the class loader
  * instance provided should be used during deserialization to load classes needed by the deserialized objects.
  *
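Editorial illustration, not part of this patch: a serializer satisfying the single `(ClassLoader)` constructor requirement described in the updated javadoc above might look like the sketch below. The class name and the UTF-8 encoding choice are assumptions made for the example; the `Serializer` method signatures are the ones used throughout this patch.

```java
import java.nio.ByteBuffer;
import java.nio.charset.Charset;

import org.ehcache.spi.serialization.Serializer;
import org.ehcache.spi.serialization.SerializerException;

// Hypothetical example of a serializer meeting the (ClassLoader) constructor requirement.
public class SimpleStringSerializer implements Serializer<String> {

  private static final Charset UTF8 = Charset.forName("UTF-8");

  // The default serialization provider reflectively invokes this constructor;
  // the class loader may be null and is not needed to deserialize Strings.
  public SimpleStringSerializer(ClassLoader classLoader) {
  }

  @Override
  public ByteBuffer serialize(String object) throws SerializerException {
    return ByteBuffer.wrap(object.getBytes(UTF8));
  }

  @Override
  public String read(ByteBuffer binary) throws ClassNotFoundException, SerializerException {
    byte[] bytes = new byte[binary.remaining()];
    binary.get(bytes);
    return new String(bytes, UTF8);
  }

  @Override
  public boolean equals(String object, ByteBuffer binary) throws ClassNotFoundException, SerializerException {
    return object.equals(read(binary.duplicate()));
  }
}
```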
diff --git a/api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java b/api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java new file mode 100644 index 0000000000..3b3d0e0ff0 --- /dev/null +++ b/api/src/main/java/org/ehcache/spi/serialization/StatefulSerializer.java @@ -0,0 +1,47 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.spi.serialization; + +import org.ehcache.spi.persistence.StateRepository; + +/** + * Implementations of this interface can have their state maintained in a {@code StateRepository}. + * The state will be maintained by the authoritative tier of the cache for which this is configured. + *

+ * Implementations must be thread-safe.
+ *
+ * When used within the default serialization provider, there is an additional constructor requirement.
+ * The implementations must define a constructor that takes in a {@code ClassLoader}.
+ * Post instantiation, the state repository will be injected with the {@code init} method invocation.
+ * This is guaranteed to happen before any serialization/deserialization interaction.
+ *
+ * + * @param the type of the instances to serialize + * + * @see Serializer + */ +public interface StatefulSerializer extends Serializer { + + /** + * This method is used to inject a {@code StateRepository} to the serializer + * by the authoritative tier of a cache during the cache initialization. + * The passed in state repository will have the persistent properties of the injecting tier. + * + * @param stateRepository the state repository + */ + void init(StateRepository stateRepository); +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java index 06b20fcb92..5885ee523b 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java @@ -18,7 +18,6 @@ import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.ehcache.spi.persistence.StateRepository; import java.io.Serializable; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index 17a8b29ddf..a7fc5fb5e5 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -51,6 +51,9 @@ import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; import org.ehcache.impl.internal.events.NullStoreEventDispatcher; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; @@ -574,11 +577,34 @@ public void initStore(final Store resource) { throw new IllegalArgumentException("Given clustered tier is not managed by this provider : " + resource); } final ClusteredStore clusteredStore = (ClusteredStore) resource; + ClusteredCacheIdentifier cacheIdentifier = storeConfig.getCacheIdentifier(); try { - clusteredStore.storeProxy = clusteringService.getServerStoreProxy(storeConfig.getCacheIdentifier(), storeConfig.getStoreConfig(), storeConfig.getConsistency()); + clusteredStore.storeProxy = clusteringService.getServerStoreProxy(cacheIdentifier, storeConfig.getStoreConfig(), storeConfig.getConsistency()); } catch (CachePersistenceException e) { - throw new RuntimeException("Unable to create clustered tier proxy - " + storeConfig.getCacheIdentifier(), e); + throw new RuntimeException("Unable to create clustered tier proxy - " + cacheIdentifier, e); } + + Serializer keySerializer = clusteredStore.codec.getKeySerializer(); + if (keySerializer instanceof StatefulSerializer) { + StateRepository stateRepository = null; + try { + stateRepository = clusteringService.getStateRepositoryWithin(cacheIdentifier, cacheIdentifier.getId() + "-Key"); + } catch (CachePersistenceException e) { + throw new RuntimeException(e); + } + 
((StatefulSerializer)keySerializer).init(stateRepository); + } + Serializer valueSerializer = clusteredStore.codec.getValueSerializer(); + if (valueSerializer instanceof StatefulSerializer) { + StateRepository stateRepository = null; + try { + stateRepository = clusteringService.getStateRepositoryWithin(cacheIdentifier, cacheIdentifier.getId() + "-Value"); + } catch (CachePersistenceException e) { + throw new RuntimeException(e); + } + ((StatefulSerializer)valueSerializer).init(stateRepository); + } + clusteredStore.storeProxy.addInvalidationListener(new ServerStoreProxy.InvalidationListener() { @Override public void onInvalidateHash(long hash) { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java index c6868312b6..16bbf347f9 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/codecs/OperationsCodec.java @@ -42,4 +42,12 @@ public Operation decode(ByteBuffer buffer) { buffer.rewind(); return opCode.decode(buffer, keySerializer, valueSerializer); } + + public Serializer getKeySerializer() { + return keySerializer; + } + + public Serializer getValueSerializer() { + return valueSerializer; + } } diff --git a/docs/src/docs/asciidoc/user/serializers-copiers.adoc b/docs/src/docs/asciidoc/user/serializers-copiers.adoc index 27be820b30..db867df01e 100644 --- a/docs/src/docs/asciidoc/user/serializers-copiers.adoc +++ b/docs/src/docs/asciidoc/user/serializers-copiers.adoc @@ -125,40 +125,38 @@ of the serialized types as they might not be available in the current class load [[persistent-vs-transient-caches]] ==== Persistent vs. transient caches -When configured on a persistent cache, serializers may need to persist and restore their state across restarts. -For clustered caches there might be an additional requirement that the state of the serializer must be visible to all clients using the same cache(configured with the same serializer). -To address these requirement you have to implement a constructor with the following signature: +All custom serializers must have a constructor with the following signature: ```java -public MySerializer(ClassLoader classLoader, StateRepository stateRepository) { +public MySerializer(ClassLoader classLoader) { } ``` -otherwise persistent caches won't be able to use your serializer. - -The `StateRepository.getPersistentConcurrentMap()` provides a `ConcurrentMap` that you can use to store any relevant state. -The users don't have to worry about the persistence aspects of this map as it is taken care by `Ehcache`. -In the case of a disk persistent cache, the contents of the map will be persisted locally on to the disk. -For clustered caches the contents are persisted in the cluster itself so that other clients using the same cache can also access the contents of the map. - -Attempting to configure a serializer that lacks such constructor on a persistent cache using either of +Attempting to configure a serializer that lacks such constructor on a cache using either of `CacheConfigurationBuilder.withKeySerializer(Class> keySerializerClass)` or `CacheConfigurationBuilder.withValueSerializer(Class> valueSerializerClass)` will be sanctioned with an exception upon cache initialization. 
-Configuring a serializer that lacks such constructor by instance on a persistent cache using either of
-`CacheConfigurationBuilder.withKeySerializer(Serializer<K> keySerializer)` or `CacheConfigurationBuilder.withValueSerializer(Serializer<V> valueSerializer)`
-will work, but the responsibility of persisting and restoring the serializer's state across restarts lies on you.
+But if an instance of the serializer is configured using either of
+`CacheConfigurationBuilder.withKeySerializer(Serializer<K> keySerializer)` or
+`CacheConfigurationBuilder.withValueSerializer(Serializer<V> valueSerializer)`
+it will work, since the instantiation is done by the user code itself.
+
+Registering a serializer that lacks such a constructor at the cache manager level will prevent it from being chosen for caches.
+
+Custom serializer implementations may have state that is used in the serialization/deserialization process.
+When configured on a persistent cache, the state of such serializers needs to be persisted across restarts.
 
-On caches that have no persistent capable store, serializers must have a constructor:
+To address these requirements you can provide a `StatefulSerializer` implementation.
+`StatefulSerializer` is a specialized `Serializer` with an additional _init_ method with the following signature:
 
 ```java
-public MySerializer(ClassLoader classLoader) {
+public void init(StateRepository repository) {
 
 }
 ```
 
-Attempting to configure a serializer that lacks such constructor on a transient cache using either of
-`CacheConfigurationBuilder.withKeySerializer(Class<? extends Serializer<K>> keySerializerClass)` or
-`CacheConfigurationBuilder.withValueSerializer(Class<? extends Serializer<V>> valueSerializerClass)`
-will be sanctioned with an exception upon cache initialization.
+The `StateRepository.getPersistentConcurrentMap(String, Class, Class)` method provides a `ConcurrentMap` that you can use to store any relevant state.
+The `StateRepository` is provided by the authoritative tier of the cache and hence has the same persistence properties as that tier.
+For persistent caches it is highly recommended that all state is stored in these maps, as users won't have to worry about the persistence aspects of this map: it is taken care of by `Ehcache`.
 
-Registering a serializer that lacks such constructor at the cache manager level will prevent it from being chosen for persistent caches.
+* In the case of a disk persistent cache, the contents of the map will be persisted locally on to the disk.
+* For clustered caches the contents are persisted in the cluster itself so that other clients using the same cache can also access the contents of the map.
 
 NOTE: The constructor with the signature `(ClassLoader classLoader, FileBasedPersistenceContext persistenceContext)` that existed in v3.0 is still supported to respect backward compatibility but the usage is limited to disk based caches.
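Editorial illustration, not part of this patch: the `StatefulSerializer` contract documented above could be implemented along the lines of the following sketch, which keeps an index of the values it has seen in the injected `StateRepository` so the mapping survives restarts for persistent and clustered caches. The class name, map name and indexing scheme are assumptions; only `StatefulSerializer`, `StateRepository` and the `getPersistentConcurrentMap` call are Ehcache APIs referenced by this patch.

```java
import java.nio.ByteBuffer;
import java.util.concurrent.ConcurrentMap;

import org.ehcache.spi.persistence.StateRepository;
import org.ehcache.spi.serialization.SerializerException;
import org.ehcache.spi.serialization.StatefulSerializer;

// Hypothetical sketch: maps each distinct String to an int index and stores the
// index -> value mapping in the injected StateRepository so it can be restored later.
public class IndexedStringSerializer implements StatefulSerializer<String> {

  private volatile ConcurrentMap<Integer, String> index;

  // Required (ClassLoader) constructor; the loader is not needed for Strings.
  public IndexedStringSerializer(ClassLoader classLoader) {
  }

  @Override
  public void init(StateRepository stateRepository) {
    // Called by the authoritative tier before any serialize/read/equals call.
    this.index = stateRepository.getPersistentConcurrentMap("IndexedStringSerializer-index", Integer.class, String.class);
  }

  @Override
  public ByteBuffer serialize(String object) throws SerializerException {
    ByteBuffer buffer = ByteBuffer.allocate(4);
    buffer.putInt(indexOf(object));
    buffer.flip();
    return buffer;
  }

  @Override
  public String read(ByteBuffer binary) throws ClassNotFoundException, SerializerException {
    return index.get(binary.getInt());
  }

  @Override
  public boolean equals(String object, ByteBuffer binary) throws ClassNotFoundException, SerializerException {
    return object.equals(read(binary.duplicate()));
  }

  // Naive registration: scans for an existing slot or claims the next free one.
  private int indexOf(String value) {
    for (int i = 0; ; i++) {
      String existing = index.putIfAbsent(i, value);
      if (existing == null || existing.equals(value)) {
        return i;
      }
    }
  }
}
```

Such a serializer would be wired in exactly as described above: the `(ClassLoader)` constructor is invoked first, then the authoritative tier injects its `StateRepository` through `init(...)` before any `serialize`, `read` or `equals` call.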
diff --git a/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java b/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java index 55dc152642..e0cda53697 100644 --- a/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java +++ b/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java @@ -20,7 +20,6 @@ import java.util.LinkedHashMap; import java.util.Map; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; @@ -94,31 +93,27 @@ public DefaultSerializationProviderConfiguration addSerializerFor(Class s throw new NullPointerException("Serializer class cannot be null"); } - boolean transientConstructorPresent; - boolean persistentConstructorPresent; - - if(transientConstructorPresent = isConstructorPresent(serializerClass, ClassLoader.class)) { - if (!overwrite && transientSerializers.containsKey(serializableClass)) { - throw new IllegalArgumentException("Duplicate transient serializer for class : " + serializableClass.getName()); - } else { - transientSerializers.put(serializableClass, serializerClass); + boolean baseConstructorPresent = isConstructorPresent(serializerClass, ClassLoader.class); + if (baseConstructorPresent) { + if (!overwrite) { + if (transientSerializers.containsKey(serializableClass)) { + throw new IllegalArgumentException("Duplicate transient serializer for class : " + serializableClass.getName()); + } + if (persistentSerializers.containsKey(serializableClass)) { + throw new IllegalArgumentException("Duplicate persistent serializer for class : " + serializableClass.getName()); + } } + transientSerializers.put(serializableClass, serializerClass); + persistentSerializers.put(serializableClass, serializerClass); } - if (persistentConstructorPresent = isConstructorPresent(serializerClass, ClassLoader.class, StateRepository.class)) { - if (!overwrite && persistentSerializers.containsKey(serializableClass)) { - throw new IllegalArgumentException("Duplicate persistent serializer for class : " + serializableClass.getName()); - } else { - persistentSerializers.put(serializableClass, serializerClass); - } + boolean legacyConstructorPresent = isConstructorPresent(serializerClass, ClassLoader.class, FileBasedPersistenceContext.class); + if(!baseConstructorPresent && !legacyConstructorPresent) { + throw new IllegalArgumentException("The serializer: " + serializerClass.getName() + + " does not meet the constructor requirements for either transient or persistent caches."); } - if (isConstructorPresent(serializerClass, ClassLoader.class, FileBasedPersistenceContext.class)) { - if (persistentConstructorPresent) { - throw new IllegalArgumentException("Serializer cannot have constructors taking (ClassLoader, StateRepository) and (ClassLoader, FileBasedPersistenceContext)" + - " - you should remove the second one as it is deprecated since version 3.1.0"); - } - persistentConstructorPresent = true; + if (!baseConstructorPresent && legacyConstructorPresent) { if (!overwrite && persistentSerializers.containsKey(serializableClass)) { throw new IllegalArgumentException("Duplicate persistent serializer for class : " + serializableClass.getName()); } else { @@ -126,10 +121,6 @@ public DefaultSerializationProviderConfiguration addSerializerFor(Class s } 
} - if(!transientConstructorPresent && !persistentConstructorPresent) { - throw new IllegalArgumentException("The serializer: " + serializerClass.getName() - + " does not meet the constructor requirements for either transient or persistent caches."); - } return this; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java b/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java index 35ec028cd2..d654b3af58 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java +++ b/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java @@ -23,16 +23,14 @@ import org.ehcache.impl.serialization.CharSerializer; import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.impl.serialization.CompactPersistentJavaSerializer; import org.ehcache.impl.serialization.DoubleSerializer; import org.ehcache.impl.serialization.FloatSerializer; import org.ehcache.impl.serialization.IntegerSerializer; import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.impl.serialization.PlainJavaSerializer; import org.ehcache.impl.serialization.StringSerializer; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; +import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; @@ -163,11 +161,11 @@ public TransientProvider(Map, Class>> serialize @Override protected Serializer createSerializer(String suffix, Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... configs) throws UnsupportedTypeException { + Class> klazz = getSerializerClassFor(clazz, config, classLoader); try { - Class> klazz = getClassFor(clazz, config, classLoader); return constructSerializer(clazz, klazz.getConstructor(ClassLoader.class), classLoader); } catch (NoSuchMethodException e) { - throw new RuntimeException(e); + throw new RuntimeException(klazz + " does not meet the constructor requirement for transient caches", e); } } @@ -193,32 +191,44 @@ private PersistentProvider(Map, Class>> seriali @Override protected Serializer createSerializer(String suffix, Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... 
configs) throws UnsupportedTypeException { - Class> klazz = getClassFor(clazz, config, classLoader); - PersistenceSpaceIdentifier space = findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[]) configs); - PersistableResourceService service = serviceProvider.getService(space.getServiceType()); - - String subSpaceName = DefaultSerializationProvider.class.getSimpleName() + suffix; + Class> klazz = getSerializerClassFor(clazz, config, classLoader); + String errorMessage = klazz + " does not meet the constructor requirements for persistent caches"; + + if (StatefulSerializer.class.isAssignableFrom(klazz)) { + try { + Constructor> constructor = klazz.getConstructor(ClassLoader.class); + return constructSerializer(clazz, constructor, classLoader); + } catch (NoSuchMethodException e) { + throw new RuntimeException(errorMessage, e); + } + } else { + PersistenceSpaceIdentifier space = findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[]) configs); + PersistableResourceService service = serviceProvider.getService(space.getServiceType()); - try { - Constructor> constructor = klazz.getConstructor(ClassLoader.class, StateRepository.class); - StateRepository stateRepository = service.getStateRepositoryWithin(space, subSpaceName); - return constructSerializer(clazz, constructor, classLoader, stateRepository); - } catch (NoSuchMethodException e) { if (service instanceof LocalPersistenceService) { try { Constructor> constructor = klazz.getConstructor(ClassLoader.class, FileBasedPersistenceContext.class); + String subSpaceName = DefaultSerializationProvider.class.getSimpleName() + suffix; FileBasedPersistenceContext context = ((LocalPersistenceService) service).createPersistenceContextWithin(space, subSpaceName); return constructSerializer(clazz, constructor, classLoader, context); } catch (NoSuchMethodException nsmex) { - throw new RuntimeException(nsmex); + try { + Constructor> constructor = klazz.getConstructor(ClassLoader.class); + return constructSerializer(clazz, constructor, classLoader); + } catch (NoSuchMethodException e) { + throw new RuntimeException(errorMessage, e); + } } catch (CachePersistenceException cpex) { throw new RuntimeException(cpex); } } else { - throw new RuntimeException(e); + try { + Constructor> constructor = klazz.getConstructor(ClassLoader.class); + return constructSerializer(clazz, constructor, classLoader); + } catch (NoSuchMethodException e) { + throw new RuntimeException(errorMessage, e); + } } - } catch (CachePersistenceException e) { - throw new RuntimeException(e); } } @@ -256,7 +266,7 @@ public Serializer createValueSerializer(Class clazz, ClassLoader class protected abstract Serializer createSerializer(String suffix, Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... 
configs) throws UnsupportedTypeException; - protected Class> getClassFor(Class clazz, DefaultSerializerConfiguration config, ClassLoader classLoader) throws UnsupportedTypeException { + protected Class> getSerializerClassFor(Class clazz, DefaultSerializerConfiguration config, ClassLoader classLoader) throws UnsupportedTypeException { if (config != null) { Class> configured = config.getClazz(); if (configured != null) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index c7684a3ce5..b07d050bb8 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -37,6 +37,8 @@ import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; @@ -72,6 +74,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -297,9 +300,10 @@ private File getMetadataFile() { @ServiceDependencies({TimeSourceService.class, SerializationProvider.class, ExecutionService.class}) public static class Provider implements Store.Provider, AuthoritativeTier.Provider { - private final Set> createdStores = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap, Boolean>()); + private final Map, PersistenceSpaceIdentifier> createdStores = new ConcurrentWeakIdentityHashMap, PersistenceSpaceIdentifier>(); private final String defaultThreadPool; private volatile ServiceProvider serviceProvider; + private volatile LocalPersistenceService localPersistenceService; public Provider() { this(null); @@ -337,7 +341,7 @@ private OffHeapDiskStore createStoreInternal(Configuration st } MemoryUnit unit = (MemoryUnit)diskPool.getUnit(); - LocalPersistenceService localPersistenceService = serviceProvider.getService(LocalPersistenceService.class); + this.localPersistenceService = serviceProvider.getService(LocalPersistenceService.class); if (localPersistenceService == null) { throw new IllegalStateException("No LocalPersistenceService could be found - did you configure it at the CacheManager level?"); } @@ -359,7 +363,7 @@ private OffHeapDiskStore createStoreInternal(Configuration st OffHeapDiskStore offHeapStore = new OffHeapDiskStore(persistenceContext, executionService, threadPoolAlias, writerConcurrency, storeConfig, timeSource, eventDispatcher, unit.toBytes(diskPool.getSize())); - createdStores.add(offHeapStore); + createdStores.put(offHeapStore, space); return offHeapStore; } catch (CachePersistenceException cpex) { throw new RuntimeException("Unable to create persistence context in " + space, cpex); @@ -368,7 +372,7 @@ private OffHeapDiskStore createStoreInternal(Configuration st @Override public void releaseStore(Store resource) { - if (!createdStores.contains(resource)) { + if (createdStores.remove(resource) == null) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } try { @@ -397,10 +401,34 @@ static void close(final 
OffHeapDiskStore resource) throws IOExcepti @Override public void initStore(Store resource) { - if (!createdStores.contains(resource)) { + PersistenceSpaceIdentifier identifier = createdStores.get(resource); + if (identifier == null) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } - init((OffHeapDiskStore)resource); + OffHeapDiskStore diskStore = (OffHeapDiskStore) resource; + + Serializer keySerializer = diskStore.keySerializer; + if (keySerializer instanceof StatefulSerializer) { + StateRepository stateRepository = null; + try { + stateRepository = localPersistenceService.getStateRepositoryWithin(identifier, "key-serializer"); + } catch (CachePersistenceException e) { + throw new RuntimeException(e); + } + ((StatefulSerializer)keySerializer).init(stateRepository); + } + Serializer valueSerializer = diskStore.valueSerializer; + if (valueSerializer instanceof StatefulSerializer) { + StateRepository stateRepository = null; + try { + stateRepository = localPersistenceService.getStateRepositoryWithin(identifier, "value-serializer"); + } catch (CachePersistenceException e) { + throw new RuntimeException(e); + } + ((StatefulSerializer)valueSerializer).init(stateRepository); + } + + init(diskStore); } static void init(final OffHeapDiskStore resource) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index c90fce7434..19987f6dd3 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -46,7 +46,10 @@ import org.ehcache.impl.internal.store.heap.holders.SerializedOnHeapValueHolder; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.impl.serialization.TransientStateRepository; import org.ehcache.sizeof.annotations.IgnoreSizeOf; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.events.StoreEventSource; @@ -413,7 +416,7 @@ public OnHeapValueHolder apply(K mappedKey, OnHeapValueHolder mappedValue) case MISS: return false; default: - throw new AssertionError("Unknow enum value " + outcome); + throw new AssertionError("Unknown enum value " + outcome); } } catch (RuntimeException re) { storeEventDispatcher.releaseEventSinkAfterFailure(eventSink, re); @@ -1684,6 +1687,16 @@ static void close(final OnHeapStore onHeapStore) { @Override public void initStore(Store resource) { checkResource(resource); + + List copiers = createdStores.get(resource); + for (Copier copier : copiers) { + if(copier instanceof SerializingCopier) { + Serializer serializer = ((SerializingCopier)copier).getSerializer(); + if(serializer instanceof StatefulSerializer) { + ((StatefulSerializer)serializer).init(new TransientStateRepository()); + } + } + } } private void checkResource(Object resource) { @@ -1721,7 +1734,7 @@ public void releaseCachingTier(CachingTier resource) { @Override public void initCachingTier(CachingTier resource) { - initStore((Store) resource); + checkResource(resource); } @Override @@ -1736,7 +1749,7 @@ public void releaseHigherCachingTier(HigherCachingTier resource) { @Override public void initHigherCachingTier(HigherCachingTier resource) { - initStore((Store) resource); + checkResource(resource); } 
} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java index e3a5e6c8e2..c098eb445e 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java @@ -31,6 +31,9 @@ import org.ehcache.impl.internal.store.offheap.portability.SerializerPortability; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.impl.serialization.TransientStateRepository; +import org.ehcache.spi.persistence.PersistableResourceService; +import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; @@ -55,6 +58,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Set; import static org.ehcache.impl.internal.store.offheap.OffHeapStoreUtils.getBufferSource; @@ -179,6 +183,17 @@ public void initStore(Store resource) { if (!createdStores.contains(resource)) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } + + OffHeapStore offHeapStore = (OffHeapStore) resource; + Serializer keySerializer = offHeapStore.keySerializer; + if (keySerializer instanceof StatefulSerializer) { + ((StatefulSerializer)keySerializer).init(new TransientStateRepository()); + } + Serializer valueSerializer = offHeapStore.valueSerializer; + if (valueSerializer instanceof StatefulSerializer) { + ((StatefulSerializer)valueSerializer).init(new TransientStateRepository()); + } + init((OffHeapStore)resource); } @@ -251,7 +266,7 @@ public void initCachingTier(LowerCachingTier resource) { if (!createdStores.contains(resource)) { throw new IllegalArgumentException("Given caching tier is not managed by this provider : " + resource); } - initStore((Store) resource); + init((OffHeapStore) resource); } } } diff --git a/impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java index d350339ea6..8f9dbb1e6a 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/ByteArraySerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.spi.serialization.Serializer; @@ -51,21 +50,6 @@ public ByteArraySerializer() { public ByteArraySerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@code byte[]} is a base java type and this implementation requires no state.
- *
- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public ByteArraySerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java index cd05711d41..c72ba3b82f 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/CharSerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -46,21 +45,6 @@ public CharSerializer() { public CharSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link Character} is a base java type and this implementation requires no state.
- *
- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public CharSerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java index de287192ba..4fa0612bf2 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java @@ -18,7 +18,6 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; @@ -35,15 +34,14 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import org.ehcache.core.spi.function.NullaryFunction; import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.impl.internal.util.ByteBufferInputStream; import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; /** * A trivially compressed Java serialization based serializer. @@ -53,9 +51,9 @@ * {@code Class} and the integer representation are stored in a single on-heap * map. */ -public class CompactJavaSerializer implements Serializer { +public class CompactJavaSerializer implements StatefulSerializer { - private final ConcurrentMap readLookup; + private volatile ConcurrentMap readLookup; private final ConcurrentMap readLookupLocalCache = new ConcurrentHashMap(); private final ConcurrentMap writeLookup = new ConcurrentHashMap(); @@ -72,13 +70,8 @@ public class CompactJavaSerializer implements Serializer { * @see Serializer */ public CompactJavaSerializer(ClassLoader loader) { - this(loader, new TransientStateRepository()); - } - - public CompactJavaSerializer(ClassLoader loader, StateRepository stateRepository) { this.loader = loader; - this.readLookup = stateRepository.getPersistentConcurrentMap("CompactJavaSerializer-ObjectStreamClassIndex", Integer.class, ObjectStreamClass.class); - loadMappingsInWriteContext(readLookup.entrySet(), true); + init(new TransientStateRepository()); } CompactJavaSerializer(ClassLoader loader, Map mappings) { @@ -97,6 +90,12 @@ public CompactJavaSerializer(ClassLoader loader, StateRepository stateRepository } } + @Override + public void init(final StateRepository stateRepository) { + this.readLookup = stateRepository.getPersistentConcurrentMap("CompactJavaSerializer-ObjectStreamClassIndex", Integer.class, ObjectStreamClass.class); + loadMappingsInWriteContext(readLookup.entrySet(), true); + } + Map getSerializationMappings() { return Collections.unmodifiableMap(new HashMap(readLookup)); } diff --git a/impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java index e3e578d8be..9ded987569 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/DoubleSerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import 
org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -46,21 +45,6 @@ public DoubleSerializer() { public DoubleSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link Double} is a base java type and this implementation requires no state.
- *
- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public DoubleSerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java index 73760d6cab..a15cf7382c 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/FloatSerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -46,21 +45,6 @@ public FloatSerializer() { public FloatSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link Float} is a base java type and this implementation requires no state.
- *
- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public FloatSerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java index 659c0ac8c3..f4efe01892 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/IntegerSerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -46,21 +45,6 @@ public IntegerSerializer() { public IntegerSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link Integer} is a base java type and this implementation requires no state.
- *
- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public IntegerSerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java index 2c983daee9..ce7fd97477 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/LongSerializer.java @@ -16,7 +16,6 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import java.nio.ByteBuffer; @@ -46,21 +45,6 @@ public LongSerializer() { public LongSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link Long} is a base java type and this implementation requires no state.
- *
- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public LongSerializer(ClassLoader classLoader, StateRepository stateRepository) { - - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java index a1c02e1ab8..b72abb4df2 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/PlainJavaSerializer.java @@ -17,7 +17,6 @@ package org.ehcache.impl.serialization; import org.ehcache.impl.internal.util.ByteBufferInputStream; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.SerializerException; @@ -43,10 +42,6 @@ public PlainJavaSerializer(ClassLoader classLoader) { this.classLoader = classLoader; } - public PlainJavaSerializer(ClassLoader classLoader, StateRepository stateRepository) throws IOException, ClassNotFoundException { - this(classLoader); - } - @Override public ByteBuffer serialize(T object) { ByteArrayOutputStream bout = new ByteArrayOutputStream(); diff --git a/impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java index add9eb458d..cc4f84e5a3 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/StringSerializer.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.nio.ByteBuffer; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.SerializerException; @@ -49,20 +48,6 @@ public StringSerializer() { public StringSerializer(ClassLoader classLoader) { } - /** - * Constructor to enable this serializer as a persistent one. - *

- * Parameters are ignored as {@link String} is a base java type and this implementation requires no state.
- *
- * - * @param classLoader the classloader to use - * @param stateRepository the state repository - * - * @see Serializer - */ - public StringSerializer(ClassLoader classLoader, StateRepository stateRepository) { - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java b/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java index 004f1cee73..12519df360 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java @@ -25,7 +25,7 @@ /** * TransientStateRepository */ -class TransientStateRepository implements StateRepository { +public class TransientStateRepository implements StateRepository { private ConcurrentMap> knownMaps = new ConcurrentHashMap>(); diff --git a/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java b/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java index 3421a2cf35..130e15a43f 100644 --- a/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java +++ b/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java @@ -16,26 +16,30 @@ package org.ehcache.impl.config.serializer; +import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; +import org.ehcache.spi.serialization.StatefulSerializer; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.ExpectedException; import java.nio.ByteBuffer; import static org.junit.Assert.*; -/** - * Created by alsu on 30/09/15. 
- */ public class DefaultSerializationProviderConfigurationTest { + @Rule + public ExpectedException expectedException = ExpectedException.none(); + @Test public void testAddSerializerForTransient() throws Exception { DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); config.addSerializerFor(Long.class, TransientSerializer.class); - assertTrue(config.getPersistentSerializers().isEmpty()); + assertSame(TransientSerializer.class, config.getPersistentSerializers().get(Long.class)); assertSame(TransientSerializer.class, config.getTransientSerializers().get(Long.class)); } @@ -48,6 +52,15 @@ public void testAddSerializerForPersistent() throws Exception { assertSame(PersistentSerializer.class, config.getPersistentSerializers().get(Long.class)); } + @Test + public void testAddSerializerForTransientPersistentLegacyCombo() throws Exception { + DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); + config.addSerializerFor(Long.class, LegacyComboSerializer.class); + + assertSame(LegacyComboSerializer.class, config.getPersistentSerializers().get(Long.class)); + assertSame(LegacyComboSerializer.class, config.getTransientSerializers().get(Long.class)); + } + @Test public void testAddSerializerForTransientPersistentCombo() throws Exception { DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); @@ -57,12 +70,20 @@ public void testAddSerializerForTransientPersistentCombo() throws Exception { assertSame(ComboSerializer.class, config.getTransientSerializers().get(Long.class)); } - @Test(expected = IllegalArgumentException.class) - public void testAddSerializerForUnusable() throws Exception { + @Test + public void testAddSerializerForConstructorless() throws Exception { + expectedException.expectMessage("does not meet the constructor requirements for either transient or persistent caches"); DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); config.addSerializerFor(Long.class, UnusableSerializer.class); } + @Test + public void testAddSerializerForStatefulOnly() throws Exception { + expectedException.expectMessage("does not meet the constructor requirements for either transient or persistent caches"); + DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); + config.addSerializerFor(Long.class, YetAnotherUnusableSerializer.class); + } + private static class TransientSerializer implements Serializer { public TransientSerializer(ClassLoader loader) { @@ -105,12 +126,12 @@ public boolean equals(final Long object, final ByteBuffer binary) throws ClassNo } } - private static class ComboSerializer implements Serializer { + private static class LegacyComboSerializer implements Serializer { - public ComboSerializer(ClassLoader loader) { + public LegacyComboSerializer(ClassLoader loader) { } - public ComboSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + public LegacyComboSerializer(ClassLoader loader, FileBasedPersistenceContext context) { } @Override @@ -146,4 +167,79 @@ public boolean equals(final Long object, final ByteBuffer binary) throws ClassNo throw new UnsupportedOperationException("Implement me!"); } } -} \ No newline at end of file + + private static class ComboSerializer implements StatefulSerializer { + + public ComboSerializer(ClassLoader loader) { + } + + @Override + public void init(final StateRepository stateRepository) { + throw new 
UnsupportedOperationException("Implement me!"); + } + + @Override + public ByteBuffer serialize(final Long object) throws SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + } + + private static class AnotherUnusableSerializer implements StatefulSerializer { + + public AnotherUnusableSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + } + + @Override + public void init(final StateRepository stateRepository) { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public ByteBuffer serialize(final Long object) throws SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + } + + private static class YetAnotherUnusableSerializer implements StatefulSerializer { + + @Override + public void init(final StateRepository stateRepository) { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public ByteBuffer serialize(final Long object) throws SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + + @Override + public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + throw new UnsupportedOperationException("Implement me!"); + } + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java index da0ed8b45d..883dddfb06 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java @@ -20,26 +20,25 @@ import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.ehcache.impl.serialization.ByteArraySerializer; import org.ehcache.impl.serialization.CharSerializer; import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.impl.serialization.CompactPersistentJavaSerializer; import org.ehcache.impl.serialization.DoubleSerializer; import org.ehcache.impl.serialization.FloatSerializer; import org.ehcache.impl.serialization.IntegerSerializer; import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.impl.serialization.PlainJavaSerializer; import 
org.ehcache.impl.serialization.StringSerializer; import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.serialization.UnsupportedTypeException; import org.ehcache.spi.service.ServiceProvider; import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; +import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import java.io.Closeable; @@ -61,8 +60,6 @@ import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** @@ -73,6 +70,9 @@ public class DefaultSerializationProviderTest { @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); + @Rule + public ExpectedException expectedException = ExpectedException.none(); + @Test public void testCreateSerializerNoConfig() throws Exception { DefaultSerializationProviderConfiguration dspfConfig = new DefaultSerializationProviderConfiguration(); @@ -291,6 +291,205 @@ public void testDefaultByteArraySerializer() throws Exception { assertThat(keySerializer, instanceOf(ByteArraySerializer.class)); } + @Test + public void testCreateTransientSerializerWithoutConstructor() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not meet the constructor requirement for transient caches"); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) BaseSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + } + + @Test + public void testCreatePersistentSerializerWithoutConstructor() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not meet the constructor requirements for persistent caches"); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) BaseSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + } + + @Test + public void testCreateTransientStatefulSerializerWithoutConstructor() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not meet the constructor requirement for transient caches"); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) StatefulBaseSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + } + + @Test + public void testCreatePersistentStatefulSerializerWithoutConstructor() throws Exception { + expectedException.expect(RuntimeException.class); + 
expectedException.expectMessage("does not meet the constructor requirements for persistent caches"); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) StatefulBaseSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + } + + @Test + public void testCreateTransientMinimalSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + MinimalSerializer.baseConstructorInvoked = false; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(MinimalSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + assertThat(valueSerializer, instanceOf(MinimalSerializer.class)); + assertThat(MinimalSerializer.baseConstructorInvoked, is(true)); + } + + @Test + public void testCreatePersistentMinimalSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + MinimalSerializer.baseConstructorInvoked = false; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(MinimalSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + assertThat(valueSerializer, instanceOf(MinimalSerializer.class)); + assertThat(MinimalSerializer.baseConstructorInvoked, is(true)); + } + + @Test + public void testTransientMinimalStatefulSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + MinimalStatefulSerializer.baseConstructorInvoked = false; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(MinimalStatefulSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + assertThat(valueSerializer, instanceOf(MinimalStatefulSerializer.class)); + assertThat(MinimalStatefulSerializer.baseConstructorInvoked, is(true)); + } + + @Test + public void testPersistentMinimalStatefulSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + MinimalStatefulSerializer.baseConstructorInvoked = false; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(MinimalStatefulSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + assertThat(valueSerializer, instanceOf(MinimalStatefulSerializer.class)); + assertThat(MinimalStatefulSerializer.baseConstructorInvoked, is(true)); + } + + @Test + public void testTransientLegacySerializer() throws Exception { + expectedException.expect(RuntimeException.class); + 
expectedException.expectMessage("does not meet the constructor requirement for transient caches"); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(LegacySerializer.class, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + } + + @Test + public void testPersistentLegacySerializer() throws Exception { + DefaultSerializationProvider provider = getStartedProvider(); + + LegacySerializer.legacyConstructorInvoked = false; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(LegacySerializer.class, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + assertThat(valueSerializer, instanceOf(LegacySerializer.class)); + assertThat(LegacySerializer.legacyConstructorInvoked, is(true)); + } + + @Test + public void testTransientLegacyComboSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + LegacyComboSerializer.baseConstructorInvoked = false; + LegacyComboSerializer.legacyConstructorInvoked = false; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(LegacyComboSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + assertThat(valueSerializer, instanceOf(LegacyComboSerializer.class)); + assertThat(LegacyComboSerializer.baseConstructorInvoked, is(true)); + assertThat(LegacyComboSerializer.legacyConstructorInvoked, is(false)); + } + + @Test + public void testPersistentLegacyComboSerializer() throws Exception { + DefaultSerializationProvider provider = getStartedProvider(); + + LegacyComboSerializer.baseConstructorInvoked = false; + LegacyComboSerializer.legacyConstructorInvoked = false; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(LegacyComboSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + assertThat(valueSerializer, instanceOf(LegacyComboSerializer.class)); + assertThat(LegacyComboSerializer.baseConstructorInvoked, is(false)); + assertThat(LegacyComboSerializer.legacyConstructorInvoked, is(true)); + } + + @Test + public void testCreateTransientStatefulLegacySerializer() throws Exception { + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not meet the constructor requirement for transient caches"); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) StatefulLegacySerializer.class, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + } + + @Test + public void testCreatePersistentStatefulLegacySerializer() throws Exception { + expectedException.expect(RuntimeException.class); + 
expectedException.expectMessage("does not meet the constructor requirements for persistent caches"); + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) StatefulLegacySerializer.class, DefaultSerializerConfiguration.Type.VALUE); + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + } + + @Test + public void testTransientStatefulLegacyComboSerializer() throws Exception { + DefaultSerializationProvider provider = new DefaultSerializationProvider(null); + provider.start(providerContaining()); + + StatefulLegacyComboSerializer.baseConstructorInvoked = false; + StatefulLegacyComboSerializer.legacyConstructorInvoked = false; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(StatefulLegacyComboSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); + assertThat(valueSerializer, instanceOf(StatefulLegacyComboSerializer.class)); + assertThat(StatefulLegacyComboSerializer.baseConstructorInvoked, is(true)); + assertThat(StatefulLegacyComboSerializer.legacyConstructorInvoked, is(false)); + } + + @Test + public void testPersistentStatefulLegacyComboSerializer() throws Exception { + DefaultSerializationProvider provider = getStartedProvider(); + + StatefulLegacyComboSerializer.baseConstructorInvoked = false; + StatefulLegacyComboSerializer.legacyConstructorInvoked = false; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(StatefulLegacyComboSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + Serializer valueSerializer = + provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); + assertThat(valueSerializer, instanceOf(StatefulLegacyComboSerializer.class)); + assertThat(StatefulLegacyComboSerializer.baseConstructorInvoked, is(true)); + assertThat(StatefulLegacyComboSerializer.legacyConstructorInvoked, is(false)); + } + private PersistableResourceService.PersistenceSpaceIdentifier getPersistenceSpaceIdentifierMock() { PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = mock(LocalPersistenceService.PersistenceSpaceIdentifier.class); when(spaceIdentifier.getServiceType()).thenReturn(LocalPersistenceService.class); @@ -302,9 +501,6 @@ private DefaultSerializationProvider getStartedProvider() throws CachePersistenc ServiceProvider serviceProvider = mock(ServiceProvider.class); LocalPersistenceService persistenceService = mock(LocalPersistenceService.class); - StateRepository stateRepository = mock(StateRepository.class); - when(stateRepository.getPersistentConcurrentMap(any(String.class), any(Class.class), any(Class.class))).thenReturn(new ConcurrentHashMap()); - when(persistenceService.getStateRepositoryWithin(any(PersistableResourceService.PersistenceSpaceIdentifier.class), any(String.class))).thenReturn(stateRepository); when(persistenceService.createPersistenceContextWithin(any(PersistableResourceService.PersistenceSpaceIdentifier.class), anyString())) .thenReturn(new FileBasedPersistenceContext() { @Override @@ -371,4 +567,103 @@ public boolean equals(Object object, ByteBuffer binary) throws ClassNotFoundExce return false; } } + + public static class 
BaseSerializer implements Serializer { + + @Override + public ByteBuffer serialize(final T object) throws SerializerException { + return null; + } + + @Override + public T read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return null; + } + + @Override + public boolean equals(final T object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return false; + } + } + + public static class MinimalSerializer extends BaseSerializer { + + private static boolean baseConstructorInvoked = false; + + public MinimalSerializer(ClassLoader loader) { + baseConstructorInvoked = true; + } + + } + + //Stateful but no constructor + public static class StatefulBaseSerializer extends BaseSerializer implements StatefulSerializer { + + @Override + public void init(final StateRepository stateRepository) { + } + } + + public static class MinimalStatefulSerializer extends BaseSerializer implements StatefulSerializer { + + private static boolean baseConstructorInvoked = false; + + public MinimalStatefulSerializer(ClassLoader loader) { + baseConstructorInvoked = true; + } + + @Override + public void init(final StateRepository stateRepository) { + } + } + + public static class LegacySerializer extends BaseSerializer { + + private static boolean legacyConstructorInvoked = false; + + public LegacySerializer(ClassLoader loader, FileBasedPersistenceContext context) { + legacyConstructorInvoked = true; + } + } + + public static class LegacyComboSerializer extends BaseSerializer { + + private static boolean baseConstructorInvoked = false; + private static boolean legacyConstructorInvoked = false; + + public LegacyComboSerializer(ClassLoader loader) { + baseConstructorInvoked = true; + } + + public LegacyComboSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + legacyConstructorInvoked = true; + } + } + + public static class StatefulLegacySerializer extends StatefulBaseSerializer { + + private static boolean legacyConstructorInvoked = false; + + public StatefulLegacySerializer(ClassLoader loader, FileBasedPersistenceContext context) { + legacyConstructorInvoked = true; + } + } + + public static class StatefulLegacyComboSerializer extends BaseSerializer implements StatefulSerializer { + + private static boolean baseConstructorInvoked = false; + private static boolean legacyConstructorInvoked = false; + + public StatefulLegacyComboSerializer(final ClassLoader loader) { + baseConstructorInvoked = true; + } + + public StatefulLegacyComboSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + legacyConstructorInvoked = true; + } + + @Override + public void init(final StateRepository stateRepository) { + } + } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java index 6c8af6efe3..5f8748166c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java @@ -32,6 +32,7 @@ import org.ehcache.impl.internal.DefaultTimeSourceService; import org.ehcache.impl.serialization.LongSerializer; import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.junit.Test; @@ -65,7 +66,7 @@ public void 
testStatisticsAssociations() throws Exception { provider.start(serviceLocator); - OffHeapDiskStore store = provider.createStore(getStoreConfig()); + OffHeapDiskStore store = provider.createStore(getStoreConfig(), mock(PersistableResourceService.PersistenceSpaceIdentifier.class)); Query storeQuery = queryBuilder() .children() diff --git a/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java b/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java new file mode 100644 index 0000000000..a8f6d5153b --- /dev/null +++ b/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java @@ -0,0 +1,181 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.integration; + +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.integration.domain.Person; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.spi.serialization.StatefulSerializer; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.rules.TemporaryFolder; + +import java.nio.ByteBuffer; + +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.persistence; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +public class SerializersTest { + + @Rule + public TemporaryFolder temporaryFolder = new TemporaryFolder(); + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + @Test + public void testStatefulSerializer() throws Exception { + StatefulSerializerImpl serializer = new StatefulSerializerImpl(); + testSerializerWithByRefHeapCache(serializer); + assertThat(serializer.initCount, is(0)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithByValueHeapCache(serializer); + assertThat(serializer.initCount, is(1)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithOffheapCache(serializer); + assertThat(serializer.initCount, is(1)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithHeapOffheapCache(serializer); + assertThat(serializer.initCount, is(1)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithDiskCache(serializer); + assertThat(serializer.initCount, is(1)); + + serializer = new StatefulSerializerImpl(); + testSerializerWithHeapDiskCache(serializer); + assertThat(serializer.initCount, is(1)); + + serializer = new StatefulSerializerImpl(); + 
testSerializerWithThreeTierCache(serializer); + assertThat(serializer.initCount, is(1)); + + } + + private void testSerializerWithByRefHeapCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .withCache("heapByRefCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithByValueHeapCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .withCache("heapByValueCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10)) + .withKeyCopier((Class)SerializingCopier.class) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithOffheapCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .withCache("offheapCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().offheap(2, MemoryUnit.MB)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithHeapOffheapCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .withCache("heapOffheapCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10).offheap(2, MemoryUnit.MB)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithDiskCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .with(persistence(temporaryFolder.newFolder().getAbsolutePath())) + .withCache("diskCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().disk(8, MemoryUnit.MB, true)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithHeapDiskCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .with(persistence(temporaryFolder.newFolder().getAbsolutePath())) + .withCache("heapDiskCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10).disk(8, MemoryUnit.MB, true)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + private void testSerializerWithThreeTierCache(Serializer serializer) throws Exception { + CacheManagerBuilder cmBuilder = + newCacheManagerBuilder() + .with(persistence(temporaryFolder.newFolder().getAbsolutePath())) + .withCache("heapOffheapDiskCache", + newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10).offheap(2, MemoryUnit.MB).disk(8, MemoryUnit.MB, true)) + .withKeySerializer(serializer) + ); + cmBuilder.build(true); + } + + public static class StatefulSerializerImpl implements StatefulSerializer { + + private int initCount = 0; + + @Override + public void init(final StateRepository stateRepository) { + initCount++; + } + + @Override + public ByteBuffer serialize(final T object) throws SerializerException { + return null; + } + + @Override + public T read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return null; + } + + @Override + public boolean equals(final T object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { + return false; + } + } + +} From 4d84c794a2fbb131e8f7da4f09ca1796870eb95b Mon Sep 
17 00:00:00 2001 From: Kevin Cleereman Date: Mon, 12 Sep 2016 07:16:11 -0600 Subject: [PATCH 006/218] Issue #883: Check tier inversion by tier height instead of by tier type --- .../ClusteringManagementServiceTest.java | 4 +- .../EhcacheManagerToStringTest.java | 2 +- .../test/resources/clusteredConfiguration.txt | 2 +- .../core/config/ResourcePoolsImpl.java | 29 ++++-- .../core/config/ResourcePoolsImplTest.java | 88 +++++++++++++++++++ 5 files changed, 115 insertions(+), 10 deletions(-) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 89196d12c7..14d9627a05 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -87,7 +87,7 @@ public void init() throws Exception { newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .offheap(1, MemoryUnit.MB) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .build()) .build(true); @@ -147,7 +147,7 @@ public void test_notifs_on_add_cache() throws Exception { newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .offheap(1, MemoryUnit.MB) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .build()); ContextContainer contextContainer = consumer.getValueForNode(consumerId, new String[]{"management", "clients", clientIdentifier, "registry", "contextContainer"}, ContextContainer.class); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java index 71fe1e342a..3f9d01f7d6 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java @@ -114,7 +114,7 @@ public void clusteredToString() throws Exception { newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .offheap(1, MemoryUnit.MB) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .build()) .build(true); diff --git a/clustered/integration-test/src/test/resources/clusteredConfiguration.txt b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt index b7ce815907..b04ba4816d 100644 --- a/clustered/integration-test/src/test/resources/clusteredConfiguration.txt +++ b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt @@ -14,7 +14,7 @@ caches: size: 1 MB tierHeight: 1000 clustered-dedicated: - size: 1 MB (persistent) + size: 2 MB (persistent) tierHeight: 10 services: - org.ehcache.clustered.client.config.ClusteringServiceConfiguration: diff --git a/core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java b/core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java index c91abd953c..ddaa0473a9 
100644 --- a/core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java +++ b/core/src/main/java/org/ehcache/core/config/ResourcePoolsImpl.java @@ -24,6 +24,7 @@ import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.EnumMap; import java.util.HashMap; @@ -100,26 +101,42 @@ public ResourcePools validateAndMerge(ResourcePools toBeUpdated) { * @param pools the resource pools to validate */ public static void validateResourcePools(Collection pools) { - EnumMap coreResources = new EnumMap(ResourceType.Core.class); - for (ResourcePool pool : pools) { - if (pool.getType() instanceof ResourceType.Core) { - coreResources.put((ResourceType.Core)pool.getType(), (SizedResourcePool)pool); + List ordered = new ArrayList(pools.size()); + for(ResourcePool pool : pools) { + if (pool instanceof SizedResourcePool) { + ordered.add((SizedResourcePool)pool); } } + Collections.sort(ordered, new Comparator() { + @Override + public int compare(final SizedResourcePool o1, final SizedResourcePool o2) { + int retVal = o2.getType().getTierHeight() - o1.getType().getTierHeight(); + if(retVal == 0) { + return o1.toString().compareTo(o2.toString()); + } else { + return retVal; + } + } + }); - List ordered = new ArrayList(coreResources.values()); for (int i = 0; i < ordered.size(); i++) { for (int j = 0; j < i; j++) { SizedResourcePool upper = ordered.get(j); SizedResourcePool lower = ordered.get(i); boolean inversion; + boolean ambiguity; try { + ambiguity = upper.getType().getTierHeight() == lower.getType().getTierHeight(); inversion = (upper.getUnit().compareTo(upper.getSize(), lower.getSize(), lower.getUnit()) >= 0) - || (lower.getUnit().compareTo(lower.getSize(), upper.getSize(), upper.getUnit()) <= 0); + || (lower.getUnit().compareTo(lower.getSize(), upper.getSize(), upper.getUnit()) <= 0); } catch (IllegalArgumentException e) { + ambiguity = false; inversion = false; } + if (ambiguity) { + throw new IllegalArgumentException("Tiering Ambiguity: '" + upper + "' has the same tier height as '" + lower + "'"); + } if (inversion) { throw new IllegalArgumentException("Tiering Inversion: '" + upper + "' is not smaller than '" + lower + "'"); } diff --git a/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java b/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java index dc31d3fcf2..ccf8806af1 100644 --- a/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java +++ b/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java @@ -45,6 +45,39 @@ */ public class ResourcePoolsImplTest { + private static class ArbitraryType implements ResourceType { + private final int tierHeight; + + public ArbitraryType(int tierHeight) { + this.tierHeight = tierHeight; + } + + @Override + public Class getResourcePoolClass() { + return SizedResourcePool.class; + } + + @Override + public boolean isPersistable() { + return false; + } + + @Override + public boolean requiresSerialization() { + return false; + } + + @Override + public int getTierHeight() { + return tierHeight; + } + + @Override + public String toString() { + return "arbitrary"; + } + } + @Test public void testMismatchedUnits() { Collection> pools = asList( @@ -69,6 +102,61 @@ public void testMatchingUnequalUnitsWellTiered() { validateResourcePools(pools); } + @Test + public void testArbitraryPoolWellTieredHeap() { + Collection> pools = asList( + new SizedResourcePoolImpl(HEAP, 9, MB, false), + new SizedResourcePoolImpl(new 
ArbitraryType(HEAP.getTierHeight() - 1), 10, MB, false)); + validateResourcePools(pools); + } + + @Test + public void testArbitraryPoolWellTieredOffHeap() { + Collection> pools = asList( + new SizedResourcePoolImpl(new ArbitraryType(OFFHEAP.getTierHeight() + 1), 9, MB, false), + new SizedResourcePoolImpl(OFFHEAP, 10, MB, false)); + validateResourcePools(pools); + } + + @Test + public void testArbitraryPoolInversionHeap() { + Collection> pools = asList( + new SizedResourcePoolImpl(HEAP, 10, MB, false), + new SizedResourcePoolImpl(new ArbitraryType(HEAP.getTierHeight() - 1), 10, MB, false)); + try { + validateResourcePools(pools); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), is("Tiering Inversion: 'Pool {10 MB heap}' is not smaller than 'Pool {10 MB arbitrary}'")); + } + } + + @Test + public void testArbitraryPoolInversionOffHeap() { + Collection> pools = asList( + new SizedResourcePoolImpl(new ArbitraryType(OFFHEAP.getTierHeight() + 1), 10, MB, false), + new SizedResourcePoolImpl(OFFHEAP, 10, MB, false)); + try { + validateResourcePools(pools); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), is("Tiering Inversion: 'Pool {10 MB arbitrary}' is not smaller than 'Pool {10 MB offheap}'")); + } + } + + @Test + public void testArbitraryPoolAmbiguity() { + Collection> pools = asList( + new SizedResourcePoolImpl(new ArbitraryType(OFFHEAP.getTierHeight()), 10, MB, false), + new SizedResourcePoolImpl(OFFHEAP, 10, MB, false)); + try { + validateResourcePools(pools); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), is("Tiering Ambiguity: 'Pool {10 MB arbitrary}' has the same tier height as 'Pool {10 MB offheap}'")); + } + } + @Test public void testEntryResourceMatch() { Collection> pools = asList( From 14875dbc05b5367e7b362d3a75d2feac45ac3ee0 Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Tue, 13 Sep 2016 13:08:20 -0400 Subject: [PATCH 007/218] Issue #1436 : Fix management plugins paths on Windows for galvan --- clustered/integration-test/build.gradle | 2 +- .../clustered/management/AbstractClusteringManagementTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index ad9f462418..932462e77e 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -52,7 +52,7 @@ test { environment 'JAVA_OPTS', '-Dcom.tc.l2.lockmanager.greedy.locks.enabled=false' //If this directory does not exist, tests will fail with a cryptic assert failure systemProperty 'kitInstallationPath', "$unzipKit.destinationDir/${project(':clustered:clustered-dist').archivesBaseName}-$project.version-kit" - systemProperty 'managementPlugins', ["management-model", "management-registry", "sequence-generator", "management-entity-server", "monitoring-service", "monitoring-service-entity"].collect { String artifact -> project.configurations.testCompile.find { it.name.startsWith("$artifact-$parent.managementVersion") } }.join(':') + systemProperty 'managementPlugins', ["management-model", "management-registry", "sequence-generator", "management-entity-server", "monitoring-service", "monitoring-service-entity"].collect { String artifact -> project.configurations.testCompile.find { it.name.startsWith("$artifact-$parent.managementVersion") } }.join(File.pathSeparator) // Uncomment to include 
client logging in console output // testLogging.showStandardStreams = true } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 0e661a5a86..f0e06859f1 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -158,7 +158,7 @@ protected static ContextualStatistics[] waitForNextStats() { } private static List getManagementPlugins() { - String[] paths = System.getProperty("managementPlugins").split(":"); + String[] paths = System.getProperty("managementPlugins").split(File.pathSeparator); List plugins = new ArrayList(paths.length); for (String path : paths) { plugins.add(new File(path)); From 4f8535366ce4e8e0a3708df158797bc91dfba2cd Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Tue, 13 Sep 2016 12:27:15 -0400 Subject: [PATCH 008/218] Issue #1436 : Normalize line endings to avoid windows issues * was tested successfully on windows, even after switching the text files to CRLF --- .../management/EhcacheManagerToStringTest.java | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java index 3f9d01f7d6..de223619ed 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java @@ -17,7 +17,6 @@ package org.ehcache.clustered.management; import org.ehcache.CacheManager; -import org.ehcache.Status; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.config.EvictionAdvisor; @@ -31,7 +30,6 @@ import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; -import org.junit.Assert; import org.junit.Test; import java.io.File; @@ -77,8 +75,8 @@ public boolean adviseAgainstEviction(String key, String value) { .build(true); try { - String actual = ((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString(); - String expected = read("/simpleConfiguration.txt"); + String actual = normalizeForLineEndings(((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString()); + String expected = normalizeForLineEndings(read("/simpleConfiguration.txt")); // only testing part of the string, to avoid collections ordering clashes assertThat( @@ -119,8 +117,8 @@ public void clusteredToString() throws Exception { .build(true); try { - String actual = ((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString(); - String expected = read("/clusteredConfiguration.txt"); + String actual = normalizeForLineEndings(((HumanReadable) cacheManager.getRuntimeConfiguration()).readableString()); + String expected = normalizeForLineEndings(read("/clusteredConfiguration.txt")); // only testing part of the string, to avoid collections ordering 
clashes
      assertThat(
@@ -180,4 +178,7 @@ private String read(String path) throws FileNotFoundException {
     }
   }
 
+  private static String normalizeForLineEndings(String stringToNormalize) {
+    return stringToNormalize.replace("\r\n","\n").replace("\r","\n");
+  }
 }

From 365fcce028061f1e14e88bdb26132f20a77448ec Mon Sep 17 00:00:00 2001
From: Albin Suresh
Date: Tue, 13 Sep 2016 14:10:17 +0530
Subject: [PATCH 009/218] Closes #1432 Remove FileBasedPersistenceContext
 constructor contract for serializers

---
 .../asciidoc/user/serializers-copiers.adoc | 2 +-
 ...ultSerializationProviderConfiguration.java | 54 ++--
 .../DefaultSerializationProvider.java | 252 ++++++------
 .../serialization/CompactJavaSerializer.java | 22 +-
 .../CompactPersistentJavaSerializer.java | 126 ---------
 ...erializationProviderConfigurationTest.java | 112 +++-----
 .../DefaultSerializationProviderTest.java | 20 +-
 .../store/heap/OnHeapStoreByValueTest.java | 3 +-
 .../impl/serialization/AddedFieldTest.java | 8 +-
 .../serialization/AddedSuperClassTest.java | 8 +-
 .../serialization/ArrayPackageScopeTest.java | 5 +-
 .../serialization/BasicSerializationTest.java | 16 +-
 .../CompactJavaSerializerClassLoaderTest.java | 10 +-
 ...mpactJavaSerializerClassUnloadingTest.java | 10 +-
 .../CompactPersistentJavaSerializerTest.java | 59 ----
 .../ehcache/impl/serialization/EnumTest.java | 11 +-
 .../serialization/FieldTypeChangeTest.java | 8 +-
 .../impl/serialization/GetFieldTest.java | 5 +-
 .../impl/serialization/PutFieldTest.java | 8 +-
 .../serialization/ReadObjectNoDataTest.java | 5 +-
 .../SerializeAfterEvolutionTest.java | 7 +-
 .../ehcache/serializer/TestSerializer.java | 5 +-
 .../org/ehcache/xml/XmlConfigurationTest.java | 10 +-
 23 files changed, 205 insertions(+), 561 deletions(-)
 delete mode 100644 impl/src/main/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializer.java
 delete mode 100644 impl/src/test/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializerTest.java

diff --git a/docs/src/docs/asciidoc/user/serializers-copiers.adoc b/docs/src/docs/asciidoc/user/serializers-copiers.adoc
index db867df01e..d8cec9cf0c 100644
--- a/docs/src/docs/asciidoc/user/serializers-copiers.adoc
+++ b/docs/src/docs/asciidoc/user/serializers-copiers.adoc
@@ -159,7 +159,7 @@ For persistent caches it is highly recommended that all state is stored in these
 * For clustered caches the contents are persisted in the cluster itself so that other clients using the same cache can also access the contents of the map.
 
 NOTE: The constructor with the signature `(ClassLoader classLoader, FileBasedPersistenceContext persistenceContext)`
-      that existed in v3.0 is still supported to respect backward compatibility but the usage is limited to disk based caches.
+      that existed until v3.1 has been removed in v3.2 in favor of `StatefulSerializer`s.
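For illustration only, and not part of this patch: under the new contract a user serializer needs just a single `(ClassLoader)` constructor, and implements `StatefulSerializer` when it has state that must survive restarts. The class name and the trivial Long encoding below are assumptions made for this sketch, not code from the repository.

import java.nio.ByteBuffer;

import org.ehcache.spi.persistence.StateRepository;
import org.ehcache.spi.serialization.SerializerException;
import org.ehcache.spi.serialization.StatefulSerializer;

// Hypothetical example of a serializer that follows the 3.2 constructor contract.
public class ExampleLongSerializer implements StatefulSerializer<Long> {

  // The single (ClassLoader) constructor is all the provider now requires,
  // for transient and persistent caches alike.
  public ExampleLongSerializer(ClassLoader classLoader) {
  }

  @Override
  public void init(StateRepository stateRepository) {
    // State that previously went into a FileBasedPersistenceContext belongs here;
    // Ehcache persists the StateRepository on disk or in the cluster as appropriate.
  }

  @Override
  public ByteBuffer serialize(Long value) throws SerializerException {
    ByteBuffer buffer = ByteBuffer.allocate(8);
    buffer.putLong(value);
    buffer.flip();
    return buffer;
  }

  @Override
  public Long read(ByteBuffer binary) throws ClassNotFoundException, SerializerException {
    // Work on a duplicate so the caller's buffer position is left untouched.
    return binary.duplicate().getLong();
  }

  @Override
  public boolean equals(Long value, ByteBuffer binary) throws ClassNotFoundException, SerializerException {
    return value.equals(read(binary));
  }
}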
[[copiers]] == Copiers diff --git a/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java b/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java index e0cda53697..c59425109d 100644 --- a/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java +++ b/impl/src/main/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfiguration.java @@ -24,14 +24,17 @@ import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * {@link ServiceCreationConfiguration} for the default {@link SerializationProvider}. */ public class DefaultSerializationProviderConfiguration implements ServiceCreationConfiguration { - private final Map, Class>> transientSerializers = new LinkedHashMap, Class>>(); - private final Map, Class>> persistentSerializers = new LinkedHashMap, Class>>(); + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultSerializationProviderConfiguration.class); + + private final Map, Class>> defaultSerializers = new LinkedHashMap, Class>>(); /** * Creates a new configuration instance. @@ -46,8 +49,7 @@ public DefaultSerializationProviderConfiguration() { * @param other the other to copy from */ public DefaultSerializationProviderConfiguration(DefaultSerializationProviderConfiguration other) { - transientSerializers.putAll(other.transientSerializers); - persistentSerializers.putAll(other.persistentSerializers); + defaultSerializers.putAll(other.defaultSerializers); } /** @@ -93,32 +95,19 @@ public DefaultSerializationProviderConfiguration addSerializerFor(Class s throw new NullPointerException("Serializer class cannot be null"); } - boolean baseConstructorPresent = isConstructorPresent(serializerClass, ClassLoader.class); - if (baseConstructorPresent) { - if (!overwrite) { - if (transientSerializers.containsKey(serializableClass)) { - throw new IllegalArgumentException("Duplicate transient serializer for class : " + serializableClass.getName()); - } - if (persistentSerializers.containsKey(serializableClass)) { - throw new IllegalArgumentException("Duplicate persistent serializer for class : " + serializableClass.getName()); - } - } - transientSerializers.put(serializableClass, serializerClass); - persistentSerializers.put(serializableClass, serializerClass); + if(!isConstructorPresent(serializerClass, ClassLoader.class)) { + throw new IllegalArgumentException("The serializer: " + serializerClass.getName() + " does not have a constructor that takes in a ClassLoader."); } - boolean legacyConstructorPresent = isConstructorPresent(serializerClass, ClassLoader.class, FileBasedPersistenceContext.class); - if(!baseConstructorPresent && !legacyConstructorPresent) { - throw new IllegalArgumentException("The serializer: " + serializerClass.getName() - + " does not meet the constructor requirements for either transient or persistent caches."); + if (isConstructorPresent(serializerClass, ClassLoader.class, FileBasedPersistenceContext.class)) { + LOGGER.warn(serializerClass.getName() + " class has a constructor that takes in a FileBasedPersistenceContext. " + + "Support for this constructor has been removed since version 3.2. 
Consider removing it."); } - if (!baseConstructorPresent && legacyConstructorPresent) { - if (!overwrite && persistentSerializers.containsKey(serializableClass)) { - throw new IllegalArgumentException("Duplicate persistent serializer for class : " + serializableClass.getName()); - } else { - persistentSerializers.put(serializableClass, serializerClass); - } + if (defaultSerializers.containsKey(serializableClass) && !overwrite) { + throw new IllegalArgumentException("Duplicate serializer for class : " + serializableClass.getName()); + } else { + defaultSerializers.put(serializableClass, serializerClass); } return this; @@ -133,21 +122,12 @@ private static boolean isConstructorPresent(Class clazz, Class... args) { } } - /** - * Returns the map of class to serializer class for transient serializers. - * - * @return the map from class to serializer class - */ - public Map, Class>> getTransientSerializers() { - return unmodifiableMap(transientSerializers); - } - /** * Returns the map of class to serializer class for persistent serializers. * * @return the map from class to serializer class */ - public Map, Class>> getPersistentSerializers() { - return unmodifiableMap(persistentSerializers); + public Map, Class>> getDefaultSerializers() { + return unmodifiableMap(defaultSerializers); } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java b/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java index d654b3af58..4631519be9 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java +++ b/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java @@ -18,7 +18,6 @@ import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; -import org.ehcache.CachePersistenceException; import org.ehcache.impl.serialization.ByteArraySerializer; import org.ehcache.impl.serialization.CharSerializer; import org.ehcache.core.internal.service.ServiceLocator; @@ -28,15 +27,11 @@ import org.ehcache.impl.serialization.IntegerSerializer; import org.ehcache.impl.serialization.LongSerializer; import org.ehcache.impl.serialization.StringSerializer; -import org.ehcache.spi.persistence.PersistableResourceService; -import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; -import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; @@ -55,8 +50,6 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; -import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; - /** * @author Ludovic Orban */ @@ -64,31 +57,25 @@ public class DefaultSerializationProvider implements SerializationProvider { private static final Logger LOG = LoggerFactory.getLogger(DefaultSerializationProvider.class); - private final TransientProvider transientProvider; - private final PersistentProvider 
persistentProvider; + protected final Map, Class>> serializers; final ConcurrentWeakIdentityHashMap, AtomicInteger> providedVsCount = new ConcurrentWeakIdentityHashMap, AtomicInteger>(); final Set> instantiated = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap, Boolean>()); public DefaultSerializationProvider(DefaultSerializationProviderConfiguration configuration) { if (configuration != null) { - transientProvider = new TransientProvider(configuration.getTransientSerializers()); - persistentProvider = new PersistentProvider(configuration.getPersistentSerializers()); + this.serializers = new LinkedHashMap, Class>>(configuration.getDefaultSerializers()); } else { - transientProvider = new TransientProvider(Collections., Class>>emptyMap()); - persistentProvider = new PersistentProvider(Collections., Class>>emptyMap()); + this.serializers = new LinkedHashMap, Class>>(Collections., Class>>emptyMap()); } } @Override public Serializer createKeySerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { - Serializer serializer = (Serializer)getUserProvidedSerializer(find(DefaultSerializerConfiguration.Type.KEY, configs)); + DefaultSerializerConfiguration configuration = find(DefaultSerializerConfiguration.Type.KEY, configs); + Serializer serializer = getUserProvidedSerializer(configuration); if (serializer == null) { - if (findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[])configs) == null) { - serializer = transientProvider.createKeySerializer(clazz, classLoader, configs); - } else { - serializer = persistentProvider.createKeySerializer(clazz, classLoader, configs); - } + serializer = createSerializer(clazz, classLoader, configuration, configs); instantiated.add(serializer); } updateProvidedInstanceCounts(serializer); @@ -97,19 +84,74 @@ public Serializer createKeySerializer(Class clazz, ClassLoader classLo @Override public Serializer createValueSerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { - Serializer serializer = (Serializer)getUserProvidedSerializer(find(DefaultSerializerConfiguration.Type.VALUE, configs)); + DefaultSerializerConfiguration configuration = find(DefaultSerializerConfiguration.Type.VALUE, configs); + Serializer serializer = getUserProvidedSerializer(configuration); if (serializer == null) { - if (findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[])configs) == null) { - serializer = transientProvider.createValueSerializer(clazz, classLoader, configs); - } else { - serializer = persistentProvider.createValueSerializer(clazz, classLoader, configs); - } + serializer = createSerializer(clazz, classLoader, configuration, configs); instantiated.add(serializer); } updateProvidedInstanceCounts(serializer); return serializer; } + private Serializer createSerializer(Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... configs) throws UnsupportedTypeException { + Class> klazz = getSerializerClassFor(clazz, config); + + try { + klazz.getConstructor(ClassLoader.class, FileBasedPersistenceContext.class); + LOG.warn(klazz.getName() + " class has a constructor that takes in a FileBasedPersistenceContext. " + + "Support for this constructor has been removed since version 3.2. 
Consider removing it."); + } catch (NoSuchMethodException e) { + // Ideal + } + + try { + return constructSerializer(clazz, klazz.getConstructor(ClassLoader.class), classLoader); + } catch (NoSuchMethodException e) { + throw new RuntimeException(klazz + " does not have a constructor that takes in a ClassLoader.", e); + } + } + + + private Class> getSerializerClassFor(Class clazz, DefaultSerializerConfiguration config) throws UnsupportedTypeException { + if (config != null) { + Class> configured = config.getClazz(); + if (configured != null) { + return configured; + } + } + + @SuppressWarnings("unchecked") + Class> direct = (Class>) serializers.get(clazz); + if (direct != null) { + return direct; + } + for (Map.Entry, Class>> entry : serializers.entrySet()) { + if (entry.getKey().isAssignableFrom(clazz)) { + @SuppressWarnings("unchecked") + Class> type = (Class>)entry.getValue(); + return type; + } + } + throw new UnsupportedTypeException("No serializer found for type '" + clazz.getName() + "'"); + } + + private Serializer constructSerializer(Class clazz, Constructor> constructor, Object ... args) { + try { + Serializer serializer = constructor.newInstance(args); + LOG.debug("Serializer for <{}> : {}", clazz.getName(), serializer); + return serializer; + } catch (InstantiationException e) { + throw new RuntimeException(e); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } catch (IllegalArgumentException e) { + throw new AssertionError(e); + } catch (InvocationTargetException e) { + throw new RuntimeException(e); + } + } + private void updateProvidedInstanceCounts(Serializer serializer) { AtomicInteger currentCount = providedVsCount.putIfAbsent(serializer, new AtomicInteger(1)); if(currentCount != null) { @@ -138,8 +180,14 @@ public void releaseSerializer(final Serializer serializer) throws IOException @Override public void start(ServiceProvider serviceProvider) { - transientProvider.start(serviceProvider); - persistentProvider.start(serviceProvider); + addDefaultSerializerIfNoneRegistered(serializers, Serializable.class, CompactJavaSerializer.asTypedSerializer()); + addDefaultSerializerIfNoneRegistered(serializers, Long.class, LongSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, Integer.class, IntegerSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, Float.class, FloatSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, Double.class, DoubleSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, Character.class, CharSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, String.class, StringSerializer.class); + addDefaultSerializerIfNoneRegistered(serializers, byte[].class, ByteArraySerializer.class); } @Override @@ -153,156 +201,6 @@ private static void addDefaultSerializerIfNoneRegistered(Map, Class } } - static class TransientProvider extends AbstractProvider { - - public TransientProvider(Map, Class>> serializers) { - super(serializers); - } - - @Override - protected Serializer createSerializer(String suffix, Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... 
configs) throws UnsupportedTypeException { - Class> klazz = getSerializerClassFor(clazz, config, classLoader); - try { - return constructSerializer(clazz, klazz.getConstructor(ClassLoader.class), classLoader); - } catch (NoSuchMethodException e) { - throw new RuntimeException(klazz + " does not meet the constructor requirement for transient caches", e); - } - } - - public void start(ServiceProvider serviceProvider) { - addDefaultSerializerIfNoneRegistered(serializers, Serializable.class, (Class) CompactJavaSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Long.class, LongSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Integer.class, IntegerSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Float.class, FloatSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Double.class, DoubleSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Character.class, CharSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, String.class, StringSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, byte[].class, ByteArraySerializer.class); - } - } - - static class PersistentProvider extends AbstractProvider { - - private ServiceProvider serviceProvider; - - private PersistentProvider(Map, Class>> serializers) { - super(serializers); - } - - @Override - protected Serializer createSerializer(String suffix, Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... configs) throws UnsupportedTypeException { - Class> klazz = getSerializerClassFor(clazz, config, classLoader); - String errorMessage = klazz + " does not meet the constructor requirements for persistent caches"; - - if (StatefulSerializer.class.isAssignableFrom(klazz)) { - try { - Constructor> constructor = klazz.getConstructor(ClassLoader.class); - return constructSerializer(clazz, constructor, classLoader); - } catch (NoSuchMethodException e) { - throw new RuntimeException(errorMessage, e); - } - } else { - PersistenceSpaceIdentifier space = findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[]) configs); - PersistableResourceService service = serviceProvider.getService(space.getServiceType()); - - if (service instanceof LocalPersistenceService) { - try { - Constructor> constructor = klazz.getConstructor(ClassLoader.class, FileBasedPersistenceContext.class); - String subSpaceName = DefaultSerializationProvider.class.getSimpleName() + suffix; - FileBasedPersistenceContext context = ((LocalPersistenceService) service).createPersistenceContextWithin(space, subSpaceName); - return constructSerializer(clazz, constructor, classLoader, context); - } catch (NoSuchMethodException nsmex) { - try { - Constructor> constructor = klazz.getConstructor(ClassLoader.class); - return constructSerializer(clazz, constructor, classLoader); - } catch (NoSuchMethodException e) { - throw new RuntimeException(errorMessage, e); - } - } catch (CachePersistenceException cpex) { - throw new RuntimeException(cpex); - } - } else { - try { - Constructor> constructor = klazz.getConstructor(ClassLoader.class); - return constructSerializer(clazz, constructor, classLoader); - } catch (NoSuchMethodException e) { - throw new RuntimeException(errorMessage, e); - } - } - } - } - - public void start(ServiceProvider serviceProvider) { - this.serviceProvider = serviceProvider; - addDefaultSerializerIfNoneRegistered(serializers, Serializable.class, (Class) CompactJavaSerializer.class); - 
addDefaultSerializerIfNoneRegistered(serializers, Long.class, LongSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Integer.class, IntegerSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Float.class, FloatSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Double.class, DoubleSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, Character.class, CharSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, String.class, StringSerializer.class); - addDefaultSerializerIfNoneRegistered(serializers, byte[].class, ByteArraySerializer.class); - } - - } - - static abstract class AbstractProvider { - - protected final Map, Class>> serializers; - - private AbstractProvider(Map, Class>> serializers) { - this.serializers = new LinkedHashMap, Class>>(serializers); - } - - public Serializer createKeySerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { - DefaultSerializerConfiguration conf = find(DefaultSerializerConfiguration.Type.KEY, configs); - return createSerializer("-Key", clazz, classLoader, conf, configs); - } - - public Serializer createValueSerializer(Class clazz, ClassLoader classLoader, ServiceConfiguration... configs) throws UnsupportedTypeException { - DefaultSerializerConfiguration conf = find(DefaultSerializerConfiguration.Type.VALUE, configs); - return createSerializer("-Value", clazz, classLoader, conf, configs); - } - - protected abstract Serializer createSerializer(String suffix, Class clazz, ClassLoader classLoader, DefaultSerializerConfiguration config, ServiceConfiguration... configs) throws UnsupportedTypeException; - - protected Class> getSerializerClassFor(Class clazz, DefaultSerializerConfiguration config, ClassLoader classLoader) throws UnsupportedTypeException { - if (config != null) { - Class> configured = config.getClazz(); - if (configured != null) { - return configured; - } - } - - Class> direct = (Class>) serializers.get(clazz); - if (direct != null) { - return direct; - } - for (Map.Entry, Class>> entry : serializers.entrySet()) { - if (entry.getKey().isAssignableFrom(clazz)) { - return (Class>) entry.getValue(); - } - } - throw new UnsupportedTypeException("No serializer found for type '" + clazz.getName() + "'"); - } - - protected Serializer constructSerializer(Class clazz, Constructor> constructor, Object ... 
args) { - try { - Serializer serializer = constructor.newInstance(args); - LOG.debug("Serializer for <{}> : {}", clazz.getName(), serializer); - return serializer; - } catch (InstantiationException e) { - throw new RuntimeException(e); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } catch (IllegalArgumentException e) { - throw new AssertionError(e); - } catch (InvocationTargetException e) { - throw new RuntimeException(e); - } - } - } - private static Serializer getUserProvidedSerializer(DefaultSerializerConfiguration conf) { if(conf != null) { Serializer instance = conf.getInstance(); diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java index 4fa0612bf2..3409aa2345 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java @@ -71,23 +71,11 @@ public class CompactJavaSerializer implements StatefulSerializer { */ public CompactJavaSerializer(ClassLoader loader) { this.loader = loader; - init(new TransientStateRepository()); } - CompactJavaSerializer(ClassLoader loader, Map mappings) { - this(loader); - for (Entry e : mappings.entrySet()) { - Integer encoding = e.getKey(); - ObjectStreamClass disconnectedOsc = disconnect(e.getValue()); - readLookup.put(encoding, disconnectedOsc); - readLookupLocalCache.put(encoding, disconnectedOsc); - if (writeLookup.putIfAbsent(new SerializableDataKey(disconnectedOsc, true), encoding) != null) { - throw new AssertionError("Corrupted data " + mappings); - } - if (nextStreamIndex < encoding + 1) { - nextStreamIndex = encoding + 1; - } - } + @SuppressWarnings("unchecked") + public static Class> asTypedSerializer() { + return (Class) CompactJavaSerializer.class; } @Override @@ -96,10 +84,6 @@ public void init(final StateRepository stateRepository) { loadMappingsInWriteContext(readLookup.entrySet(), true); } - Map getSerializationMappings() { - return Collections.unmodifiableMap(new HashMap(readLookup)); - } - /** * {@inheritDoc} */ diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializer.java deleted file mode 100644 index e0f383c513..0000000000 --- a/impl/src/main/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializer.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.ehcache.impl.serialization; - -import java.io.Closeable; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.ObjectStreamClass; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.util.Map; -import org.ehcache.spi.serialization.SerializerException; -import org.ehcache.spi.serialization.Serializer; -import org.ehcache.core.spi.service.FileBasedPersistenceContext; - -/** - * A trivially compressed Java serialization based serializer with persistent mappings. - *
- * Class descriptors in the resultant bytes are encoded as integers. Mappings - * between the integer representation and the {@link ObjectStreamClass}, and the - * {@code Class} and the integer representation are stored in a single on-heap - * map. - */ -public class CompactPersistentJavaSerializer implements Serializer, Closeable { - - private final File stateFile; - private final CompactJavaSerializer serializer; - - /** - * Constructor to enable this serializer as a persistent one. - * - * @param classLoader the classloader to use - * @param persistence the persistence context to use - * - * @see Serializer - */ - public CompactPersistentJavaSerializer(ClassLoader classLoader, FileBasedPersistenceContext persistence) throws IOException, ClassNotFoundException { - this.stateFile = new File(persistence.getDirectory(), "CompactPersistentJavaSerializer.state"); - if (stateFile.exists()) { - serializer = new CompactJavaSerializer(classLoader, readSerializationMappings(stateFile)); - } else { - serializer = new CompactJavaSerializer(classLoader); - } - } - - /** - * Closes this serializer instance, causing mappings to be persisted. - * - * @throws IOException in case mappings cannot be persisted. - */ - @Override - public final void close() throws IOException { - writeSerializationMappings(stateFile, serializer.getSerializationMappings()); - } - - private static Map readSerializationMappings(File stateFile) throws IOException, ClassNotFoundException { - FileInputStream fin = new FileInputStream(stateFile); - try { - ObjectInputStream oin = new ObjectInputStream(fin); - try { - return (Map) oin.readObject(); - } finally { - oin.close(); - } - } finally { - fin.close(); - } - } - - private static void writeSerializationMappings(File stateFile, Map mappings) throws IOException { - OutputStream fout = new FileOutputStream(stateFile); - try { - ObjectOutputStream oout = new ObjectOutputStream(fout); - try { - oout.writeObject(mappings); - } finally { - oout.close(); - } - } finally { - fout.close(); - } - } - - /** - * {@inheritDoc} - */ - @Override - public ByteBuffer serialize(T object) throws SerializerException { - return serializer.serialize(object); - } - - /** - * {@inheritDoc} - */ - @Override - public T read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { - return serializer.read(binary); - } - - /** - * {@inheritDoc} - */ - @Override - public boolean equals(T object, ByteBuffer binary) throws ClassNotFoundException, SerializerException { - return serializer.equals(object, binary); - } - -} diff --git a/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java b/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java index 130e15a43f..c4cbb3ae98 100644 --- a/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java +++ b/impl/src/test/java/org/ehcache/impl/config/serializer/DefaultSerializationProviderConfigurationTest.java @@ -35,79 +35,56 @@ public class DefaultSerializationProviderConfigurationTest { public ExpectedException expectedException = ExpectedException.none(); @Test - public void testAddSerializerForTransient() throws Exception { + public void testAddSerializerFor() throws Exception { DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, TransientSerializer.class); + config.addSerializerFor(Long.class, MinimalSerializer.class); 
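The hunk above reflects the reworked DefaultSerializationProviderConfiguration contract exercised by these tests: a single default-serializer map replaces the separate transient and persistent maps, duplicate registrations are rejected, and a registered class must expose a constructor taking a ClassLoader. The following is a minimal sketch of that registration API under those assumptions; Utf8StringSerializer is an illustrative serializer written for this sketch, not part of the patch.

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration;
import org.ehcache.spi.serialization.Serializer;
import org.ehcache.spi.serialization.SerializerException;

public class DefaultSerializerRegistrationSketch {

  // Accepted because it exposes a ClassLoader-taking constructor; classes without one
  // are rejected with "does not have a constructor that takes in a ClassLoader."
  public static class Utf8StringSerializer implements Serializer<String> {

    public Utf8StringSerializer(ClassLoader loader) {
      // the loader is unused in this toy serializer
    }

    @Override
    public ByteBuffer serialize(String object) throws SerializerException {
      return ByteBuffer.wrap(object.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public String read(ByteBuffer binary) throws ClassNotFoundException, SerializerException {
      byte[] bytes = new byte[binary.remaining()];
      binary.get(bytes);
      return new String(bytes, StandardCharsets.UTF_8);
    }

    @Override
    public boolean equals(String object, ByteBuffer binary) throws ClassNotFoundException, SerializerException {
      return object.equals(read(binary.duplicate()));
    }
  }

  public static void main(String[] args) {
    DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration();

    // Single registration point: no more separate transient/persistent maps.
    config.addSerializerFor(String.class, Utf8StringSerializer.class);

    // Registering a second serializer for the same type is rejected,
    // matching the duplicate-registration test above.
    try {
      config.addSerializerFor(String.class, Utf8StringSerializer.class);
    } catch (IllegalArgumentException e) {
      System.out.println("Rejected: " + e.getMessage());
    }
  }
}
```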
- assertSame(TransientSerializer.class, config.getPersistentSerializers().get(Long.class)); - assertSame(TransientSerializer.class, config.getTransientSerializers().get(Long.class)); + assertSame(MinimalSerializer.class, config.getDefaultSerializers().get(Long.class)); } @Test - public void testAddSerializerForPersistent() throws Exception { + public void testAddSerializerForDuplicateThrows() throws Exception { DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, PersistentSerializer.class); - - assertTrue(config.getTransientSerializers().isEmpty()); - assertSame(PersistentSerializer.class, config.getPersistentSerializers().get(Long.class)); + config.addSerializerFor(Long.class, MinimalSerializer.class); + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("Duplicate serializer for class"); + config.addSerializerFor(Long.class, MinimalSerializer.class); } @Test - public void testAddSerializerForTransientPersistentLegacyCombo() throws Exception { + public void testAddSerializerForConstructorless() throws Exception { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, LegacyComboSerializer.class); - - assertSame(LegacyComboSerializer.class, config.getPersistentSerializers().get(Long.class)); - assertSame(LegacyComboSerializer.class, config.getTransientSerializers().get(Long.class)); + config.addSerializerFor(Long.class, UnusableSerializer.class); } @Test - public void testAddSerializerForTransientPersistentCombo() throws Exception { + public void testAddSerializerForStatefulSerializer() throws Exception { DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, ComboSerializer.class); - - assertSame(ComboSerializer.class, config.getPersistentSerializers().get(Long.class)); - assertSame(ComboSerializer.class, config.getTransientSerializers().get(Long.class)); + config.addSerializerFor(Long.class, MinimalStatefulSerializer.class); + assertSame(MinimalStatefulSerializer.class, config.getDefaultSerializers().get(Long.class)); } @Test - public void testAddSerializerForConstructorless() throws Exception { - expectedException.expectMessage("does not meet the constructor requirements for either transient or persistent caches"); + public void testAddSerializerForStatefulConstructorless() throws Exception { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProviderConfiguration config = new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, UnusableSerializer.class); + config.addSerializerFor(Long.class, UnusableStatefulSerializer.class); } @Test - public void testAddSerializerForStatefulOnly() throws Exception { - expectedException.expectMessage("does not meet the constructor requirements for either transient or persistent caches"); + public void testAddSerializerForLegacySerializer() throws Exception { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProviderConfiguration config = 
new DefaultSerializationProviderConfiguration(); - config.addSerializerFor(Long.class, YetAnotherUnusableSerializer.class); - } - - private static class TransientSerializer implements Serializer { - - public TransientSerializer(ClassLoader loader) { - } - - @Override - public ByteBuffer serialize(final Long object) throws SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } + config.addSerializerFor(Long.class, LegacySerializer.class); } - private static class PersistentSerializer implements Serializer { + private static class MinimalSerializer implements Serializer { - public PersistentSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + public MinimalSerializer(ClassLoader loader) { } @Override @@ -126,12 +103,9 @@ public boolean equals(final Long object, final ByteBuffer binary) throws ClassNo } } - private static class LegacyComboSerializer implements Serializer { + private static class LegacySerializer implements Serializer { - public LegacyComboSerializer(ClassLoader loader) { - } - - public LegacyComboSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + public LegacySerializer(ClassLoader loader, FileBasedPersistenceContext context) { } @Override @@ -168,35 +142,9 @@ public boolean equals(final Long object, final ByteBuffer binary) throws ClassNo } } - private static class ComboSerializer implements StatefulSerializer { - - public ComboSerializer(ClassLoader loader) { - } - - @Override - public void init(final StateRepository stateRepository) { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public ByteBuffer serialize(final Long object) throws SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public Long read(final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - - @Override - public boolean equals(final Long object, final ByteBuffer binary) throws ClassNotFoundException, SerializerException { - throw new UnsupportedOperationException("Implement me!"); - } - } - - private static class AnotherUnusableSerializer implements StatefulSerializer { + private static class MinimalStatefulSerializer implements StatefulSerializer { - public AnotherUnusableSerializer(ClassLoader loader, FileBasedPersistenceContext context) { + public MinimalStatefulSerializer(ClassLoader loader) { } @Override @@ -220,7 +168,7 @@ public boolean equals(final Long object, final ByteBuffer binary) throws ClassNo } } - private static class YetAnotherUnusableSerializer implements StatefulSerializer { + private static class UnusableStatefulSerializer implements StatefulSerializer { @Override public void init(final StateRepository stateRepository) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java index 883dddfb06..737488913d 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java +++ 
b/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java @@ -294,7 +294,7 @@ public void testDefaultByteArraySerializer() throws Exception { @Test public void testCreateTransientSerializerWithoutConstructor() throws Exception { expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not meet the constructor requirement for transient caches"); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @@ -305,7 +305,7 @@ public void testCreateTransientSerializerWithoutConstructor() throws Exception { @Test public void testCreatePersistentSerializerWithoutConstructor() throws Exception { expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not meet the constructor requirements for persistent caches"); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @@ -316,7 +316,7 @@ public void testCreatePersistentSerializerWithoutConstructor() throws Exception @Test public void testCreateTransientStatefulSerializerWithoutConstructor() throws Exception { expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not meet the constructor requirement for transient caches"); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @@ -327,7 +327,7 @@ public void testCreateTransientStatefulSerializerWithoutConstructor() throws Exc @Test public void testCreatePersistentStatefulSerializerWithoutConstructor() throws Exception { expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not meet the constructor requirements for persistent caches"); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @@ -390,7 +390,7 @@ public void testPersistentMinimalStatefulSerializer() throws Exception { @Test public void testTransientLegacySerializer() throws Exception { expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not meet the constructor requirement for transient caches"); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @@ -404,6 +404,8 @@ public void testPersistentLegacySerializer() throws Exception { LegacySerializer.legacyConstructorInvoked = false; DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(LegacySerializer.class, DefaultSerializerConfiguration.Type.VALUE); + expectedException.expect(RuntimeException.class); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); Serializer valueSerializer = provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); assertThat(valueSerializer, instanceOf(LegacySerializer.class)); @@ -435,14 +437,14 @@ public void 
testPersistentLegacyComboSerializer() throws Exception { Serializer valueSerializer = provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); assertThat(valueSerializer, instanceOf(LegacyComboSerializer.class)); - assertThat(LegacyComboSerializer.baseConstructorInvoked, is(false)); - assertThat(LegacyComboSerializer.legacyConstructorInvoked, is(true)); + assertThat(LegacyComboSerializer.baseConstructorInvoked, is(true)); + assertThat(LegacyComboSerializer.legacyConstructorInvoked, is(false)); } @Test public void testCreateTransientStatefulLegacySerializer() throws Exception { expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not meet the constructor requirement for transient caches"); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); @@ -453,7 +455,7 @@ public void testCreateTransientStatefulLegacySerializer() throws Exception { @Test public void testCreatePersistentStatefulLegacySerializer() throws Exception { expectedException.expect(RuntimeException.class); - expectedException.expectMessage("does not meet the constructor requirements for persistent caches"); + expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java index d1b2c2b8ae..b2a34f938a 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java @@ -28,7 +28,6 @@ import org.ehcache.expiry.Expiry; import org.ehcache.core.spi.function.Function; import org.ehcache.impl.copy.SerializingCopier; -import org.ehcache.impl.serialization.CompactJavaSerializer; import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.impl.internal.store.AbstractValueHolder; @@ -73,7 +72,7 @@ public Long copyForWrite(Long obj) { public void testKeyCopierCalledOnGetOrComputeIfAbsent() throws Exception { LongCopier keyCopier = new LongCopier(); OnHeapStore store = newStore(SystemTimeSource.INSTANCE, Expirations.noExpiration(), Eviction.noAdvice(), - keyCopier, new SerializingCopier(new CompactJavaSerializer(ClassLoader.getSystemClassLoader())), 100); + keyCopier, new SerializingCopier(new JavaSerializer(ClassLoader.getSystemClassLoader())), 100); ValueHolder computed = store.getOrComputeIfAbsent(1L, new Function>() { @Override diff --git a/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java b/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java index a39b1d6000..390371a274 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.hamcrest.core.Is; import org.junit.Assert; import org.junit.Test; @@ -41,7 +41,8 @@ public class AddedFieldTest { @Test public void 
addingSerializableField() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_write.class, IncompatibleSerializable_write.class, Serializable_write.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(A_write.class)).newInstance(); @@ -58,7 +59,8 @@ public void addingSerializableField() throws Exception { @Test public void addingExternalizableField() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(B_write.class, Externalizable_write.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(B_write.class)).newInstance(); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java b/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java index 0edf4f059b..634798b206 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Test; import java.io.Serializable; @@ -35,7 +35,8 @@ public class AddedSuperClassTest { @Test public void testAddedSuperClass() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_2.class, AddedSuperClass_Hidden.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(A_2.class)).newInstance(); @@ -51,7 +52,8 @@ public void testAddedSuperClass() throws Exception { @Test public void testAddedSuperClassNotHidden() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_2.class, AddedSuperClass_Hidden.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(A_2.class)).newInstance(); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java b/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java index 05fb13465d..6b3703e058 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Assert; import org.junit.Test; @@ -37,7 +37,8 @@ public class ArrayPackageScopeTest { @Test public void testArrayPackageScope() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Foo_A.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java 
b/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java index 6921bc5383..f08d6921b3 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java @@ -23,9 +23,7 @@ import java.util.HashMap; import java.util.Random; -import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.spi.serialization.Serializer; - +import org.ehcache.spi.serialization.StatefulSerializer; import org.hamcrest.core.Is; import org.hamcrest.core.IsEqual; import org.hamcrest.core.IsNot; @@ -42,7 +40,8 @@ public class BasicSerializationTest { @Test public void testSimpleObject() throws ClassNotFoundException { - Serializer test = new CompactJavaSerializer(null); + StatefulSerializer test = new CompactJavaSerializer(null); + test.init(new TransientStateRepository()); String input = ""; String result = (String) test.read(test.serialize(input)); @@ -53,7 +52,8 @@ public void testSimpleObject() throws ClassNotFoundException { @Test public void testComplexObject() throws ClassNotFoundException { - Serializer test = new CompactJavaSerializer(null); + StatefulSerializer test = new CompactJavaSerializer(null); + test.init(new TransientStateRepository()); HashMap input = new HashMap(); input.put(1, "one"); @@ -74,7 +74,8 @@ public void testComplexObject() throws ClassNotFoundException { @Test public void testPrimitiveClasses() throws ClassNotFoundException { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); Class[] out = (Class[]) s.read(s.serialize(PRIMITIVE_CLASSES)); @@ -88,7 +89,8 @@ public void testProxyInstance() throws ClassNotFoundException { int foo = rand.nextInt(); float bar = rand.nextFloat(); - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); Object proxy = s.read(s.serialize((Serializable) Proxy.newProxyInstance(BasicSerializationTest.class.getClassLoader(), new Class[]{Foo.class, Bar.class}, new Handler(foo, bar)))); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java index 7e2399c3e8..240d3617a5 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java @@ -21,9 +21,7 @@ import static org.ehcache.impl.serialization.SerializerTestUtilities.popTccl; import static org.ehcache.impl.serialization.SerializerTestUtilities.pushTccl; -import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.spi.serialization.Serializer; - +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Assert; import org.junit.Test; @@ -38,7 +36,8 @@ private static ClassLoader newLoader() { @Test public void testThreadContextLoader() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); ClassLoader loader = newLoader(); ByteBuffer encoded = serializer.serialize((Serializable) loader.loadClass(Foo.class.getName()).newInstance()); @@ -54,7 +53,8 @@ public void testThreadContextLoader() throws Exception { @Test public void 
testExplicitLoader() throws Exception { ClassLoader loader = newLoader(); - Serializer serializer = new CompactJavaSerializer(loader); + StatefulSerializer serializer = new CompactJavaSerializer(loader); + serializer.init(new TransientStateRepository()); ByteBuffer encoded = serializer.serialize((Serializable) loader.loadClass(Foo.class.getName()).newInstance()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java index c53e9ca50b..793c554e2a 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java @@ -23,9 +23,7 @@ import java.util.ArrayList; import java.util.List; -import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.spi.serialization.Serializer; - +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -54,7 +52,8 @@ public void createSpecialObject() throws Exception { @Test public void testClassUnloadingAfterSerialization() throws Exception { - Serializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); serializer.serialize(specialObject); @@ -74,7 +73,8 @@ public void testClassUnloadingAfterSerialization() throws Exception { public void testClassUnloadingAfterSerializationAndDeserialization() throws Exception { Thread.currentThread().setContextClassLoader(specialObject.getClass().getClassLoader()); try { - Serializer serializer = new CompactJavaSerializer(null); + StatefulSerializer serializer = new CompactJavaSerializer(null); + serializer.init(new TransientStateRepository()); specialObject = serializer.read(serializer.serialize(specialObject)); Assert.assertEquals(SpecialClass.class.getName(), specialObject.getClass().getName()); Assert.assertNotSame(SpecialClass.class, specialObject.getClass()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializerTest.java b/impl/src/test/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializerTest.java deleted file mode 100644 index be58b65d4b..0000000000 --- a/impl/src/test/java/org/ehcache/impl/serialization/CompactPersistentJavaSerializerTest.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
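The test updates around this point all follow the same pattern: CompactJavaSerializer is now used strictly as a StatefulSerializer, so callers construct it and then call init() with a StateRepository before serializing, and the file-backed CompactPersistentJavaSerializer removed by this patch is presumably no longer needed because persisted type mappings are expected to flow through the StateRepository mechanism instead. A minimal usage sketch under those assumptions follows; outside of tests the serialization provider, not user code, is expected to perform the init() call, and TransientStateRepository is simply the easiest repository to demonstrate with.

```java
import java.nio.ByteBuffer;

import org.ehcache.impl.serialization.CompactJavaSerializer;
import org.ehcache.impl.serialization.TransientStateRepository;
import org.ehcache.spi.serialization.StatefulSerializer;

public class CompactJavaSerializerUsageSketch {

  public static void main(String[] args) throws ClassNotFoundException {
    // The constructor no longer calls init(...) itself, so a StateRepository must be
    // supplied before first use; TransientStateRepository keeps mappings on heap only.
    StatefulSerializer<String> serializer = new CompactJavaSerializer<String>(ClassLoader.getSystemClassLoader());
    serializer.init(new TransientStateRepository());

    ByteBuffer encoded = serializer.serialize("hello");
    String decoded = serializer.read(encoded);
    System.out.println(decoded); // prints "hello"
  }
}
```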
- */ - -package org.ehcache.impl.serialization; - -import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -import java.io.File; -import java.nio.ByteBuffer; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.*; - -/** - * CompactPersistentJavaSerializerTest - */ -public class CompactPersistentJavaSerializerTest { - - @Rule - public TemporaryFolder temporaryFolder = new TemporaryFolder(); - - @Test - public void testProperlyInitializesEncodingIndexOnLoad() throws Exception { - final File folder = temporaryFolder.newFolder("test-cpjs"); - FileBasedPersistenceContext persistenceContext = new FileBasedPersistenceContext() { - @Override - public File getDirectory() { - return folder; - } - }; - - CompactPersistentJavaSerializer serializer = new CompactPersistentJavaSerializer(getClass().getClassLoader(), persistenceContext); - ByteBuffer integerBytes = serializer.serialize(10); - serializer.close(); - - serializer = new CompactPersistentJavaSerializer(getClass().getClassLoader(), persistenceContext); - ByteBuffer longBytes = serializer.serialize(42L); - - assertThat((Integer) serializer.read(integerBytes), is(10)); - assertThat((Long) serializer.read(longBytes), is(42L)); - } - -} \ No newline at end of file diff --git a/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java b/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java index 0311436a5b..63bdd6df69 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.hamcrest.core.IsSame; import org.junit.Assert; import org.junit.Test; @@ -36,7 +36,8 @@ public class EnumTest { @Test public void basicInstanceSerialization() throws ClassNotFoundException { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); Assert.assertThat(s.read(s.serialize(People.Alice)), IsSame.sameInstance(People.Alice)); Assert.assertThat(s.read(s.serialize(People.Bob)), IsSame.sameInstance(People.Bob)); @@ -45,7 +46,8 @@ public void basicInstanceSerialization() throws ClassNotFoundException { @Test public void classSerialization() throws ClassNotFoundException { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); Assert.assertThat(s.read(s.serialize(Enum.class)), IsSame.sameInstance(Enum.class)); Assert.assertThat(s.read(s.serialize(Dogs.Handel.getClass())), IsSame.sameInstance(Dogs.Handel.getClass())); @@ -55,7 +57,8 @@ public void classSerialization() throws ClassNotFoundException { @Test public void shiftingInstanceSerialization() throws ClassNotFoundException { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader wLoader = createClassNameRewritingLoader(Foo_W.class); ClassLoader rLoader = createClassNameRewritingLoader(Foo_R.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java b/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java index 21d83f4c6d..5746c1048d 100644 --- 
a/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Test; import java.io.Serializable; @@ -35,7 +35,8 @@ public class FieldTypeChangeTest { @Test public void fieldTypeChangeWithOkayObject() throws Exception { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderW = createClassNameRewritingLoader(Foo_W.class); Serializable a = (Serializable) loaderW.loadClass(newClassName(Foo_W.class)).getConstructor(Object.class).newInstance("foo"); @@ -50,7 +51,8 @@ public void fieldTypeChangeWithOkayObject() throws Exception { @Test public void fieldTypeChangeWithIncompatibleObject() throws Exception { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderW = createClassNameRewritingLoader(Foo_W.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java b/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java index 3c0f937c89..3fe3a4688a 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Test; import java.io.IOException; @@ -37,7 +37,8 @@ public class GetFieldTest { @Test public void testGetField() throws Exception { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Foo_A.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(Foo_A.class)).newInstance(); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java b/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java index 1c6dcf679c..61c1429d59 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Assert; import org.junit.Test; @@ -39,7 +39,8 @@ public class PutFieldTest { @Test public void testWithAllPrimitivesAndString() throws Exception { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Foo_A.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(Foo_A.class)).newInstance(); @@ -65,7 +66,8 @@ public void testWithAllPrimitivesAndString() throws Exception { @Test public void testWithTwoStrings() throws Exception { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(Bar_A.class); Serializable a = (Serializable) 
loaderA.loadClass(newClassName(Bar_A.class)).newInstance(); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java b/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java index d9ef48d9e2..8495387800 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java @@ -16,7 +16,7 @@ package org.ehcache.impl.serialization; -import org.ehcache.spi.serialization.Serializer; +import org.ehcache.spi.serialization.StatefulSerializer; import org.junit.Assert; import org.junit.Test; @@ -37,7 +37,8 @@ public class ReadObjectNoDataTest { @Test public void test() throws Exception { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderW = createClassNameRewritingLoader(C_W.class, B_W.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java b/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java index 18a0805aaf..b3ae43e8aa 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java @@ -20,9 +20,7 @@ import java.nio.ByteBuffer; import java.util.Date; -import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.ehcache.spi.serialization.Serializer; - +import org.ehcache.spi.serialization.StatefulSerializer; import org.hamcrest.core.Is; import org.junit.Assert; import org.junit.Test; @@ -35,7 +33,8 @@ public class SerializeAfterEvolutionTest { @Test public void test() throws Exception { - Serializer s = new CompactJavaSerializer(null); + StatefulSerializer s = new CompactJavaSerializer(null); + s.init(new TransientStateRepository()); ClassLoader loaderA = createClassNameRewritingLoader(A_old.class); Serializable a = (Serializable) loaderA.loadClass(newClassName(A_old.class)).newInstance(); diff --git a/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java b/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java index 5376affb75..fcafefabc7 100644 --- a/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java +++ b/xml/src/test/java/com/pany/ehcache/serializer/TestSerializer.java @@ -15,6 +15,7 @@ */ package com.pany.ehcache.serializer; +import org.ehcache.impl.serialization.TransientStateRepository; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.impl.serialization.CompactJavaSerializer; import org.ehcache.spi.serialization.Serializer; @@ -26,7 +27,9 @@ public class TestSerializer implements Serializer { private final Serializer serializer; public TestSerializer(ClassLoader classLoader) { - serializer = new CompactJavaSerializer(classLoader); + CompactJavaSerializer compactJavaSerializer = new CompactJavaSerializer(classLoader); + compactJavaSerializer.init(new TransientStateRepository()); + serializer = compactJavaSerializer; } @Override diff --git a/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java b/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java index 209728a190..360fe2af1e 100644 --- a/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java +++ b/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java @@ -371,11 +371,11 @@ public void testDefaultSerializerConfiguration() throws Exception { assertThat(configuration, 
instanceOf(DefaultSerializationProviderConfiguration.class)); DefaultSerializationProviderConfiguration factoryConfiguration = (DefaultSerializationProviderConfiguration) configuration; - assertThat(factoryConfiguration.getTransientSerializers().size(), is(4)); - assertThat(factoryConfiguration.getTransientSerializers().get(CharSequence.class), Matchers.>equalTo(TestSerializer.class)); - assertThat(factoryConfiguration.getTransientSerializers().get(Number.class), Matchers.>equalTo(TestSerializer2.class)); - assertThat(factoryConfiguration.getTransientSerializers().get(Long.class), Matchers.>equalTo(TestSerializer3.class)); - assertThat(factoryConfiguration.getTransientSerializers().get(Integer.class), Matchers.>equalTo(TestSerializer4.class)); + assertThat(factoryConfiguration.getDefaultSerializers().size(), is(4)); + assertThat(factoryConfiguration.getDefaultSerializers().get(CharSequence.class), Matchers.>equalTo(TestSerializer.class)); + assertThat(factoryConfiguration.getDefaultSerializers().get(Number.class), Matchers.>equalTo(TestSerializer2.class)); + assertThat(factoryConfiguration.getDefaultSerializers().get(Long.class), Matchers.>equalTo(TestSerializer3.class)); + assertThat(factoryConfiguration.getDefaultSerializers().get(Integer.class), Matchers.>equalTo(TestSerializer4.class)); List> orderedServiceConfigurations = new ArrayList>(xmlConfig.getCacheConfigurations().get("baz").getServiceConfigurations()); From b44de1b18e2b09af1d9d0d967ca13f91a384f957 Mon Sep 17 00:00:00 2001 From: Chris Dennis Date: Tue, 13 Sep 2016 19:53:38 -0400 Subject: [PATCH 010/218] Issue #1438 : use failOnVersionConflict dependency resolution strategy --- build.gradle | 11 +++++++++- clustered/integration-test/build.gradle | 5 ++++- core-spi-test/build.gradle | 6 ++++-- core/build.gradle | 5 ++++- demos/build.gradle | 2 +- docs/build.gradle | 4 ++++ impl/build.gradle | 4 +++- osgi-test/build.gradle | 27 ++++++++++++++++++------- transactions/build.gradle | 4 +++- 9 files changed, 53 insertions(+), 15 deletions(-) diff --git a/build.gradle b/build.gradle index bf95e3abd3..379f474f6e 100644 --- a/build.gradle +++ b/build.gradle @@ -77,7 +77,10 @@ subprojects { } else { compileOnly 'com.google.code.findbugs:annotations:2.0.3' } - testCompile 'junit:junit:4.11', 'org.hamcrest:hamcrest-library:1.3', 'org.mockito:mockito-core:1.9.5' + testCompile 'junit:junit:4.12', 'org.hamcrest:hamcrest-library:1.3' + testCompile('org.mockito:mockito-core:1.9.5') { + exclude group:'org.hamcrest', module:'hamcrest-core' + } testRuntime "org.slf4j:slf4j-simple:$parent.slf4jVersion" } @@ -159,6 +162,12 @@ subprojects { csv.enabled false } } + + configurations.all { + resolutionStrategy { + failOnVersionConflict() + } + } } allprojects { diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index 932462e77e..2a2854a26e 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -21,7 +21,10 @@ dependencies { testCompile "org.terracotta.management:management-entity-client:$parent.managementVersion" testCompile group:'org.terracotta', name:'galvan-support', version: galvanVersion - testCompile group:'com.google.code.tempus-fugit', name:'tempus-fugit', version:'1.1' + testCompile (group:'com.google.code.tempus-fugit', name:'tempus-fugit', version:'1.1') { + exclude group:'junit', module:'junit' + exclude group:'org.hamcrest', module:'hamcrest-core' + } testCompile group: 'javax.cache', name: 'cache-api', version: jcacheVersion testCompile 
"org.terracotta.management:management-entity-server:$parent.managementVersion:plugin" diff --git a/core-spi-test/build.gradle b/core-spi-test/build.gradle index 3d34399c32..83f89d5256 100644 --- a/core-spi-test/build.gradle +++ b/core-spi-test/build.gradle @@ -15,6 +15,8 @@ */ dependencies { - compile project(':spi-tester'), project(':core'), - 'org.hamcrest:hamcrest-library:1.3', 'org.mockito:mockito-core:1.9.5', 'junit:junit:4.11' + compile project(':spi-tester'), project(':core'), 'org.hamcrest:hamcrest-library:1.3', 'junit:junit:4.12' + compile ('org.mockito:mockito-core:1.9.5') { + exclude group:'org.hamcrest', module:'hamcrest-core' + } } diff --git a/core/build.gradle b/core/build.gradle index 8dcc48fded..07ba61edf4 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -17,6 +17,9 @@ apply plugin: EhDeploy dependencies { - compile project(':api'), "org.terracotta:statistics:$parent.statisticVersion", "org.slf4j:slf4j-api:$parent.slf4jVersion" + compile project(':api'), "org.slf4j:slf4j-api:$parent.slf4jVersion" + compile ("org.terracotta:statistics:$parent.statisticVersion") { + exclude group:'org.slf4j', module:'slf4j-api' + } testCompile project(':spi-tester') } \ No newline at end of file diff --git a/demos/build.gradle b/demos/build.gradle index 44a858b5e3..f2b79ea364 100644 --- a/demos/build.gradle +++ b/demos/build.gradle @@ -3,6 +3,6 @@ subprojects { apply plugin: 'jetty' dependencies { - compile 'ch.qos.logback:logback-classic:1.0.13', 'javax.servlet:servlet-api:2.5', 'com.h2database:h2:1.4.186', project(':impl') + compile 'ch.qos.logback:logback-classic:1.1.3', 'javax.servlet:servlet-api:2.5', 'com.h2database:h2:1.4.186', project(':impl') } } \ No newline at end of file diff --git a/docs/build.gradle b/docs/build.gradle index 042b896d65..a980e66ef1 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -26,6 +26,10 @@ buildscript { apply plugin: 'org.asciidoctor.convert' +configurations.asciidoctor.dependencies.matching({it.group == 'org.asciidoctor' && it.name == 'asciidoctorj-groovy-dsl'}).all { + exclude group:'org.asciidoctor', module:'asciidoctorj' +} + task copyCSS(type: Copy) { from ('css') { include '**' diff --git a/impl/build.gradle b/impl/build.gradle index 8fc63c184d..aa23d8388e 100644 --- a/impl/build.gradle +++ b/impl/build.gradle @@ -19,7 +19,9 @@ apply plugin: EhDeploy dependencies { compile project(':api'), project(':core') compile group: 'org.terracotta', name: 'offheap-store', version: parent.offheapVersion - compile group: 'org.ehcache', name: 'sizeof', version: parent.sizeofVersion + compile (group: 'org.ehcache', name: 'sizeof', version: parent.sizeofVersion) { + exclude group:'org.slf4j', module:'slf4j-api' + } testCompile project(path: ':core-spi-test'), 'org.ow2.asm:asm-all:5.0.4' } diff --git a/osgi-test/build.gradle b/osgi-test/build.gradle index 94f4f7b1d1..c1ba150d56 100644 --- a/osgi-test/build.gradle +++ b/osgi-test/build.gradle @@ -17,22 +17,35 @@ dependencies { ext { paxExamVersion = '3.5.0' - urlVersion = '1.6.0' felixVersion = '4.4.0' } testCompile project(':impl'), project(':xml'), project(':107'), - 'junit:junit:4.11', - "org.ops4j.pax.exam:pax-exam-junit4:$paxExamVersion", "org.apache.felix:org.apache.felix.framework:$felixVersion", "javax.cache:cache-api:$parent.jcacheVersion" + testCompile ("org.ops4j.pax.exam:pax-exam-junit4:$paxExamVersion") { + exclude group:'junit', module:'junit' + exclude group:'org.slf4j', module:'slf4j-api' + } testRuntime "org.slf4j:slf4j-simple:$parent.slf4jVersion", - 
"org.ops4j.pax.exam:pax-exam-container-native:$paxExamVersion", - "org.ops4j.pax.exam:pax-exam-link-mvn:$paxExamVersion", - "org.ops4j.pax.url:pax-url-aether:$urlVersion" - + testRuntime ("org.ops4j.pax.exam:pax-exam-container-native:$paxExamVersion") { + exclude group:'org.slf4j', module:'slf4j-api' + } + testRuntime ("org.ops4j.pax.exam:pax-exam-link-mvn:$paxExamVersion") { + exclude group:'org.slf4j', module:'slf4j-api' + } +} +configurations.testRuntime { + resolutionStrategy.force 'org.apache.maven.wagon:wagon-provider-api:2.5' + resolutionStrategy.force 'org.codehaus.plexus:plexus-utils:3.0.15' + resolutionStrategy.force 'org.eclipse.aether:aether-api:0.9.0.M4' + resolutionStrategy.force 'org.eclipse.aether:aether-impl:0.9.0.M4' + resolutionStrategy.force 'org.eclipse.aether:aether-spi:0.9.0.M4' + resolutionStrategy.force 'org.eclipse.aether:aether-util:0.9.0.M4' + resolutionStrategy.force 'org.sonatype.plexus:plexus-cipher:1.7' + resolutionStrategy.force 'org.sonatype.plexus:plexus-sec-dispatcher:1.4' } sourceSets { diff --git a/transactions/build.gradle b/transactions/build.gradle index c61ea6d5d0..5f7a308896 100644 --- a/transactions/build.gradle +++ b/transactions/build.gradle @@ -24,7 +24,9 @@ dependencies { compile project(':impl'), project(':xml') compile group: 'javax.transaction', name: 'jta', version: '1.1' testCompile project(path: ':core-spi-test') - compile group: 'org.codehaus.btm', name: 'btm', version: '2.1.4' + compile (group: 'org.codehaus.btm', name: 'btm', version: '2.1.4') { + exclude group:'org.slf4j', module:'slf4j-api' + } } // For EhPomMangle From 821962accd21c315c7adb6cf37fb4bdca29a8b8e Mon Sep 17 00:00:00 2001 From: Ramesh Kavanappillil Date: Fri, 24 Jun 2016 11:29:53 +0530 Subject: [PATCH 011/218] issue #1192: separating local persistence service from disk persistence service --- .../core/PersistentUserManagedEhcache.java | 16 +- .../core/spi/service/DiskResourceService.java | 34 ++ .../spi/service/LocalPersistenceService.java | 65 ++- .../builders/UserManagedCacheBuilder.java | 18 +- .../DefaultDiskResourceServiceFactory.java | 35 ++ .../DefaultSerializationProvider.java | 1 + .../internal/store/disk/OffHeapDiskStore.java | 25 +- .../DefaultDiskResourceService.java | 274 ++++++++++ .../DefaultLocalPersistenceService.java | 477 +++--------------- .../persistence/FileBasedStateRepository.java | 2 +- .../ehcache/impl/persistence/FileUtils.java | 209 ++++++++ ...rg.ehcache.core.spi.service.ServiceFactory | 1 + .../builders/PersistentCacheManagerTest.java | 61 ++- .../org/ehcache/docs/UserManagedCaches.java | 2 +- ...eManagerDestroyRemovesPersistenceTest.java | 24 +- ...vice.java => TestDiskResourceService.java} | 48 +- .../DefaultSerializationProviderTest.java | 13 +- .../disk/OffHeapDiskStoreProviderTest.java | 5 +- .../store/disk/OffHeapDiskStoreSPITest.java | 17 +- .../store/disk/OffHeapDiskStoreTest.java | 20 +- .../TieredStoreFlushWhileShutdownTest.java | 19 +- .../store/tiering/TieredStoreSPITest.java | 26 +- .../tiering/TieredStoreWith3TiersSPITest.java | 20 +- .../internal/util/FileExistenceMatchers.java | 152 ++++++ .../DefaultLocalPersistenceServiceTest.java | 105 +--- .../transactions/xa/internal/XAStore.java | 4 +- .../journal/DefaultJournalProvider.java | 10 +- 27 files changed, 1042 insertions(+), 641 deletions(-) create mode 100644 core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java create mode 100644 impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java create mode 100644 
impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java create mode 100644 impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java rename impl/src/test/java/org/ehcache/impl/internal/persistence/{TestLocalPersistenceService.java => TestDiskResourceService.java} (66%) create mode 100644 impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java diff --git a/core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java b/core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java index fb4eea3ba4..f74a0c987f 100644 --- a/core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java +++ b/core/src/main/java/org/ehcache/core/PersistentUserManagedEhcache.java @@ -22,8 +22,8 @@ import org.ehcache.config.CacheRuntimeConfiguration; import org.ehcache.config.ResourceType; import org.ehcache.core.events.CacheEventDispatcher; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoadingException; @@ -51,7 +51,7 @@ public class PersistentUserManagedEhcache implements PersistentUserManaged private final StatusTransitioner statusTransitioner; private final Logger logger; private final InternalCache cache; - private final LocalPersistenceService localPersistenceService; + private final DiskResourceService diskPersistenceService; private final String id; /** @@ -59,12 +59,12 @@ public class PersistentUserManagedEhcache implements PersistentUserManaged * * @param configuration the cache configuration * @param store the underlying store - * @param localPersistenceService the persistence service + * @param diskPersistenceService the persistence service * @param cacheLoaderWriter the optional loader writer * @param eventDispatcher the event dispatcher * @param id an id for this cache */ - public PersistentUserManagedEhcache(CacheConfiguration configuration, Store store, LocalPersistenceService localPersistenceService, CacheLoaderWriter cacheLoaderWriter, CacheEventDispatcher eventDispatcher, String id) { + public PersistentUserManagedEhcache(CacheConfiguration configuration, Store store, DiskResourceService diskPersistenceService, CacheLoaderWriter cacheLoaderWriter, CacheEventDispatcher eventDispatcher, String id) { this.logger = LoggerFactory.getLogger(PersistentUserManagedEhcache.class.getName() + "-" + id); this.statusTransitioner = new StatusTransitioner(logger); if (cacheLoaderWriter == null) { @@ -72,7 +72,7 @@ public PersistentUserManagedEhcache(CacheConfiguration configuration, Stor } else { this.cache = new EhcacheWithLoaderWriter(new EhcacheRuntimeConfiguration(configuration), store, cacheLoaderWriter, eventDispatcher, true, logger, statusTransitioner); } - this.localPersistenceService = localPersistenceService; + this.diskPersistenceService = diskPersistenceService; this.id = id; } @@ -98,7 +98,7 @@ void create() { if (!getRuntimeConfiguration().getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent()) { destroy(); } - localPersistenceService.getPersistenceSpaceIdentifier(id, cache.getRuntimeConfiguration()); + diskPersistenceService.getPersistenceSpaceIdentifier(id, cache.getRuntimeConfiguration()); } catch (CachePersistenceException e) { throw new RuntimeException("Unable to create persistence space for user managed cache " + id, e); 
} @@ -106,7 +106,7 @@ void create() { void destroyInternal() throws CachePersistenceException { statusTransitioner.checkMaintenance(); - localPersistenceService.destroy(id); + diskPersistenceService.destroy(id); } /** @@ -125,7 +125,7 @@ public void close() { cache.close(); if (!getRuntimeConfiguration().getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent()) { try { - localPersistenceService.destroy(id); + diskPersistenceService.destroy(id); } catch (CachePersistenceException e) { logger.debug("Unable to clear persistence space for user managed cache {}", id, e); } diff --git a/core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java b/core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java new file mode 100644 index 0000000000..b2afe0c857 --- /dev/null +++ b/core/src/main/java/org/ehcache/core/spi/service/DiskResourceService.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.spi.service; + +import org.ehcache.CachePersistenceException; +import org.ehcache.spi.persistence.PersistableResourceService; + +/** + * Resource service handling file level operations for disk tiers. + */ +public interface DiskResourceService extends PersistableResourceService { + + /** + * Creates a new persistence context within the given space. + * + * @param identifier space to create within + * @param name name of the context to create + * @return a {@link FileBasedPersistenceContext} + */ + FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException; +} \ No newline at end of file diff --git a/core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java b/core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java index a100f517ab..0745bdd17e 100644 --- a/core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java +++ b/core/src/main/java/org/ehcache/core/spi/service/LocalPersistenceService.java @@ -17,22 +17,63 @@ package org.ehcache.core.spi.service; import org.ehcache.CachePersistenceException; -import org.ehcache.spi.persistence.PersistableResourceService; +import org.ehcache.spi.service.MaintainableService; + +import java.io.File; /** - * Service to provide persistence context to caches requiring it. - *
- * Will be used by caches with a disk store, whether or not the data should survive a program restart. - *
+ * Service that provides isolated persistence spaces to any service that requires it + * under the local root directory. */ -public interface LocalPersistenceService extends PersistableResourceService { +public interface LocalPersistenceService extends MaintainableService { /** - * Creates a new persistence context within the given space. + * Creates a logical safe directory space for the owner and returns an identifying space Id. + * + * @param owner Service owner that owns the safe space. + * @param name Identifying name for the space. * - * @param identifier space to create within - * @param name name of the context to create - * @return a {@link FileBasedPersistenceContext} + * @return Opaque Identifier that can be used to identify the safe space. + */ + SafeSpaceIdentifier createSafeSpaceIdentifier(String owner, String name); + + /** + * Creates the safe space represented by {@code safeSpaceId}, if it does not exist in the underlying physical space. + * + * @param safeSpaceId Identifier to the created logical space on which the physical space needs to be created + * @throws CachePersistenceException If the space cannot be created or found, due to system errors + */ + void createSafeSpace(SafeSpaceIdentifier safeSpaceId) throws CachePersistenceException; + + /** + * Destroys the safe space. + * + * @param safeSpaceId Safe space identifier. + * @param verbose Log more information. + */ + void destroySafeSpace(SafeSpaceIdentifier safeSpaceId, boolean verbose); + + /** + * Destroys all safe spaces provided to this owner. + * + * @param owner owner of safe spaces. + */ + void destroyAll(String owner); + + /** + * Identifier to the logical safe space */ - FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException; -} + interface SafeSpaceIdentifier { + /** + * Represents the root directory of the given logical safe space. + *
+ * Note that the directory represented by {@code File} may or may not be created in the physical space.
+ * The existence of the physical space depends on whether the {@code createSafeSpace} method was invoked
+ * for the space at some time in the past or not.
+ *
+ * + * @return Root directory of the safe space. + */ + File getRoot(); + } +} \ No newline at end of file diff --git a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java b/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java index 2a05d10af5..98625450a0 100644 --- a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java @@ -29,6 +29,7 @@ import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.heap.SizeOfEngine; import org.ehcache.impl.events.CacheEventDispatcherImpl; import org.ehcache.core.internal.store.StoreSupport; @@ -58,7 +59,6 @@ import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.core.spi.store.heap.SizeOfEngineProvider; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; @@ -210,20 +210,20 @@ T build(ServiceLocator serviceLocator) throws IllegalStateException { if (id == null) { throw new IllegalStateException("Persistent user managed caches must have an id set"); } - final LocalPersistenceService persistenceService = serviceLocator.getService(LocalPersistenceService.class); + final DiskResourceService diskResourceService = serviceLocator.getService(DiskResourceService.class); if (!resourcePools.getPoolForResource(ResourceType.Core.DISK).isPersistent()) { try { - persistenceService.destroy(id); + diskResourceService.destroy(id); } catch (CachePersistenceException cpex) { throw new RuntimeException("Unable to clean-up persistence space for non-restartable cache " + id, cpex); } } try { - final PersistableResourceService.PersistenceSpaceIdentifier identifier = persistenceService.getPersistenceSpaceIdentifier(id, cacheConfig); + final PersistableResourceService.PersistenceSpaceIdentifier identifier = diskResourceService.getPersistenceSpaceIdentifier(id, cacheConfig); lifeCycledList.add(new LifeCycledAdapter() { @Override public void close() throws Exception { - persistenceService.releasePersistenceSpaceIdentifier(identifier); + diskResourceService.releasePersistenceSpaceIdentifier(identifier); } }); serviceConfigsList.add(identifier); @@ -304,13 +304,13 @@ public void close() throws Exception { eventDispatcher.setStoreEventSource(store.getStoreEventSource()); if (persistent) { - LocalPersistenceService persistenceService = serviceLocator - .getService(LocalPersistenceService.class); - if (persistenceService == null) { + DiskResourceService diskResourceService = serviceLocator + .getService(DiskResourceService.class); + if (diskResourceService == null) { throw new IllegalStateException("No LocalPersistenceService could be found - did you configure one?"); } - PersistentUserManagedEhcache cache = new PersistentUserManagedEhcache(cacheConfig, store, persistenceService, cacheLoaderWriter, eventDispatcher, id); + PersistentUserManagedEhcache cache = new PersistentUserManagedEhcache(cacheConfig, store, diskResourceService, cacheLoaderWriter, eventDispatcher, id); registerListeners(cache, serviceLocator, lifeCycledList); for (LifeCycled lifeCycled : lifeCycledList) { cache.addHook(lifeCycled); diff --git 
a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java b/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java new file mode 100644 index 0000000000..0448e83ee0 --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java @@ -0,0 +1,35 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.persistence; + +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.spi.service.ServiceFactory; +import org.ehcache.impl.persistence.DefaultDiskResourceService; +import org.ehcache.spi.service.ServiceCreationConfiguration; + +public class DefaultDiskResourceServiceFactory implements ServiceFactory { + + @Override + public DefaultDiskResourceService create(final ServiceCreationConfiguration serviceConfiguration) { + return new DefaultDiskResourceService(); + } + + @Override + public Class getServiceType() { + return DiskResourceService.class; + } +} \ No newline at end of file diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java b/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java index 4631519be9..9d074f1818 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java +++ b/impl/src/main/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProvider.java @@ -16,6 +16,7 @@ package org.ehcache.impl.internal.spi.serialization; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; import org.ehcache.impl.serialization.ByteArraySerializer; diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index b07d050bb8..39f95ffb4e 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -22,6 +22,7 @@ import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; @@ -46,7 +47,6 @@ import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.ExecutionService; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import 
org.ehcache.spi.service.ServiceDependencies; @@ -297,13 +297,13 @@ private File getMetadataFile() { return new File(fileBasedPersistenceContext.getDirectory(), "ehcache-disk-store.meta"); } - @ServiceDependencies({TimeSourceService.class, SerializationProvider.class, ExecutionService.class}) + @ServiceDependencies({TimeSourceService.class, SerializationProvider.class, ExecutionService.class, DiskResourceService.class}) public static class Provider implements Store.Provider, AuthoritativeTier.Provider { private final Map, PersistenceSpaceIdentifier> createdStores = new ConcurrentWeakIdentityHashMap, PersistenceSpaceIdentifier>(); private final String defaultThreadPool; private volatile ServiceProvider serviceProvider; - private volatile LocalPersistenceService localPersistenceService; + private volatile DiskResourceService diskPersistenceService; public Provider() { this(null); @@ -341,11 +341,6 @@ private OffHeapDiskStore createStoreInternal(Configuration st } MemoryUnit unit = (MemoryUnit)diskPool.getUnit(); - this.localPersistenceService = serviceProvider.getService(LocalPersistenceService.class); - if (localPersistenceService == null) { - throw new IllegalStateException("No LocalPersistenceService could be found - did you configure it at the CacheManager level?"); - } - String threadPoolAlias; int writerConcurrency; OffHeapDiskStoreConfiguration config = findSingletonAmongst(OffHeapDiskStoreConfiguration.class, (Object[]) serviceConfigs); @@ -357,8 +352,11 @@ private OffHeapDiskStore createStoreInternal(Configuration st writerConcurrency = config.getWriterConcurrency(); } PersistenceSpaceIdentifier space = findSingletonAmongst(PersistenceSpaceIdentifier.class, (Object[]) serviceConfigs); + if (space == null) { + throw new IllegalStateException("No LocalPersistenceService could be found - did you configure it at the CacheManager level?"); + } try { - FileBasedPersistenceContext persistenceContext = localPersistenceService.createPersistenceContextWithin(space , "offheap-disk-store"); + FileBasedPersistenceContext persistenceContext = diskPersistenceService.createPersistenceContextWithin(space , "offheap-disk-store"); OffHeapDiskStore offHeapStore = new OffHeapDiskStore(persistenceContext, executionService, threadPoolAlias, writerConcurrency, @@ -411,7 +409,7 @@ public void initStore(Store resource) { if (keySerializer instanceof StatefulSerializer) { StateRepository stateRepository = null; try { - stateRepository = localPersistenceService.getStateRepositoryWithin(identifier, "key-serializer"); + stateRepository = diskPersistenceService.getStateRepositoryWithin(identifier, "key-serializer"); } catch (CachePersistenceException e) { throw new RuntimeException(e); } @@ -421,7 +419,7 @@ public void initStore(Store resource) { if (valueSerializer instanceof StatefulSerializer) { StateRepository stateRepository = null; try { - stateRepository = localPersistenceService.getStateRepositoryWithin(identifier, "value-serializer"); + stateRepository = diskPersistenceService.getStateRepositoryWithin(identifier, "value-serializer"); } catch (CachePersistenceException e) { throw new RuntimeException(e); } @@ -438,12 +436,17 @@ static void init(final OffHeapDiskStore resource) { @Override public void start(ServiceProvider serviceProvider) { this.serviceProvider = serviceProvider; + diskPersistenceService = serviceProvider.getService(DiskResourceService.class); + if (diskPersistenceService == null) { + throw new IllegalStateException("Unable to find file based persistence service"); + } } @Override public 
void stop() { this.serviceProvider = null; createdStores.clear(); + diskPersistenceService = null; } @Override diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java new file mode 100644 index 0000000000..596972b314 --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java @@ -0,0 +1,274 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.persistence; + +import org.ehcache.CachePersistenceException; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.spi.service.FileBasedPersistenceContext; +import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.core.spi.service.LocalPersistenceService.SafeSpaceIdentifier; +import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.service.MaintainableService; +import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; + +/** + * Default implementation of the {@link DiskResourceService} which can be used explicitly when + * {@link org.ehcache.PersistentUserManagedCache persistent user managed caches} are desired. 
+ */ +public class DefaultDiskResourceService implements DiskResourceService { + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDiskResourceService.class); + private static final String PERSISTENCE_SPACE_OWNER = "file"; + + private final ConcurrentMap knownPersistenceSpaces; + private volatile LocalPersistenceService persistenceService; + + public DefaultDiskResourceService() { + this.knownPersistenceSpaces = new ConcurrentHashMap(); + } + + /** + * {@inheritDoc} + */ + @Override + public void start(final ServiceProvider serviceProvider) { + persistenceService = serviceProvider.getService(LocalPersistenceService.class); + } + + /** + * {@inheritDoc} + */ + @Override + public void startForMaintenance(ServiceProvider serviceProvider) { + persistenceService = serviceProvider.getService(LocalPersistenceService.class); + } + + /** + * {@inheritDoc} + */ + @Override + public void stop() { + persistenceService = null; + } + + /** + * {@inheritDoc} + */ + @Override + public boolean handlesResourceType(ResourceType resourceType) { + return persistenceService != null && ResourceType.Core.DISK.equals(resourceType); + } + + /** + * {@inheritDoc} + */ + @Override + public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { + if (persistenceService == null) { + return null; + } + boolean persistent = config.getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(); + while (true) { + PersistenceSpace persistenceSpace = knownPersistenceSpaces.get(name); + if (persistenceSpace != null) { + return persistenceSpace.identifier; + } + PersistenceSpace newSpace = createSpace(name, persistent); + if (newSpace != null) { + return newSpace.identifier; + } + } + } + + @Override + public void releasePersistenceSpaceIdentifier(PersistenceSpaceIdentifier identifier) throws CachePersistenceException { + String name = null; + for (Map.Entry entry : knownPersistenceSpaces.entrySet()) { + if (entry.getValue().identifier.equals(identifier)) { + name = entry.getKey(); + } + } + if (name == null) { + throw new CachePersistenceException("Unknown space " + identifier); + } + PersistenceSpace persistenceSpace = knownPersistenceSpaces.remove(name); + if (persistenceSpace != null) { + for (FileBasedStateRepository stateRepository : persistenceSpace.stateRepositories.values()) { + try { + stateRepository.close(); + } catch (IOException e) { + LOGGER.warn("StateRepository close failed - destroying persistence space {} to prevent corruption", identifier, e); + persistenceService.destroySafeSpace(((DefaultPersistenceSpaceIdentifier)identifier).persistentSpaceId, true); + } + } + } + } + + private PersistenceSpace createSpace(String name, boolean persistent) throws CachePersistenceException { + DefaultPersistenceSpaceIdentifier persistenceSpaceIdentifier = + new DefaultPersistenceSpaceIdentifier(persistenceService.createSafeSpaceIdentifier(PERSISTENCE_SPACE_OWNER, name)); + PersistenceSpace persistenceSpace = new PersistenceSpace(persistenceSpaceIdentifier); + if (knownPersistenceSpaces.putIfAbsent(name, persistenceSpace) == null) { + boolean created = false; + try { + if (!persistent) { + persistenceService.destroySafeSpace(persistenceSpaceIdentifier.persistentSpaceId, true); + } + persistenceService.createSafeSpace(persistenceSpaceIdentifier.persistentSpaceId); + created = true; + } finally { + if (!created) { + // this happens only if an exception is thrown..clean up for any throwable.. 
+ knownPersistenceSpaces.remove(name, persistenceSpace); + } + } + return persistenceSpace; + } else { + return null; + } + } + + /** + * {@inheritDoc} + */ + @Override + public void destroy(String name) throws CachePersistenceException { + if (persistenceService == null) { + return; + } + PersistenceSpace space = knownPersistenceSpaces.remove(name); + SafeSpaceIdentifier identifier = (space == null) ? + persistenceService.createSafeSpaceIdentifier(PERSISTENCE_SPACE_OWNER, name) : space.identifier.persistentSpaceId; + persistenceService.destroySafeSpace(identifier, true); + } + + /** + * {@inheritDoc} + */ + @Override + public void destroyAll() { + if (persistenceService == null) { + return; + } + persistenceService.destroyAll(PERSISTENCE_SPACE_OWNER); + } + + /** + * {@inheritDoc} + */ + @Override + public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier identifier, String name) + throws CachePersistenceException { + PersistenceSpace persistenceSpace = getPersistenceSpace(identifier); + if (persistenceSpace != null) { + FileBasedStateRepository stateRepository = new FileBasedStateRepository( + FileUtils.createSubDirectory(persistenceSpace.identifier.persistentSpaceId.getRoot(), name)); + FileBasedStateRepository previous = persistenceSpace.stateRepositories.putIfAbsent(name, stateRepository); + if (previous != null) { + return previous; + } else { + return stateRepository; + } + } + throw new CachePersistenceException("Unknown space " + identifier); + } + + private PersistenceSpace getPersistenceSpace(PersistenceSpaceIdentifier identifier) { + for (PersistenceSpace persistenceSpace : knownPersistenceSpaces.values()) { + if (persistenceSpace.identifier.equals(identifier)) { + return persistenceSpace; + } + } + return null; + } + + /** + * {@inheritDoc} + */ + @Override + public FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) + throws CachePersistenceException { + if (containsSpace(identifier)) { + return new DefaultFileBasedPersistenceContext( + FileUtils.createSubDirectory(((DefaultPersistenceSpaceIdentifier)identifier).persistentSpaceId.getRoot(), name)); + } else { + throw new CachePersistenceException("Unknown space: " + identifier); + } + } + + private boolean containsSpace(PersistenceSpaceIdentifier identifier) { + for (PersistenceSpace persistenceSpace : knownPersistenceSpaces.values()) { + if (persistenceSpace.identifier.equals(identifier)) { + return true; + } + } + return false; + } + + private static class PersistenceSpace { + final DefaultPersistenceSpaceIdentifier identifier; + final ConcurrentMap stateRepositories = new ConcurrentHashMap(); + + private PersistenceSpace(DefaultPersistenceSpaceIdentifier identifier) { + this.identifier = identifier; + } + } + + private static class DefaultPersistenceSpaceIdentifier implements PersistenceSpaceIdentifier { + final SafeSpaceIdentifier persistentSpaceId; + + private DefaultPersistenceSpaceIdentifier(SafeSpaceIdentifier persistentSpaceId) { + this.persistentSpaceId = persistentSpaceId; + } + + @Override + public Class getServiceType() { + return DiskResourceService.class; + } + + @Override + public String toString() { + return persistentSpaceId.toString(); + } + + // no need to override equals and hashcode as references are private and created in a protected fashion + // within this class. So two space identifiers are equal iff their references are equal. 
+ } + + private static class DefaultFileBasedPersistenceContext implements FileBasedPersistenceContext { + private final File directory; + + private DefaultFileBasedPersistenceContext(File directory) { + this.directory = directory; + } + + @Override + public File getDirectory() { + return directory; + } + } +} \ No newline at end of file diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java index 4f02e1f132..666293a92c 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java @@ -16,15 +16,9 @@ package org.ehcache.impl.persistence; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.ehcache.CachePersistenceException; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.ResourceType; -import org.ehcache.core.spi.service.FileBasedPersistenceContext; import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; -import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; -import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceProvider; @@ -36,63 +30,28 @@ import java.io.IOException; import java.io.RandomAccessFile; import java.nio.channels.FileLock; -import java.nio.charset.Charset; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.ArrayDeque; -import java.util.Deque; -import java.util.HashSet; -import java.util.Locale; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentMap; -import static java.lang.Integer.toHexString; -import static java.nio.charset.Charset.forName; +import static org.ehcache.impl.persistence.FileUtils.createLocationIfRequiredAndVerify; +import static org.ehcache.impl.persistence.FileUtils.recursiveDeleteDirectoryContent; +import static org.ehcache.impl.persistence.FileUtils.safeIdentifier; +import static org.ehcache.impl.persistence.FileUtils.tryRecursiveDelete; +import static org.ehcache.impl.persistence.FileUtils.validateName; /** - * Default implementation of the {@link LocalPersistenceService} which can be used explicitly when - * {@link org.ehcache.PersistentUserManagedCache persistent user managed caches} are desired. + * Implements the local persistence service that provides individual sub-spaces for different + * services. 
*/ public class DefaultLocalPersistenceService implements LocalPersistenceService { - private static final Charset UTF8 = forName("UTF8"); - private static final int DEL = 0x7F; - private static final char ESCAPE = '%'; - private static final Set ILLEGALS = new HashSet(); - static { - ILLEGALS.add('/'); - ILLEGALS.add('\\'); - ILLEGALS.add('<'); - ILLEGALS.add('>'); - ILLEGALS.add(':'); - ILLEGALS.add('"'); - ILLEGALS.add('|'); - ILLEGALS.add('?'); - ILLEGALS.add('*'); - ILLEGALS.add('.'); - } + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLocalPersistenceService.class); - private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap(); private final File rootDirectory; private final File lockFile; - private FileLock lock; + private FileLock lock; private RandomAccessFile rw; - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLocalPersistenceService.class); - private boolean started; - /** - * Tells if the service is currently started - * - * @return if the service is started - */ - public boolean isStarted() { - return started; - } - /** * Creates a new service instance using the provided configuration. * @@ -115,41 +74,6 @@ public synchronized void start(final ServiceProvider serviceProvider) { internalStart(); } - /** - * {@inheritDoc} - */ - @Override - public synchronized void startForMaintenance(ServiceProvider serviceProvider) { - internalStart(); - } - - /** - * Default scope for testing - */ - void internalStart() { - if (!started) { - createLocationIfRequiredAndVerify(rootDirectory); - try { - rw = new RandomAccessFile(lockFile, "rw"); - } catch (FileNotFoundException e) { - // should not happen normally since we checked that everything is fine right above - throw new RuntimeException(e); - } - try { - lock = rw.getChannel().lock(); - } catch (Exception e) { - try { - rw.close(); - } catch (IOException e1) { - // ignore silently - } - throw new RuntimeException("Couldn't lock rootDir: " + rootDirectory.getAbsolutePath(), e); - } - started = true; - LOGGER.debug("RootDirectory Locked"); - } - } - /** * {@inheritDoc} */ @@ -173,378 +97,145 @@ public synchronized void stop() { } } - private static void createLocationIfRequiredAndVerify(final File rootDirectory) { - if(!rootDirectory.exists()) { - if(!rootDirectory.mkdirs()) { - throw new IllegalArgumentException("Directory couldn't be created: " + rootDirectory.getAbsolutePath()); - } - } else if(!rootDirectory.isDirectory()) { - throw new IllegalArgumentException("Location is not a directory: " + rootDirectory.getAbsolutePath()); - } - - if(!rootDirectory.canWrite()) { - throw new IllegalArgumentException("Location isn't writable: " + rootDirectory.getAbsolutePath()); - } + File getLockFile() { + return lockFile; } /** * {@inheritDoc} */ @Override - public boolean handlesResourceType(ResourceType resourceType) { - return ResourceType.Core.DISK.equals(resourceType); - } + public SafeSpaceIdentifier createSafeSpaceIdentifier(String owner, String identifier) { + validateName(owner); + SafeSpace ss = createSafeSpaceLogical(owner, identifier); - /** - * {@inheritDoc} - */ - @Override - public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { - boolean persistent = config.getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(); - while (true) { - PersistenceSpace persistenceSpace = knownPersistenceSpaces.get(name); - if (persistenceSpace != null) { - return 
persistenceSpace.identifier; - } - PersistenceSpace newSpace = createSpace(name, persistent); - if (newSpace != null) { - return newSpace.identifier; + for (File parent = ss.directory.getParentFile(); parent != null; parent = parent.getParentFile()) { + if (rootDirectory.equals(parent)) { + return new DefaultSafeSpaceIdentifier(ss); } } - } - @Override - public void releasePersistenceSpaceIdentifier(PersistenceSpaceIdentifier identifier) throws CachePersistenceException { - String name = null; - for (Map.Entry entry : knownPersistenceSpaces.entrySet()) { - if (entry.getValue().identifier.equals(identifier)) { - name = entry.getKey(); - } - } - if (name == null) { - throw new CachePersistenceException("Unknown space " + identifier); - } - PersistenceSpace persistenceSpace = knownPersistenceSpaces.remove(name); - if (persistenceSpace != null) { - for (FileBasedStateRepository stateRepository : persistenceSpace.stateRepositories.values()) { - try { - stateRepository.close(); - } catch (IOException e) { - LOGGER.warn("StateRepository close failed - destroying persistence space {} to prevent corruption", identifier, e); - destroy(name, (DefaultPersistenceSpaceIdentifier) identifier, true); - } - } - } + throw new IllegalArgumentException("Attempted to access file outside the persistence path"); } - private PersistenceSpace createSpace(String name, boolean persistent) throws CachePersistenceException { - DefaultPersistenceSpaceIdentifier persistenceSpaceIdentifier = new DefaultPersistenceSpaceIdentifier(getDirectoryFor(name)); - PersistenceSpace persistenceSpace = new PersistenceSpace(persistenceSpaceIdentifier); - if (knownPersistenceSpaces.putIfAbsent(name, persistenceSpace) == null) { - try { - if (!persistent) { - destroy(name, persistenceSpaceIdentifier, true); - } - create(persistenceSpaceIdentifier.getDirectory()); - } catch (IOException e) { - knownPersistenceSpaces.remove(name, persistenceSpace); - throw new CachePersistenceException("Unable to create persistence space for " + name, e); - } - return persistenceSpace; - } else { - return null; - } - } /** * {@inheritDoc} */ @Override - public synchronized void destroy(String name) throws CachePersistenceException { - boolean wasStarted = false; - if (!started) { - internalStart(); - wasStarted = true; - } - - try { - PersistenceSpace space = knownPersistenceSpaces.remove(name); - if (space == null) { - destroy(name, new DefaultPersistenceSpaceIdentifier(getDirectoryFor(name)), true); - } else { - destroy(name, space.identifier, true); - } - } finally { - if (wasStarted) { - stop(); - } + public void createSafeSpace(SafeSpaceIdentifier safeSpaceId) throws CachePersistenceException { + if (safeSpaceId == null || !(safeSpaceId instanceof DefaultSafeSpaceIdentifier)) { + // this cannot happen..if identifier created before creating physical space.. + throw new AssertionError("Invalid safe space identifier. 
Identifier not created"); } + SafeSpace ss = ((DefaultSafeSpaceIdentifier) safeSpaceId).safeSpace; + FileUtils.create(ss.directory.getParentFile()); + FileUtils.create(ss.directory); } /** * {@inheritDoc} */ @Override - public synchronized void destroyAll() { - if (!started) { - throw new IllegalStateException("Service must be started"); - } - if(recursiveDeleteDirectoryContent(rootDirectory)){ - LOGGER.debug("Destroyed all file based persistence contexts"); - } else { - LOGGER.warn("Could not delete all file based persistence contexts"); + public void destroySafeSpace(SafeSpaceIdentifier safeSpaceId, boolean verbose) { + if (safeSpaceId == null || !(safeSpaceId instanceof DefaultSafeSpaceIdentifier)) { + // this cannot happen..if identifier created before creating/destroying physical space.. + throw new AssertionError("Invalid safe space identifier. Identifier not created"); } + SafeSpace ss = ((DefaultSafeSpaceIdentifier) safeSpaceId).safeSpace; + destroy(ss, verbose); } /** * {@inheritDoc} */ - @Override - public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { - PersistenceSpace persistenceSpace = getPersistenceSpace(identifier); - if (persistenceSpace != null) { - validateName(name); - File directory = new File(((DefaultPersistenceSpaceIdentifier) identifier).getDirectory(), name); - if (!directory.mkdirs()) { - if (!directory.exists()) { - throw new CachePersistenceException("Unable to create directory " + directory); - } - } - FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); - FileBasedStateRepository previous = persistenceSpace.stateRepositories.putIfAbsent(name, stateRepository); - if (previous != null) { - return previous; - } else { - return stateRepository; + public void destroyAll(String owner) { + File ownerDirectory = new File(rootDirectory, owner); + boolean cleared = true; + if (ownerDirectory.exists() && ownerDirectory.isDirectory()) { + cleared = false; + if (recursiveDeleteDirectoryContent(ownerDirectory)) { + LOGGER.debug("Destroyed all file based persistence contexts owned by {}", owner); + cleared = ownerDirectory.delete(); } } - throw new CachePersistenceException("Unknown space " + identifier); - } - - private PersistenceSpace getPersistenceSpace(PersistenceSpaceIdentifier identifier) { - for (PersistenceSpace persistenceSpace : knownPersistenceSpaces.values()) { - if (persistenceSpace.identifier.equals(identifier)) { - return persistenceSpace; - } + if (!cleared) { + LOGGER.warn("Could not delete all file based persistence contexts owned by {}", owner); } - return null; } - /** - * {@inheritDoc} - */ @Override - public FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { - if (containsSpace(identifier)) { - validateName(name); - File directory = new File(((DefaultPersistenceSpaceIdentifier) identifier).getDirectory(), name); - try { - create(directory); - } catch (IOException ex) { - throw new CachePersistenceException("Unable to create persistence context for " + name + " in " + identifier); - } - return new DefaultFileBasedPersistenceContext(directory); - } else { - throw new CachePersistenceException("Unknown space: " + identifier); - } - } - - private void validateName(String name) { - if (!name.matches("[a-zA-Z0-9\\-_]+")) { - throw new IllegalArgumentException("Name is invalid for persistence context: " + name); - } + public synchronized void 
startForMaintenance(ServiceProvider serviceProvider) { + internalStart(); } - private boolean containsSpace(PersistenceSpaceIdentifier identifier) { - for (PersistenceSpace persistenceSpace : knownPersistenceSpaces.values()) { - if (persistenceSpace.identifier.equals(identifier)) { - return true; + private void internalStart() { + if (!started) { + createLocationIfRequiredAndVerify(rootDirectory); + try { + rw = new RandomAccessFile(lockFile, "rw"); + } catch (FileNotFoundException e) { + // should not happen normally since we checked that everything is fine right above + throw new RuntimeException(e); } - } - return false; - } - - File getLockFile() { - return lockFile; - } - - private File getDirectoryFor(String identifier) { - File directory = new File(rootDirectory, safeIdentifier(identifier)); - - for (File parent = directory.getParentFile(); parent != null; parent = parent.getParentFile()) { - if (rootDirectory.equals(parent)) { - return directory; + try { + lock = rw.getChannel().lock(); + } catch (Exception e) { + try { + rw.close(); + } catch (IOException e1) { + // ignore silently + } + throw new RuntimeException("Couldn't lock rootDir: " + rootDirectory.getAbsolutePath(), e); } - } - - throw new IllegalArgumentException("Attempted to access file outside the persistence path"); - } - - private static void create(File directory) throws IOException, CachePersistenceException { - if (directory.isDirectory()) { - LOGGER.debug("Reusing {}", directory.getAbsolutePath()); - } else if (directory.mkdir()) { - LOGGER.debug("Created {}", directory.getAbsolutePath()); - } else { - throw new CachePersistenceException("Unable to create or reuse directory: " + directory.getAbsolutePath()); + started = true; + LOGGER.debug("RootDirectory Locked"); } } - private static void destroy(String identifier, DefaultPersistenceSpaceIdentifier fileBasedPersistenceContext, boolean verbose) { + private void destroy(SafeSpace ss, boolean verbose) { if (verbose) { - LOGGER.debug("Destroying file based persistence context for {}", identifier); + LOGGER.debug("Destroying file based persistence context for {}", ss.identifier); } - if (fileBasedPersistenceContext.getDirectory().exists() && !tryRecursiveDelete(fileBasedPersistenceContext.getDirectory())) { + if (ss.directory.exists() && !tryRecursiveDelete(ss.directory)) { if (verbose) { - LOGGER.warn("Could not delete directory for context {}", identifier); - } - } - } - - private static boolean recursiveDeleteDirectoryContent(File file) { - File[] contents = file.listFiles(); - if (contents == null) { - throw new IllegalArgumentException("File " + file.getAbsolutePath() + " is not a directory"); - } else { - boolean deleteSuccessful = true; - for (File f : contents) { - deleteSuccessful &= tryRecursiveDelete(f); - } - return deleteSuccessful; - } - } - - private static boolean recursiveDelete(File file) { - Deque toDelete = new ArrayDeque(); - toDelete.push(file); - while (!toDelete.isEmpty()) { - File target = toDelete.pop(); - File[] contents = target.listFiles(); - if (contents == null || contents.length == 0) { - if (target.exists() && !target.delete()) { - return false; - } - } else { - toDelete.push(target); - for (File f : contents) { - toDelete.push(f); - } - } - } - return true; - } - - @SuppressFBWarnings("DM_GC") - private static boolean tryRecursiveDelete(File file) { - boolean interrupted = false; - try { - for (int i = 0; i < 5; i++) { - if (recursiveDelete(file) || !isWindows()) { - return true; - } else { - System.gc(); - System.runFinalization(); 
- - try { - Thread.sleep(50); - } catch (InterruptedException e) { - interrupted = true; - } - } - } - } finally { - if (interrupted) { - Thread.currentThread().interrupt(); - } - } - return false; - } - - private static boolean isWindows() { - return System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows"); - } - - /** - * sanitize a name for valid file or directory name - * - * @param name the name to sanitize - * @return sanitized version of name - */ - private static String safeIdentifier(String name) { - return safeIdentifier(name, true); - } - - static String safeIdentifier(String name, boolean withSha1) { - int len = name.length(); - StringBuilder sb = new StringBuilder(len); - for (int i = 0; i < len; i++) { - char c = name.charAt(i); - if (c <= ' ' || c >= DEL || ILLEGALS.contains(c) || c == ESCAPE) { - sb.append(ESCAPE); - sb.append(String.format("%04x", (int) c)); - } else { - sb.append(c); + LOGGER.warn("Could not delete directory for context {}", ss.identifier); } } - if (withSha1) { - sb.append("_").append(sha1(name)); - } - return sb.toString(); } - private static String sha1(String input) { - StringBuilder sb = new StringBuilder(); - for (byte b : getSha1Digest().digest(input.getBytes(UTF8))) { - sb.append(toHexString((b & 0xf0) >>> 4)); - sb.append(toHexString((b & 0xf))); - } - return sb.toString(); - } - private static MessageDigest getSha1Digest() { - try { - return MessageDigest.getInstance("SHA-1"); - } catch (NoSuchAlgorithmException e) { - throw new AssertionError("All JDKs must have SHA-1"); - } + private SafeSpace createSafeSpaceLogical(String owner, String identifier) { + File ownerDirectory = new File(rootDirectory, owner); + File directory = new File(ownerDirectory, safeIdentifier(identifier)); + return new SafeSpace(identifier, directory); } - private static class PersistenceSpace { - final DefaultPersistenceSpaceIdentifier identifier; - final ConcurrentMap stateRepositories = new ConcurrentHashMap(); + private static final class SafeSpace { + private final String identifier; + private final File directory; - private PersistenceSpace(DefaultPersistenceSpaceIdentifier identifier) { + private SafeSpace(String identifier, File directory) { + this.directory = directory; this.identifier = identifier; } } - private static abstract class FileHolder { - final File directory; - FileHolder(File directory) { - this.directory = directory; - } + private static final class DefaultSafeSpaceIdentifier implements SafeSpaceIdentifier { + private final SafeSpace safeSpace; - public File getDirectory() { - return directory; - } - - } - private static class DefaultPersistenceSpaceIdentifier extends FileHolder implements PersistenceSpaceIdentifier { - - DefaultPersistenceSpaceIdentifier(File directory) { - super(directory); + private DefaultSafeSpaceIdentifier(SafeSpace safeSpace) { + this.safeSpace = safeSpace; } @Override - public Class getServiceType() { - return LocalPersistenceService.class; + public String toString() { + return safeSpace.identifier; } - } - private static class DefaultFileBasedPersistenceContext extends FileHolder implements FileBasedPersistenceContext { - - DefaultFileBasedPersistenceContext(File directory) { - super(directory); + @Override + public File getRoot() { + return safeSpace.directory; } } } diff --git a/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java b/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java index eca0aeee6d..df9654807b 100644 --- 
a/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java @@ -35,7 +35,7 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; -import static org.ehcache.impl.persistence.DefaultLocalPersistenceService.safeIdentifier; +import static org.ehcache.impl.persistence.FileUtils.safeIdentifier; /** * FileBasedStateRepository diff --git a/impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java b/impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java new file mode 100644 index 0000000000..cfad313088 --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/persistence/FileUtils.java @@ -0,0 +1,209 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.persistence; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + +import org.ehcache.CachePersistenceException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.nio.charset.Charset; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.HashSet; +import java.util.Locale; +import java.util.Set; + +import static java.lang.Integer.toHexString; +import static java.nio.charset.Charset.forName; + +/** + * A bunch of utility functions, mainly used by {@link DefaultLocalPersistenceService} and + * {@link FileBasedStateRepository} within this class. 
+ */ +final class FileUtils { + private static final Logger LOGGER = LoggerFactory.getLogger(FileUtils.class); + private static final Charset UTF8 = forName("UTF8"); + private static final int DEL = 0x7F; + private static final char ESCAPE = '%'; + + private static final Set ILLEGALS = new HashSet(); + static { + ILLEGALS.add('/'); + ILLEGALS.add('\\'); + ILLEGALS.add('<'); + ILLEGALS.add('>'); + ILLEGALS.add(':'); + ILLEGALS.add('"'); + ILLEGALS.add('|'); + ILLEGALS.add('?'); + ILLEGALS.add('*'); + ILLEGALS.add('.'); + } + + static void createLocationIfRequiredAndVerify(final File rootDirectory) { + if(!rootDirectory.exists()) { + if(!rootDirectory.mkdirs()) { + throw new IllegalArgumentException("Directory couldn't be created: " + rootDirectory.getAbsolutePath()); + } + } else if(!rootDirectory.isDirectory()) { + throw new IllegalArgumentException("Location is not a directory: " + rootDirectory.getAbsolutePath()); + } + + if(!rootDirectory.canWrite()) { + throw new IllegalArgumentException("Location isn't writable: " + rootDirectory.getAbsolutePath()); + } + } + + static File createSubDirectory(File mainDirectory, String name) throws CachePersistenceException { + validateName(name); + File subDirectory = new File(mainDirectory, name); + create(subDirectory); + return subDirectory; + } + + static void validateName(String name) { + if (!name.matches("[a-zA-Z0-9\\-_]+")) { + throw new IllegalArgumentException("Name is invalid for persistence context: " + name); + } + } + + static void create(File directory) throws CachePersistenceException { + if (directory.isDirectory()) { + LOGGER.debug("Reusing {}", directory.getAbsolutePath()); + } else if (directory.mkdir()) { + LOGGER.debug("Created {}", directory.getAbsolutePath()); + } else if (directory.isDirectory()) { + // if create directory fails, check once more if it is due to concurrent creation. 
+ LOGGER.debug("Reusing {}", directory.getAbsolutePath()); + } else { + throw new CachePersistenceException("Unable to create or reuse directory: " + directory.getAbsolutePath()); + } + } + + static boolean recursiveDeleteDirectoryContent(File file) { + File[] contents = file.listFiles(); + if (contents == null) { + throw new IllegalArgumentException("File " + file.getAbsolutePath() + " is not a directory"); + } else { + boolean deleteSuccessful = true; + for (File f : contents) { + deleteSuccessful &= tryRecursiveDelete(f); + } + return deleteSuccessful; + } + } + + private static boolean recursiveDelete(File file) { + Deque toDelete = new ArrayDeque(); + toDelete.push(file); + while (!toDelete.isEmpty()) { + File target = toDelete.pop(); + File[] contents = target.listFiles(); + if (contents == null || contents.length == 0) { + if (target.exists() && !target.delete()) { + return false; + } + } else { + toDelete.push(target); + for (File f : contents) { + toDelete.push(f); + } + } + } + return true; + } + + @SuppressFBWarnings("DM_GC") + static boolean tryRecursiveDelete(File file) { + boolean interrupted = false; + try { + for (int i = 0; i < 5; i++) { + if (recursiveDelete(file) || !isWindows()) { + return true; + } else { + System.gc(); + System.runFinalization(); + + try { + Thread.sleep(50); + } catch (InterruptedException e) { + interrupted = true; + } + } + } + } finally { + if (interrupted) { + Thread.currentThread().interrupt(); + } + } + return false; + } + + private static boolean isWindows() { + return System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows"); + } + + /** + * sanitize a name for valid file or directory name + * + * @param name the name to sanitize + * @return sanitized version of name + */ + static String safeIdentifier(String name) { + return safeIdentifier(name, true); + } + + static String safeIdentifier(String name, boolean withSha1) { + int len = name.length(); + StringBuilder sb = new StringBuilder(len); + for (int i = 0; i < len; i++) { + char c = name.charAt(i); + if (c <= ' ' || c >= DEL || ILLEGALS.contains(c) || c == ESCAPE) { + sb.append(ESCAPE); + sb.append(String.format("%04x", (int) c)); + } else { + sb.append(c); + } + } + if (withSha1) { + sb.append("_").append(sha1(name)); + } + return sb.toString(); + } + + private static String sha1(String input) { + StringBuilder sb = new StringBuilder(); + for (byte b : getSha1Digest().digest(input.getBytes(UTF8))) { + sb.append(toHexString((b & 0xf0) >>> 4)); + sb.append(toHexString((b & 0xf))); + } + return sb.toString(); + } + + private static MessageDigest getSha1Digest() { + try { + return MessageDigest.getInstance("SHA-1"); + } catch (NoSuchAlgorithmException e) { + throw new AssertionError("All JDKs must have SHA-1"); + } + } +} \ No newline at end of file diff --git a/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory b/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory index 9020ac13a5..7a0738d79a 100644 --- a/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory +++ b/impl/src/main/resources/META-INF/services/org.ehcache.core.spi.service.ServiceFactory @@ -10,6 +10,7 @@ org.ehcache.impl.internal.spi.loaderwriter.DefaultCacheLoaderWriterProviderFacto org.ehcache.impl.internal.spi.event.DefaultCacheEventListenerProviderFactory org.ehcache.impl.internal.executor.DefaultExecutionServiceFactory org.ehcache.impl.internal.persistence.DefaultLocalPersistenceServiceFactory 
+org.ehcache.impl.internal.persistence.DefaultDiskResourceServiceFactory org.ehcache.impl.internal.loaderwriter.writebehind.WriteBehindProviderFactory org.ehcache.impl.internal.events.CacheEventNotificationListenerServiceProviderFactory org.ehcache.impl.internal.spi.copy.DefaultCopyProviderFactory diff --git a/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java b/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java index 9b16fff933..9c25d5e16a 100644 --- a/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java @@ -21,26 +21,28 @@ import org.ehcache.config.units.MemoryUnit; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.junit.Before; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import java.io.File; -import java.io.FilenameFilter; import java.io.IOException; -import static junit.framework.TestCase.assertNotNull; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; -import static org.junit.Assert.assertNull; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistOwnerClosedExpected; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistsOwnerOpenExpected; +import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; /** * @author Alex Snaps */ public class PersistentCacheManagerTest { + private static final String TEST_CACHE_ALIAS = "test123"; + @Rule public ExpectedException thrown = ExpectedException.none(); @@ -52,7 +54,7 @@ public class PersistentCacheManagerTest { @Before public void setup() throws IOException { - rootDirectory = folder.newFolder("testInitializesLocalPersistenceService"); + rootDirectory = folder.newFolder("testInitializesDiskResourceService"); assertTrue(rootDirectory.delete()); builder = newCacheManagerBuilder().with(new CacheManagerPersistenceConfiguration(rootDirectory)); } @@ -74,47 +76,54 @@ public void testDestroyCache_NullAliasNotAllowed() throws CachePersistenceExcept @Test public void testDestroyCache_UnexistingCacheDoesNothing() throws CachePersistenceException { PersistentCacheManager manager = builder.build(true); - manager.destroyCache("test"); + manager.destroyCache(TEST_CACHE_ALIAS); } @Test public void testDestroyCache_Initialized_DestroyExistingCache() throws CachePersistenceException { PersistentCacheManager manager = builder - .withCache("test", + .withCache(TEST_CACHE_ALIAS, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true))) .build(true); - assertNotNull(getCacheDirectory()); - manager.destroyCache("test"); - assertNull(getCacheDirectory()); + assertThat(rootDirectory, fileExistsOwnerOpenExpected(1, TEST_CACHE_ALIAS)); + manager.destroyCache(TEST_CACHE_ALIAS); + assertThat(rootDirectory, fileExistsOwnerOpenExpected(0, TEST_CACHE_ALIAS)); } + @Ignore("Ignoring as currently no support for destroying cache on a closed cache manager") @Test public void testDestroyCache_Uninitialized_DestroyExistingCache() throws CachePersistenceException { PersistentCacheManager manager = builder - .withCache("test", + .withCache(TEST_CACHE_ALIAS, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, 
String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true))) .build(true); - assertNotNull(getCacheDirectory()); + assertThat(rootDirectory, fileExistsOwnerOpenExpected(1, TEST_CACHE_ALIAS)); manager.close(); // pass it to uninitialized - manager.destroyCache("test"); - assertNull(getCacheDirectory()); + assertThat(rootDirectory, fileExistOwnerClosedExpected(1, TEST_CACHE_ALIAS)); + manager.destroyCache(TEST_CACHE_ALIAS); + assertThat(rootDirectory, fileExistOwnerClosedExpected(0, TEST_CACHE_ALIAS)); } - private File getCacheDirectory() { - File[] files = rootDirectory.listFiles(new FilenameFilter() { - @Override - public boolean accept(final File dir, final String name) { - return name.startsWith("test"); - } - }); - if(files == null || files.length == 0) { - return null; + @Ignore("Ignoring as currently no support for destroying cache on a closed cache manager") + @Test + public void testDestroyCache_CacheManagerUninitialized() throws CachePersistenceException { + { + PersistentCacheManager manager = builder + .withCache(TEST_CACHE_ALIAS, + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true))) + .build(true); + assertThat(rootDirectory, fileExistsOwnerOpenExpected(1, TEST_CACHE_ALIAS)); + manager.close(); // pass it to uninitialized + assertThat(rootDirectory, fileExistOwnerClosedExpected(1, TEST_CACHE_ALIAS)); } - if(files.length > 1) { - fail("Too many cache directories"); + { + PersistentCacheManager manager = builder.build(false); + assertThat(rootDirectory, fileExistOwnerClosedExpected(1, TEST_CACHE_ALIAS)); + manager.destroyCache(TEST_CACHE_ALIAS); + assertThat(rootDirectory, fileExistOwnerClosedExpected(0, TEST_CACHE_ALIAS)); } - return files[0]; } } diff --git a/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java b/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java index 03ba3fce80..0c18477857 100644 --- a/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java +++ b/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java @@ -28,12 +28,12 @@ import org.ehcache.config.units.MemoryUnit; import org.ehcache.docs.plugs.ListenerObject; import org.ehcache.docs.plugs.LongCopier; +import org.ehcache.impl.persistence.DefaultLocalPersistenceService; import org.ehcache.impl.serialization.LongSerializer; import org.ehcache.docs.plugs.OddKeysEvictionAdvisor; import org.ehcache.docs.plugs.SampleLoaderWriter; import org.ehcache.docs.plugs.StringCopier; import org.ehcache.impl.serialization.StringSerializer; -import org.ehcache.impl.persistence.DefaultLocalPersistenceService; import org.ehcache.core.spi.service.LocalPersistenceService; import org.junit.Test; diff --git a/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java b/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java index 9964a74a2e..c079366ec9 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java @@ -16,29 +16,30 @@ package org.ehcache.impl.internal.persistence; import org.ehcache.Cache; +import org.ehcache.CachePersistenceException; import org.ehcache.PersistentCacheManager; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; -import 
org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.CachePersistenceException; +import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.junit.Test; import java.io.File; import java.net.URISyntaxException; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.hamcrest.core.Is.is; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistNoOwner; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistsOwnerOpen; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; /** - * @author rism + * */ public class CacheManagerDestroyRemovesPersistenceTest { - PersistentCacheManager persistentCacheManager; + private PersistentCacheManager persistentCacheManager; @Test public void testDestroyRemovesPersistenceData () throws URISyntaxException, CachePersistenceException { @@ -50,7 +51,7 @@ public void testDestroyRemovesPersistenceData () throws URISyntaxException, Cach persistentCacheManager.close(); persistentCacheManager.destroy(); - assertThat(file.list().length, is(0)); + assertThat(file, fileExistNoOwner()); } @Test @@ -60,7 +61,7 @@ public void testDestroyCacheDestroysPersistenceContext() throws URISyntaxExcepti persistentCacheManager.destroyCache("persistent-cache"); - assertThat(file.list().length, is(1)); + assertThat(file, fileExistsOwnerOpen(0)); } @Test @@ -97,12 +98,10 @@ public void testDestroyCacheWithUnknownAlias() throws URISyntaxException, CacheP anotherPersistentCacheManager.destroyCache("persistent-cache"); - assertThat(file.list().length, is(1)); - + assertThat(file, fileExistsOwnerOpen(0)); } - - public void initCacheManager(File file) throws URISyntaxException { + private void initCacheManager(File file) throws URISyntaxException { persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() .with(new CacheManagerPersistenceConfiguration(file)) .withCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, @@ -116,10 +115,11 @@ public void initCacheManager(File file) throws URISyntaxException { private void putValuesInCacheAndCloseCacheManager() { Cache preConfigured = persistentCacheManager.getCache("persistent-cache", Long.class, String.class); - preConfigured.put(1l, "foo"); + preConfigured.put(1L, "foo"); persistentCacheManager.close(); } + @SuppressWarnings("ConstantConditions") private String getStoragePath() throws URISyntaxException { return getClass().getClassLoader().getResource(".").toURI().getPath(); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestLocalPersistenceService.java b/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java similarity index 66% rename from impl/src/test/java/org/ehcache/impl/internal/persistence/TestLocalPersistenceService.java rename to impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java index 156d2a8bb2..aa101e9b6d 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestLocalPersistenceService.java +++ b/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java @@ -17,57 +17,67 @@ package org.ehcache.impl.internal.persistence; import java.io.File; -import java.util.Collection; + import org.ehcache.config.CacheConfiguration; -import 
org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.CachePersistenceException; +import org.ehcache.impl.persistence.DefaultDiskResourceService; import org.ehcache.impl.persistence.DefaultLocalPersistenceService; -import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.service.FileBasedPersistenceContext; import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceConfiguration; import org.junit.rules.ExternalResource; import org.junit.rules.TemporaryFolder; +import org.mockito.Mockito; + +import static org.mockito.Mockito.mock; /** * * @author cdennis */ -public class TestLocalPersistenceService extends ExternalResource implements LocalPersistenceService { +public class TestDiskResourceService extends ExternalResource implements DiskResourceService { private final TemporaryFolder folder; - private LocalPersistenceService persistenceService; + private LocalPersistenceService fileService; + private DiskResourceService diskResourceService; - public TestLocalPersistenceService(File folder) { + public TestDiskResourceService(File folder) { this.folder = new TemporaryFolder(folder); } - public TestLocalPersistenceService() { + public TestDiskResourceService() { this.folder = new TemporaryFolder(); } @Override protected void before() throws Throwable { folder.create(); - persistenceService = new DefaultLocalPersistenceService(new CacheManagerPersistenceConfiguration(folder.newFolder())); - persistenceService.start(null); + fileService = new DefaultLocalPersistenceService(new CacheManagerPersistenceConfiguration(folder.newFolder())); + fileService.start(null); + diskResourceService = new DefaultDiskResourceService(); + ServiceProvider sp = mock(ServiceProvider.class); + Mockito.when(sp.getService(LocalPersistenceService.class)).thenReturn(fileService); + diskResourceService.start(sp); } @Override protected void after() { - LocalPersistenceService ps = persistenceService; - persistenceService = null; + DiskResourceService ps = diskResourceService; + LocalPersistenceService ls = fileService; + diskResourceService = null; + fileService = null; try { ps.stop(); + ls.stop(); } finally { folder.delete(); } @@ -75,37 +85,37 @@ protected void after() { @Override public boolean handlesResourceType(ResourceType resourceType) { - return persistenceService.handlesResourceType(resourceType); + return diskResourceService.handlesResourceType(resourceType); } @Override public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { - return persistenceService.getPersistenceSpaceIdentifier(name, config); + return diskResourceService.getPersistenceSpaceIdentifier(name, config); } @Override public void releasePersistenceSpaceIdentifier(PersistenceSpaceIdentifier identifier) throws CachePersistenceException { - persistenceService.releasePersistenceSpaceIdentifier(identifier); + diskResourceService.releasePersistenceSpaceIdentifier(identifier); } @Override public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { - return 
persistenceService.getStateRepositoryWithin(identifier, name); + return diskResourceService.getStateRepositoryWithin(identifier, name); } @Override public void destroy(String name) throws CachePersistenceException { - persistenceService.destroy(name); + diskResourceService.destroy(name); } @Override public FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { - return persistenceService.createPersistenceContextWithin(identifier, name); + return diskResourceService.createPersistenceContextWithin(identifier, name); } @Override public void destroyAll() throws CachePersistenceException { - persistenceService.destroyAll(); + diskResourceService.destroyAll(); } @Override diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java index 737488913d..246fe91cb0 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java @@ -16,6 +16,7 @@ package org.ehcache.impl.internal.spi.serialization; import org.ehcache.CachePersistenceException; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.service.FileBasedPersistenceContext; import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; @@ -60,6 +61,7 @@ import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** @@ -493,8 +495,8 @@ public void testPersistentStatefulLegacyComboSerializer() throws Exception { } private PersistableResourceService.PersistenceSpaceIdentifier getPersistenceSpaceIdentifierMock() { - PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = mock(LocalPersistenceService.PersistenceSpaceIdentifier.class); - when(spaceIdentifier.getServiceType()).thenReturn(LocalPersistenceService.class); + PersistableResourceService.PersistenceSpaceIdentifier spaceIdentifier = mock(DiskResourceService.PersistenceSpaceIdentifier.class); + when(spaceIdentifier.getServiceType()).thenReturn(DiskResourceService.class); return spaceIdentifier; } @@ -502,8 +504,9 @@ private DefaultSerializationProvider getStartedProvider() throws CachePersistenc DefaultSerializationProvider defaultProvider = new DefaultSerializationProvider(null); ServiceProvider serviceProvider = mock(ServiceProvider.class); - LocalPersistenceService persistenceService = mock(LocalPersistenceService.class); - when(persistenceService.createPersistenceContextWithin(any(PersistableResourceService.PersistenceSpaceIdentifier.class), anyString())) + DiskResourceService diskResourceService = mock(DiskResourceService.class); + StateRepository stateRepository = mock(StateRepository.class); + when(diskResourceService.createPersistenceContextWithin(any(PersistableResourceService.PersistenceSpaceIdentifier.class), anyString())) .thenReturn(new FileBasedPersistenceContext() { @Override public File getDirectory() { @@ -515,7 +518,7 @@ public File getDirectory() { } } }); - when(serviceProvider.getService(LocalPersistenceService.class)).thenReturn(persistenceService); + 
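The TestDiskResourceService rule above splits what the old TestLocalPersistenceService did in one step: it starts a DefaultLocalPersistenceService for the raw directory handling and layers a DefaultDiskResourceService on top of it, resolving the file service through a mocked ServiceProvider. A minimal sketch of that wiring follows; it mirrors the rule's before()/after() methods, omits generic parameters just as the patch text does, and assumes Mockito and a scratch directory are available (the sketch class name is invented for illustration).

import java.io.File;

import org.ehcache.core.spi.service.DiskResourceService;
import org.ehcache.core.spi.service.LocalPersistenceService;
import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration;
import org.ehcache.impl.persistence.DefaultDiskResourceService;
import org.ehcache.impl.persistence.DefaultLocalPersistenceService;
import org.ehcache.spi.service.ServiceProvider;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class DiskResourceServiceWiringSketch {

  public static void demo(File root) {
    // the file service owns the root directory and its lock file
    LocalPersistenceService fileService =
        new DefaultLocalPersistenceService(new CacheManagerPersistenceConfiguration(root));
    fileService.start(null);

    // the disk resource service resolves the file service through its ServiceProvider
    DiskResourceService diskService = new DefaultDiskResourceService();
    ServiceProvider serviceProvider = mock(ServiceProvider.class);
    when(serviceProvider.getService(LocalPersistenceService.class)).thenReturn(fileService);
    diskService.start(serviceProvider);

    // ... a test would now call diskService.getPersistenceSpaceIdentifier(...) and friends ...

    diskService.stop();   // stop in reverse order of starting, as the rule's after() does
    fileService.stop();
  }
}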
when(serviceProvider.getService(DiskResourceService.class)).thenReturn(diskResourceService); defaultProvider.start(serviceProvider); return defaultProvider; } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java index 5f8748166c..954c187e11 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java @@ -25,7 +25,7 @@ import org.ehcache.config.SizedResourcePool; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; import org.ehcache.expiry.Expirations; import org.ehcache.expiry.Expiry; @@ -62,8 +62,7 @@ public class OffHeapDiskStoreProviderTest { public void testStatisticsAssociations() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(mock(SerializationProvider.class), new DefaultTimeSourceService(null), mock(LocalPersistenceService.class)); - + ServiceLocator serviceLocator = new ServiceLocator(mock(SerializationProvider.class), new DefaultTimeSourceService(null), mock(DiskResourceService.class)); provider.start(serviceLocator); OffHeapDiskStore store = provider.createStore(getStoreConfig(), mock(PersistableResourceService.PersistenceSpaceIdentifier.class)); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java index a2269a9b17..e572f73c4b 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java @@ -21,7 +21,6 @@ import org.ehcache.config.ResourcePools; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.config.SizedResourcePool; -import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; import org.ehcache.CachePersistenceException; import org.ehcache.expiry.Expirations; @@ -29,7 +28,7 @@ import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.executor.OnDemandExecutionService; -import org.ehcache.impl.internal.persistence.TestLocalPersistenceService; +import org.ehcache.impl.internal.persistence.TestDiskResourceService; import org.ehcache.impl.internal.store.offheap.BasicOffHeapValueHolder; import org.ehcache.impl.internal.store.offheap.OffHeapValueHolder; import org.ehcache.core.spi.time.SystemTimeSource; @@ -71,7 +70,7 @@ public class OffHeapDiskStoreSPITest extends AuthoritativeTierSPITest newStore(Long capacity, EvictionAdviso CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); String spaceName = "OffheapDiskStore-" + index.getAndIncrement(); - PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); + PersistenceSpaceIdentifier space = 
diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); ResourcePools resourcePools = getDiskResourcePool(capacity); SizedResourcePool diskPool = resourcePools.getPoolForResource(DISK); MemoryUnit unit = (MemoryUnit)diskPool.getUnit(); @@ -116,7 +115,7 @@ private AuthoritativeTier newStore(Long capacity, EvictionAdviso Store.Configuration config = new StoreConfigurationImpl(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, resourcePools, 0, keySerializer, valueSerializer); OffHeapDiskStore store = new OffHeapDiskStore( - persistenceService.createPersistenceContextWithin(space, "store"), + diskResourceService.createPersistenceContextWithin(space, "store"), new OnDemandExecutionService(), null, 1, config, timeSource, new TestStoreEventDispatcher(), @@ -157,7 +156,7 @@ public ServiceConfiguration[] getServiceConfigurations() { CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); String spaceName = "OffheapDiskStore-" + index.getAndIncrement(); - PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier(spaceName, cacheConfiguration); return new ServiceConfiguration[] {space}; } catch (CachePersistenceException e) { throw new RuntimeException(e); @@ -196,7 +195,7 @@ public void close(final Store store) { throw new RuntimeException(ex); } try { - persistenceService.destroy(spaceName); + diskResourceService.destroy(spaceName); } catch (CachePersistenceException ex) { throw new AssertionError(ex); } finally { @@ -211,10 +210,10 @@ public void tearDown() throws CachePersistenceException, IOException { try { for (Map.Entry, String> entry : createdStores.entrySet()) { OffHeapDiskStore.Provider.close((OffHeapDiskStore) entry.getKey()); - persistenceService.destroy(entry.getValue()); + diskResourceService.destroy(entry.getValue()); } } finally { - persistenceService.stop(); + diskResourceService.stop(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java index dc5aedcab6..ca5e9457cc 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java @@ -31,7 +31,7 @@ import org.ehcache.expiry.Expiry; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.executor.OnDemandExecutionService; -import org.ehcache.impl.internal.persistence.TestLocalPersistenceService; +import org.ehcache.impl.internal.persistence.TestDiskResourceService; import org.ehcache.impl.internal.store.offheap.AbstractOffHeapStore; import org.ehcache.impl.internal.store.offheap.AbstractOffHeapStoreTest; import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; @@ -98,7 +98,7 @@ public class OffHeapDiskStoreTest extends AbstractOffHeapStoreTest { public final TemporaryFolder temporaryFolder = new TemporaryFolder(); @Rule - public final TestLocalPersistenceService persistenceService = new TestLocalPersistenceService(); + public final TestDiskResourceService diskResourceService = new TestDiskResourceService(); @Test public void testRecovery() throws StoreAccessException, IOException { @@ 
-120,13 +120,13 @@ public void testRecovery() throws StoreAccessException, IOException { public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(persistenceService); + serviceLocator.addService(diskResourceService); serviceLocator.addService(provider); serviceLocator.startAllServices(); CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); - PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); { Store.Configuration storeConfig1 = mock(Store.Configuration.class); @@ -170,13 +170,13 @@ public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws public void testRecoveryWithArrayType() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(persistenceService); + serviceLocator.addService(diskResourceService); serviceLocator.addService(provider); serviceLocator.startAllServices(); CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, false).build()); - PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); { Store.Configuration storeConfig1 = mock(Store.Configuration.class); @@ -215,7 +215,7 @@ public void testRecoveryWithArrayType() throws Exception { protected OffHeapDiskStore createAndInitStore(final TimeSource timeSource, final Expiry expiry) { try { SerializationProvider serializationProvider = new DefaultSerializationProvider(null); - serializationProvider.start(providerContaining(persistenceService)); + serializationProvider.start(providerContaining(diskResourceService)); ClassLoader classLoader = getClass().getClassLoader(); Serializer keySerializer = serializationProvider.createKeySerializer(String.class, classLoader); Serializer valueSerializer = serializationProvider.createValueSerializer(String.class, classLoader); @@ -238,7 +238,7 @@ protected OffHeapDiskStore createAndInitStore(final TimeSource t protected OffHeapDiskStore createAndInitStore(TimeSource timeSource, Expiry expiry, EvictionAdvisor evictionAdvisor) { try { SerializationProvider serializationProvider = new DefaultSerializationProvider(null); - serializationProvider.start(providerContaining(persistenceService)); + serializationProvider.start(providerContaining(diskResourceService)); ClassLoader classLoader = getClass().getClassLoader(); Serializer keySerializer = serializationProvider.createKeySerializer(String.class, classLoader); Serializer valueSerializer = serializationProvider.createValueSerializer(byte[].class, classLoader); @@ -321,8 +321,8 @@ private FileBasedPersistenceContext getPersistenceContext() { try { CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MB, false).build()); - 
PersistenceSpaceIdentifier space = persistenceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); - return persistenceService.createPersistenceContextWithin(space, "store"); + PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); + return diskResourceService.createPersistenceContextWithin(space, "store"); } catch (CachePersistenceException e) { throw new AssertionError(e); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java index ad1f2dafcc..c2b0ea1b4d 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java @@ -19,17 +19,18 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; -import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.expiry.Expirations; import org.ehcache.expiry.Expiry; -import org.ehcache.impl.persistence.DefaultLocalPersistenceService; +import org.ehcache.impl.persistence.DefaultDiskResourceService; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.core.spi.store.Store; +import org.ehcache.impl.persistence.DefaultLocalPersistenceService; import org.ehcache.impl.serialization.JavaSerializer; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.persistence.PersistableResourceService.PersistenceSpaceIdentifier; @@ -113,8 +114,8 @@ public int getDispatcherConcurrency() { CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); when(cacheConfiguration.getResourcePools()).thenReturn(newResourcePoolsBuilder().disk(1, MemoryUnit.MB, true).build()); - LocalPersistenceService persistenceService = serviceLocator.getService(LocalPersistenceService.class); - PersistenceSpaceIdentifier persistenceSpace = persistenceService.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); + DiskResourceService diskResourceService = serviceLocator.getService(DiskResourceService.class); + PersistenceSpaceIdentifier persistenceSpace = diskResourceService.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); Store tieredStore = tieredStoreProvider.createStore(configuration, new ServiceConfiguration[] {persistenceSpace}); tieredStoreProvider.initStore(tieredStore); for (int i = 0; i < 100; i++) { @@ -136,8 +137,8 @@ public int getDispatcherConcurrency() { serviceLocator1.startAllServices(); tieredStoreProvider.start(serviceLocator1); - LocalPersistenceService persistenceService1 = serviceLocator1.getService(LocalPersistenceService.class); - PersistenceSpaceIdentifier persistenceSpace1 = persistenceService1.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); + DiskResourceService diskResourceService1 = serviceLocator1.getService(DiskResourceService.class); + PersistenceSpaceIdentifier persistenceSpace1 = 
diskResourceService1.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); tieredStore = tieredStoreProvider.createStore(configuration, new ServiceConfiguration[] {persistenceSpace1}); tieredStoreProvider.initStore(tieredStore); @@ -148,9 +149,11 @@ public int getDispatcherConcurrency() { private ServiceLocator getServiceLocator(File location) throws Exception { DefaultPersistenceConfiguration persistenceConfiguration = new DefaultPersistenceConfiguration(location); - DefaultLocalPersistenceService persistenceService = new DefaultLocalPersistenceService(persistenceConfiguration); + DefaultLocalPersistenceService fileService = new DefaultLocalPersistenceService(persistenceConfiguration); + DefaultDiskResourceService diskResourceService = new DefaultDiskResourceService(); ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(persistenceService); + serviceLocator.addService(fileService); + serviceLocator.addService(diskResourceService); serviceLocator.addService(new OnHeapStore.Provider()); serviceLocator.addService(new OffHeapDiskStore.Provider()); return serviceLocator; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java index 4ed2f05e94..1809fc5bb0 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java @@ -22,7 +22,7 @@ import org.ehcache.config.ResourceType; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.config.SizedResourcePool; -import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.CachePersistenceException; @@ -33,7 +33,7 @@ import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.executor.OnDemandExecutionService; -import org.ehcache.impl.persistence.DefaultLocalPersistenceService; +import org.ehcache.impl.internal.persistence.TestDiskResourceService; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.disk.OffHeapDiskStoreSPITest; @@ -52,7 +52,6 @@ import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.After; @@ -83,11 +82,13 @@ public class TieredStoreSPITest extends StoreSPITest { private StoreFactory storeFactory; private final TieredStore.Provider provider = new TieredStore.Provider(); private final Map, String> createdStores = new ConcurrentHashMap, String>(); - private LocalPersistenceService persistenceService; @Rule public final TemporaryFolder folder = new TemporaryFolder(); + @Rule + public TestDiskResourceService diskResourceService = new TestDiskResourceService(); + @Override protected StoreFactory getStoreFactory() { return storeFactory; @@ -95,7 +96,6 @@ protected StoreFactory getStoreFactory() { @Before public void setUp() throws IOException { - 
persistenceService = new DefaultLocalPersistenceService(new CacheManagerPersistenceConfiguration(folder.newFolder())); storeFactory = new StoreFactory() { final AtomicInteger aliasCounter = new AtomicInteger(); @@ -132,8 +132,8 @@ private Store newStore(Long capacity, EvictionAdvisor store) { String spaceName = createdStores.get(store); provider.releaseStore(store); try { - persistenceService.destroy(spaceName); + diskResourceService.destroy(spaceName); } catch (CachePersistenceException e) { throw new AssertionError(e); } finally { @@ -314,13 +314,9 @@ public ServiceProvider getServiceProvider() { @After public void tearDown() throws CachePersistenceException { - try { - for (Map.Entry, String> entry : createdStores.entrySet()) { - provider.releaseStore(entry.getKey()); - persistenceService.destroy(entry.getValue()); - } - } finally { - persistenceService.stop(); + for (Map.Entry, String> entry : createdStores.entrySet()) { + provider.releaseStore(entry.getKey()); + diskResourceService.destroy(entry.getValue()); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java index ca11ca81f0..961154fe87 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java @@ -22,7 +22,7 @@ import org.ehcache.config.ResourceType; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.config.SizedResourcePool; -import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; @@ -34,7 +34,7 @@ import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.events.TestStoreEventDispatcher; import org.ehcache.impl.internal.executor.OnDemandExecutionService; -import org.ehcache.impl.persistence.DefaultLocalPersistenceService; +import org.ehcache.impl.internal.persistence.TestDiskResourceService; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.disk.OffHeapDiskStoreSPITest; @@ -55,7 +55,6 @@ import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.junit.After; @@ -84,11 +83,13 @@ public class TieredStoreWith3TiersSPITest extends StoreSPITest { private StoreFactory storeFactory; private final TieredStore.Provider provider = new TieredStore.Provider(); private final Map, String> createdStores = new ConcurrentHashMap, String>(); - private LocalPersistenceService persistenceService; @Rule public final TemporaryFolder folder = new TemporaryFolder(); + @Rule + public TestDiskResourceService diskResourceService = new TestDiskResourceService(); + @Override protected StoreFactory getStoreFactory() { return storeFactory; @@ -96,7 +97,6 @@ protected StoreFactory getStoreFactory() { @Before public void setUp() throws IOException { - persistenceService = new DefaultLocalPersistenceService(new 
CacheManagerPersistenceConfiguration(folder.newFolder())); storeFactory = new StoreFactory() { final AtomicInteger aliasCounter = new AtomicInteger(); @@ -140,8 +140,8 @@ private Store newStore(Long capacity, EvictionAdvisor store) { String spaceName = createdStores.get(store); provider.releaseStore(store); try { - persistenceService.destroy(spaceName); + diskResourceService.destroy(spaceName); } catch (CachePersistenceException e) { throw new AssertionError(e); } finally { @@ -331,10 +331,10 @@ public void tearDown() throws CachePersistenceException { try { for (Map.Entry, String> entry : createdStores.entrySet()) { provider.releaseStore(entry.getKey()); - persistenceService.destroy(entry.getValue()); + diskResourceService.destroy(entry.getValue()); } } finally { - persistenceService.stop(); + diskResourceService.stop(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java new file mode 100644 index 0000000000..35537203fe --- /dev/null +++ b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java @@ -0,0 +1,152 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.impl.internal.util; + +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +import java.io.File; +import java.io.FilenameFilter; + +/** + * Matcher(s) for file existence in the persistence directory.. 
+ * + * @author RKAV + */ +public class FileExistenceMatchers { + public static Matcher fileExistsOwnerOpen(final int numExpectedFiles) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(File item) { + return fileExistsOwnerOpenWithName(item, numExpectedFiles, null); + } + + @Override + public void describeTo(Description description) { + + } + }; + } + + public static Matcher fileExistsOwnerOpenExpected(final int numExpectedFiles, final String expected) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(File item) { + return fileExistsOwnerOpenWithName(item, numExpectedFiles, expected); + } + + @Override + public void describeTo(Description description) { + } + }; + } + + public static Matcher fileExistOwnerClosed(final int numExpectedFiles) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(File item) { + return fileExistsOwnerClosedWithName(item, numExpectedFiles, null); + } + + @Override + public void describeTo(Description description) { + } + }; + } + + public static Matcher fileExistOwnerClosedExpected(final int numExpectedFiles, final String expected) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(File item) { + return fileExistsOwnerClosedWithName(item, numExpectedFiles, expected); + } + + @Override + public void describeTo(Description description) { + } + }; + } + + public static Matcher fileExistNoOwner() { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(File item) { + File[] files = item.listFiles(); + return files == null || files.length == 0; + } + + @Override + public void describeTo(Description description) { + + } + }; + } + + private static boolean fileExistsOwnerOpenWithName(final File item, final int numExpectedFiles, final String expected) { + boolean matches = false; + File[] files = item.listFiles(); + if (files == null) { + return false; + } + if (files.length == 2) { + int i = files[0].isDirectory() ? 0 : 1; + if (expected != null) { + files = files[i].listFiles(new FilenameFilter() { + @Override + public boolean accept(File dir, String name) { + return name.startsWith(expected); + } + }); + } else { + files = files[i].isDirectory() ? files[i].listFiles() : null; + } + if (numExpectedFiles > 0) { + matches = files != null && files.length == numExpectedFiles; + } else { + matches = files == null || files.length == 0; + } + } + return matches; + } + + private static boolean fileExistsOwnerClosedWithName(final File item, final int numExpectedFiles, final String expected) { + boolean matches = false; + File[] files = item.listFiles(); + if (files == null) { + return false; + } + if (files.length == 1) { + if (expected != null) { + files = files[0].listFiles(new FilenameFilter() { + @Override + public boolean accept(File dir, String name) { + return name.startsWith(expected); + } + }); + } else { + files = files[0].isDirectory() ? 
files[0].listFiles() : null; + } + if (numExpectedFiles > 0) { + matches = files != null && files.length == numExpectedFiles; + } else { + matches = files == null || files.length == 0; + } + } + return matches; + } +} diff --git a/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java b/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java index 03a5b4b9d9..4ff2ad14f3 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java +++ b/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java @@ -17,13 +17,7 @@ package org.ehcache.impl.persistence; import org.ehcache.CachePersistenceException; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; -import org.ehcache.spi.persistence.PersistableResourceService; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -34,12 +28,17 @@ import java.io.File; import java.io.IOException; +import static org.ehcache.core.spi.service.LocalPersistenceService.SafeSpaceIdentifier; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistOwnerClosed; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistsOwnerOpen; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.junit.Assume.assumeTrue; import static org.mockito.Mockito.never; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistOwnerClosed; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistsOwnerOpen; public class DefaultLocalPersistenceServiceTest { @@ -110,6 +109,24 @@ public void testLocksDirectoryAndUnlocks() throws IOException { assertThat(service.getLockFile().exists(), is(false)); } + @Test + public void testPhysicalDestroy() throws IOException, CachePersistenceException { + final File f = folder.newFolder("testPhysicalDestroy"); + final DefaultLocalPersistenceService service = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(f)); + service.start(null); + assertThat(service.getLockFile().exists(), is(true)); + SafeSpaceIdentifier id = service.createSafeSpaceIdentifier("test", "test"); + service.createSafeSpace(id); + assertThat(f, fileExistsOwnerOpen(1)); + // try to destroy the physical space without the logical id + SafeSpaceIdentifier newId = service.createSafeSpaceIdentifier("test", "test"); + service.destroySafeSpace(newId, false); + assertThat(f, fileExistsOwnerOpen(0)); + service.stop(); + assertThat(f, fileExistOwnerClosed(0)); + assertThat(service.getLockFile().exists(), is(false)); + } + @Test public void testExclusiveLock() throws IOException { DefaultLocalPersistenceService service1 = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); @@ -121,80 +138,4 @@ public void testExclusiveLock() throws IOException { expectedException.expectMessage("Couldn't lock rootDir: " + testFolder.getAbsolutePath()); service2.start(null); } - - @Test - public void testCantDestroyAllIfServiceNotStarted() { - DefaultLocalPersistenceService service = new 
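The matchers above let the persistence tests assert on the layout of the cache manager's root directory instead of hand-rolling directory listings the way the removed getCacheDirectory() helper did. The sketch below mirrors assertions already present in the tests of this patch; builder, rootDirectory and the single disk cache behind them are assumptions of the example, and the matchers are statically imported as in those tests.

@Test
public void destroyRemovesTheOwnerDirectory() throws Exception {
  PersistentCacheManager manager = builder.build(true);
  assertThat(rootDirectory, fileExistsOwnerOpen(1));   // manager running, one entry in its directory
  manager.close();
  assertThat(rootDirectory, fileExistOwnerClosed(1));  // lock released, directory content kept
  manager.destroy();
  assertThat(rootDirectory, fileExistNoOwner());       // destroy removes the owner directory entirely
}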
DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); - expectedException.expect(IllegalStateException.class); - expectedException.expectMessage("Service must be started"); - service.destroyAll(); - } - - @Test - public void testDestroyWhenStarted() throws CachePersistenceException { - DefaultLocalPersistenceService service = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); - service.start(null); - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder - .newCacheConfigurationBuilder(Object.class, Object.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB)) - .build(); - PersistableResourceService.PersistenceSpaceIdentifier id = - service.getPersistenceSpaceIdentifier("test", cacheConfiguration); - - service = Mockito.spy(service); - service.destroy("test"); - - // Make sure we haven't tried to start the service - Mockito.verify(service, never()).internalStart(); - - // Make sure we are still started - assertThat(service.isStarted(), is(true)); - - // Make sure the cache was deleted - expectedException.expect(CachePersistenceException.class); - service.getStateRepositoryWithin(id, "test"); - } - - @Test - public void testDestroyWhenStopped() throws CachePersistenceException { - DefaultLocalPersistenceService service = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); - service.start(null); - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder - .newCacheConfigurationBuilder(Object.class, Object.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB)) - .build(); - PersistableResourceService.PersistenceSpaceIdentifier id = - service.getPersistenceSpaceIdentifier("test", cacheConfiguration); - - service.stop(); - - service = Mockito.spy(service); - service.destroy("test"); - - // Make sure it was started - Mockito.verify(service).internalStart(); - - // Make sure the service is still stopped - assertThat(service.isStarted(), is(false)); - - // Make sure the cache was deleted - expectedException.expect(CachePersistenceException.class); - service.getStateRepositoryWithin(id, "test"); - } - - @Test - public void testIsStarted() { - DefaultLocalPersistenceService service = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(testFolder)); - assertThat(service.isStarted(), is(false)); - service.start(null); - assertThat(service.isStarted(), is(true)); - service.stop(); - assertThat(service.isStarted(), is(false)); - service.startForMaintenance(null); - assertThat(service.isStarted(), is(true)); - service.stop(); - assertThat(service.isStarted(), is(false)); - } } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java index b38f100ebf..ddd1a59824 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java @@ -23,6 +23,7 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.internal.store.StoreSupport; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.expiry.Duration; @@ -40,7 +41,6 @@ import org.ehcache.spi.copy.Copier; import org.ehcache.spi.copy.CopyProvider; 
import org.ehcache.spi.serialization.Serializer; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceDependencies; @@ -861,7 +861,7 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o }; // get the PersistenceSpaceIdentifier if the cache is persistent, null otherwise - LocalPersistenceService.PersistenceSpaceIdentifier persistenceSpaceId = findSingletonAmongst(LocalPersistenceService.PersistenceSpaceIdentifier.class, serviceConfigs); + DiskResourceService.PersistenceSpaceIdentifier persistenceSpaceId = findSingletonAmongst(DiskResourceService.PersistenceSpaceIdentifier.class, serviceConfigs); // find the copiers Collection copierConfigs = findAmongst(DefaultCopierConfiguration.class, underlyingServiceConfigs); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java index 92a6c943ec..66904319d5 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java @@ -17,11 +17,11 @@ package org.ehcache.transactions.xa.internal.journal; import org.ehcache.CachePersistenceException; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.Service; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,16 +33,16 @@ public class DefaultJournalProvider implements JournalProvider { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultJournalProvider.class); - private volatile LocalPersistenceService persistenceService; + private volatile DiskResourceService diskResourceService; @Override public void start(ServiceProvider serviceProvider) { - this.persistenceService = serviceProvider.getService(LocalPersistenceService.class); + this.diskResourceService = serviceProvider.getService(DiskResourceService.class); } @Override public void stop() { - this.persistenceService = null; + this.diskResourceService = null; } @Override @@ -54,7 +54,7 @@ public Journal getJournal(PersistableResourceService.PersistenceSpaceIden try { LOGGER.info("Using persistent XAStore journal"); - FileBasedPersistenceContext persistenceContext = persistenceService.createPersistenceContextWithin(persistentSpaceId, "XAJournal"); + FileBasedPersistenceContext persistenceContext = diskResourceService.createPersistenceContextWithin(persistentSpaceId, "XAJournal"); return new PersistentJournal(persistenceContext.getDirectory(), keySerializer); } catch (CachePersistenceException cpe) { throw new RuntimeException(cpe); From c76ee06ccd753f4db4365e78b4cda6779fd49c4d Mon Sep 17 00:00:00 2001 From: Alex Snaps Date: Fri, 9 Sep 2016 02:35:13 -0400 Subject: [PATCH 012/218] Issue #1429: Support system props in xml --- .../org/ehcache/xml/ConfigurationParser.java | 59 +++++++++++++++++++ .../org/ehcache/xml/XmlConfigurationTest.java | 36 +++++++++-- .../test/resources/configs/systemprops.xml | 35 +++++++++++ 3 files changed, 124 insertions(+), 6 deletions(-) 
create mode 100644 xml/src/test/resources/configs/systemprops.xml diff --git a/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java b/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java index 13ce28d981..e7af8e9db2 100644 --- a/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java +++ b/xml/src/main/java/org/ehcache/xml/ConfigurationParser.java @@ -52,6 +52,9 @@ import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.core.internal.util.ClassLoading; import org.w3c.dom.Element; +import org.w3c.dom.NamedNodeMap; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; @@ -78,8 +81,12 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Properties; import java.util.Set; +import java.util.Stack; import java.util.concurrent.TimeUnit; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.ehcache.xml.model.ThreadPoolReferenceType; import org.ehcache.xml.model.ThreadPoolsType; @@ -92,6 +99,7 @@ */ class ConfigurationParser { + private static final Pattern SYSPROP = Pattern.compile("\\$\\{([^}]+)\\}"); private static final SchemaFactory XSD_SCHEMA_FACTORY = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); private static final URL CORE_SCHEMA_URL = XmlConfiguration.class.getResource("/ehcache-core.xsd"); @@ -105,6 +113,23 @@ class ConfigurationParser { private final Map resourceXmlParsers = new HashMap(); private final ConfigType config; + static String replaceProperties(String originalValue, final Properties properties) { + Matcher matcher = SYSPROP.matcher(originalValue); + + StringBuffer sb = new StringBuffer(); + while (matcher.find()) { + final String property = matcher.group(1); + final String value = properties.getProperty(property); + if (value == null) { + throw new IllegalStateException(String.format("Replacement for ${%s} not found!", property)); + } + matcher.appendReplacement(sb, Matcher.quoteReplacement(value)); + } + matcher.appendTail(sb); + final String resolvedValue = sb.toString(); + return resolvedValue.equals(originalValue) ? 
null : resolvedValue; + } + public ConfigurationParser(String xml) throws IOException, SAXException, JAXBException, ParserConfigurationException { Collection schemaSources = new ArrayList(); schemaSources.add(new StreamSource(CORE_SCHEMA_URL.openStream())); @@ -132,6 +157,9 @@ public ConfigurationParser(String xml) throws IOException, SAXException, JAXBExc DocumentBuilder domBuilder = factory.newDocumentBuilder(); domBuilder.setErrorHandler(new FatalErrorHandler()); Element dom = domBuilder.parse(xml).getDocumentElement(); + + substituteSystemProperties(dom); + if (!CORE_SCHEMA_ROOT_ELEMENT.equals(dom.getLocalName()) || !CORE_SCHEMA_NAMESPACE.equals(dom.getNamespaceURI())) { throw new XmlConfigurationException("Expecting {" + CORE_SCHEMA_NAMESPACE + "}" + CORE_SCHEMA_ROOT_ELEMENT + " element; found {" + dom.getNamespaceURI() + "}" + dom.getLocalName()); @@ -143,6 +171,37 @@ public ConfigurationParser(String xml) throws IOException, SAXException, JAXBExc this.config = unmarshaller.unmarshal(dom, configTypeClass).getValue(); } + private void substituteSystemProperties(final Element dom) { + final Properties properties = System.getProperties(); + Stack nodeLists = new Stack(); + nodeLists.push(dom.getChildNodes()); + while (!nodeLists.isEmpty()) { + NodeList nodeList = nodeLists.pop(); + for (int i = 0; i < nodeList.getLength(); ++i) { + Node currentNode = nodeList.item(i); + if (currentNode.hasChildNodes()) { + nodeLists.push(currentNode.getChildNodes()); + } + final NamedNodeMap attributes = currentNode.getAttributes(); + if (attributes != null) { + for (int j = 0; j < attributes.getLength(); ++j) { + final Node attributeNode = attributes.item(j); + final String newValue = replaceProperties(attributeNode.getNodeValue(), properties); + if (newValue != null) { + attributeNode.setNodeValue(newValue); + } + } + } + if (currentNode.getNodeType() == Node.TEXT_NODE) { + final String newValue = replaceProperties(currentNode.getNodeValue(), properties); + if (newValue != null) { + currentNode.setNodeValue(newValue); + } + } + } + } + } + public Iterable getServiceElements() { return config.getService(); } diff --git a/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java b/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java index 360fe2af1e..90e86a9afd 100644 --- a/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java +++ b/xml/src/test/java/org/ehcache/xml/XmlConfigurationTest.java @@ -22,6 +22,11 @@ import org.ehcache.config.ResourceUnit; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.expiry.Duration; +import org.ehcache.expiry.Expirations; +import org.ehcache.expiry.Expiry; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; import org.ehcache.impl.config.event.DefaultCacheEventListenerConfiguration; @@ -31,20 +36,15 @@ import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.expiry.Duration; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; 
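substituteSystemProperties() above walks every attribute and text node of the parsed DOM and hands its value to replaceProperties(), which resolves ${name} placeholders against the supplied Properties, fails fast on unknown names, and returns null when nothing was substituted. The standalone sketch below re-implements that resolution step for illustration only; the class and method names are invented and only java.util.regex is required.

import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PlaceholderResolutionSketch {

  private static final Pattern SYSPROP = Pattern.compile("\\$\\{([^}]+)\\}");

  // Returns the resolved string, or null when the input contained no ${...} placeholder,
  // following the same convention as replaceProperties() above.
  static String resolve(String original, Properties properties) {
    Matcher matcher = SYSPROP.matcher(original);
    StringBuffer sb = new StringBuffer();
    while (matcher.find()) {
      String value = properties.getProperty(matcher.group(1));
      if (value == null) {
        throw new IllegalStateException(String.format("Replacement for ${%s} not found!", matcher.group(1)));
      }
      matcher.appendReplacement(sb, Matcher.quoteReplacement(value));
    }
    matcher.appendTail(sb);
    String resolved = sb.toString();
    return resolved.equals(original) ? null : resolved;
  }

  public static void main(String[] args) {
    // prints something like /home/alice/ehcache
    System.out.println(resolve("${user.home}/ehcache", System.getProperties()));
    // prints null: no placeholder, so the caller leaves the node untouched
    System.out.println(resolve("plain value", System.getProperties()));
  }
}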
import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; +import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.spi.copy.Copier; import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; import org.ehcache.spi.loaderwriter.WriteBehindConfiguration.BatchingConfiguration; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.core.internal.util.ClassLoading; import org.ehcache.xml.exceptions.XmlConfigurationException; import org.hamcrest.CoreMatchers; import org.hamcrest.Matcher; @@ -677,6 +677,30 @@ public void testCustomResource() throws Exception { } } + @Test + public void testSysPropReplace() { + System.getProperties().setProperty("ehcache.match", Number.class.getName()); + XmlConfiguration xmlConfig = new XmlConfiguration(XmlConfigurationTest.class.getResource("/configs/systemprops.xml")); + + assertThat(xmlConfig.getCacheConfigurations().get("bar").getKeyType(), sameInstance((Class)Number.class)); + + DefaultPersistenceConfiguration persistenceConfiguration = (DefaultPersistenceConfiguration)xmlConfig.getServiceCreationConfigurations().iterator().next(); + assertThat(persistenceConfiguration.getRootDirectory(), is(new File(System.getProperty("user.home") + "/ehcache"))); + } + + @Test + public void testSysPropReplaceRegExp() { + assertThat(ConfigurationParser.replaceProperties("foo${file.separator}", System.getProperties()), equalTo("foo" + File.separator)); + assertThat(ConfigurationParser.replaceProperties("${file.separator}foo${file.separator}", System.getProperties()), equalTo(File.separator + "foo" + File.separator)); + try { + ConfigurationParser.replaceProperties("${bar}foo", System.getProperties()); + fail("Should have thrown!"); + } catch (IllegalStateException e) { + assertThat(e.getMessage().contains("${bar}"), is(true)); + } + assertThat(ConfigurationParser.replaceProperties("foo", System.getProperties()), nullValue()); + } + private void checkListenerConfigurationExists(Collection configuration) { int count = 0; for (Object o : configuration) { diff --git a/xml/src/test/resources/configs/systemprops.xml b/xml/src/test/resources/configs/systemprops.xml new file mode 100644 index 0000000000..bf18c771d0 --- /dev/null +++ b/xml/src/test/resources/configs/systemprops.xml @@ -0,0 +1,35 @@ + + + + + + + + + + + 5 + + + + ${ehcache.match} + 5 + + From dca855d85dfe679e0f1b7c765ca6ab311a09017d Mon Sep 17 00:00:00 2001 From: Ramesh Kavanappillil Date: Thu, 15 Sep 2016 17:02:32 +0530 Subject: [PATCH 013/218] Closes #1441 Make the constructor for ehcache concurrent offheap clock cache public --- .../store/offheap/EhcacheConcurrentOffHeapClockCache.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java index 52b7754454..308de82478 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCache.java @@ -42,7 +42,7 @@ public class EhcacheConcurrentOffHeapClockCache extends AbstractConcurrent private final EvictionAdvisor evictionAdvisor; private final AtomicLong[] counters; - protected EhcacheConcurrentOffHeapClockCache(EvictionAdvisor evictionAdvisor, Factory> 
segmentFactory, int ssize) { + public EhcacheConcurrentOffHeapClockCache(EvictionAdvisor evictionAdvisor, Factory> segmentFactory, int ssize) { super(segmentFactory, ssize); this.evictionAdvisor = evictionAdvisor; this.counters = new AtomicLong[segments.length]; From cc793d807d91d08eac2d6bff315721d83c69f15a Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 9 Sep 2016 17:41:47 +0200 Subject: [PATCH 014/218] :shirt: #1430 Suppress unchecked warning on Eviction --- api/src/main/java/org/ehcache/config/Eviction.java | 8 +++----- .../impl/internal/store/disk/OffHeapDiskStore.java | 3 ++- .../org/ehcache/impl/internal/store/heap/Backend.java | 2 +- .../ehcache/impl/internal/store/heap/KeyCopyBackend.java | 2 +- .../ehcache/impl/internal/store/heap/OnHeapStore.java | 5 +++-- .../ehcache/impl/internal/store/heap/SimpleBackend.java | 2 +- .../impl/internal/store/offheap/OffHeapStore.java | 3 ++- .../impl/internal/concurrent/ConcurrentHashMapTest.java | 9 +++++---- ...EhcachePersistentConcurrentOffHeapClockCacheTest.java | 5 +++-- .../disk/factories/EhcachePersistentSegmentTest.java | 9 +++++---- .../internal/store/heap/OnHeapStoreEvictionTest.java | 5 +++-- 11 files changed, 29 insertions(+), 24 deletions(-) diff --git a/api/src/main/java/org/ehcache/config/Eviction.java b/api/src/main/java/org/ehcache/config/Eviction.java index 8a3c9109c4..64dd5f7869 100644 --- a/api/src/main/java/org/ehcache/config/Eviction.java +++ b/api/src/main/java/org/ehcache/config/Eviction.java @@ -21,7 +21,7 @@ */ public final class Eviction { - private static final EvictionAdvisor NO_ADVICE = new EvictionAdvisor() { + private static final EvictionAdvisor NO_ADVICE = new EvictionAdvisor() { @Override public boolean adviseAgainstEviction(Object key, Object value) { return false; @@ -31,12 +31,10 @@ public boolean adviseAgainstEviction(Object key, Object value) { /** * Returns an {@link EvictionAdvisor} where no mappings are advised against eviction. 
* - * @param the key type for the advisor - * @param the value type for the advisor * @return an advisor where no mappings are advised against eviction */ - public static EvictionAdvisor noAdvice() { - return (EvictionAdvisor) NO_ADVICE; + public static EvictionAdvisor noAdvice() { + return NO_ADVICE; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index 39f95ffb4e..3ee3c0befe 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -81,6 +81,7 @@ import java.util.concurrent.atomic.AtomicReference; import static java.lang.Math.max; +import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; import static org.terracotta.offheapstore.util.MemoryUnit.BYTES; @@ -124,7 +125,7 @@ public OffHeapDiskStore(FileBasedPersistenceContext fileBasedPersistenceContext, if (evictionAdvisor != null) { this.evictionAdvisor = wrap(evictionAdvisor); } else { - this.evictionAdvisor = wrap(Eviction.noAdvice()); + this.evictionAdvisor = wrap(noAdvice()); } this.keyType = config.getKeyType(); this.valueType = config.getValueType(); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java index de74e89c3e..89137e872d 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/Backend.java @@ -76,5 +76,5 @@ interface Backend { void updateUsageInBytesIfRequired(long delta); - Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor); + Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java index 3f59f193e0..da62fce6cb 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/KeyCopyBackend.java @@ -59,7 +59,7 @@ public boolean remove(K key, OnHeapValueHolder value) { } @Override - public Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor) { + public Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor) { Map.Entry, OnHeapValueHolder> candidate = keyCopyMap.getEvictionCandidate(random, size, prioritizer, evictionAdvisor); if (candidate == null) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index 19987f6dd3..d1f42de145 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -91,6 +91,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; import static 
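The Eviction change above replaces the unchecked cast in noAdvice() with a shared constant that is returned directly, which is what lets call sites such as OffHeapDiskStore and OnHeapStore statically import noAdvice(). A self-contained sketch of the pattern follows; the generic parameters are stripped throughout this patch text, so the Object/Object typing below is an assumption of the example rather than a quote of the real signature, and the sketch declares its own advisor interface to stay runnable on its own.

public final class NoAdviceSketch {

  // stand-in for org.ehcache.config.EvictionAdvisor, declared here to keep the sketch self-contained
  public interface EvictionAdvisor<K, V> {
    boolean adviseAgainstEviction(K key, V value);
  }

  private static final EvictionAdvisor<Object, Object> NO_ADVICE =
      new EvictionAdvisor<Object, Object>() {
        @Override
        public boolean adviseAgainstEviction(Object key, Object value) {
          return false; // never advises against eviction
        }
      };

  // returning the constant with its declared type removes the unchecked cast
  // that the old generic factory method needed
  public static EvictionAdvisor<Object, Object> noAdvice() {
    return NO_ADVICE;
  }

  private NoAdviceSketch() {
  }
}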
org.ehcache.core.internal.util.ValueSuppliers.supplierOf; import static org.terracotta.statistics.StatisticBuilder.operation; @@ -228,7 +229,7 @@ public OnHeapStore(final Configuration config, final TimeSource timeSource this.capacity = byteSized ? ((MemoryUnit) heapPool.getUnit()).toBytes(heapPool.getSize()) : heapPool.getSize(); this.timeSource = timeSource; if (config.getEvictionAdvisor() == null) { - this.evictionAdvisor = Eviction.noAdvice(); + this.evictionAdvisor = noAdvice(); } else { this.evictionAdvisor = config.getEvictionAdvisor(); } @@ -1560,7 +1561,7 @@ boolean evict(final StoreEventSink eventSink) { if (candidate == null) { // 2nd attempt without any advisor - candidate = map.getEvictionCandidate(random, SAMPLE_SIZE, EVICTION_PRIORITIZER, Eviction.>noAdvice()); + candidate = map.getEvictionCandidate(random, SAMPLE_SIZE, EVICTION_PRIORITIZER, noAdvice()); } if (candidate == null) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java index d4063da6b9..bb03a0eefa 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/SimpleBackend.java @@ -47,7 +47,7 @@ public boolean remove(K key, OnHeapValueHolder value) { } @Override - public Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor) { + public Map.Entry> getEvictionCandidate(Random random, int size, final Comparator> prioritizer, final EvictionAdvisor> evictionAdvisor) { return realMap.getEvictionCandidate(random, size, prioritizer, evictionAdvisor); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java index c098eb445e..4bc5f4b1a0 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java @@ -61,6 +61,7 @@ import java.util.Map; import java.util.Set; +import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.impl.internal.store.offheap.OffHeapStoreUtils.getBufferSource; /** @@ -81,7 +82,7 @@ public OffHeapStore(final Configuration config, TimeSource timeSource, Sto if (evictionAdvisor != null) { this.evictionAdvisor = wrap(evictionAdvisor); } else { - this.evictionAdvisor = wrap(Eviction.noAdvice()); + this.evictionAdvisor = wrap(noAdvice()); } this.keySerializer = config.getKeySerializer(); this.valueSerializer = config.getValueSerializer(); diff --git a/impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java b/impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java index 8764e6a3ec..90857b5dd6 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMapTest.java @@ -25,6 +25,7 @@ import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; +import static org.ehcache.config.Eviction.noAdvice; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsNull.nullValue; @@ -117,21 +118,21 @@ public int compareTo(BadHashKey o) { @Test public void testRandomSampleOnEmptyMap() { ConcurrentHashMap map = new ConcurrentHashMap(); - 
assertThat(map.getEvictionCandidate(new Random(), 1, null, Eviction.noAdvice()), nullValue()); + assertThat(map.getEvictionCandidate(new Random(), 1, null, noAdvice()), nullValue()); } @Test public void testEmptyRandomSample() { ConcurrentHashMap map = new ConcurrentHashMap(); map.put("foo", "bar"); - assertThat(map.getEvictionCandidate(new Random(), 0, null, Eviction.noAdvice()), nullValue()); + assertThat(map.getEvictionCandidate(new Random(), 0, null, noAdvice()), nullValue()); } @Test public void testOversizedRandomSample() { ConcurrentHashMap map = new ConcurrentHashMap(); map.put("foo", "bar"); - Entry candidate = map.getEvictionCandidate(new Random(), 2, null, Eviction.noAdvice()); + Entry candidate = map.getEvictionCandidate(new Random(), 2, null, noAdvice()); assertThat(candidate.getKey(), is("foo")); assertThat(candidate.getValue(), is("bar")); } @@ -147,7 +148,7 @@ public void testUndersizedRandomSample() { public int compare(String t, String t1) { return 0; } - }, Eviction.noAdvice()); + }, noAdvice()); assertThat(candidate, notNullValue()); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java index e3c848ca8c..4b665e45e7 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java @@ -38,6 +38,7 @@ import java.io.IOException; +import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.impl.internal.store.disk.OffHeapDiskStore.persistent; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; import static org.mockito.Mockito.mock; @@ -50,7 +51,7 @@ public class EhcachePersistentConcurrentOffHeapClockCacheTest extends AbstractEh @Override protected EhcachePersistentConcurrentOffHeapClockCache createTestSegment() throws IOException { - return createTestSegment(Eviction.noAdvice(), mock(EvictionListener.class)); + return createTestSegment(noAdvice(), mock(EvictionListener.class)); } @Override @@ -114,4 +115,4 @@ protected boolean isPinned(String key, EhcacheOffHeapBackingMap protected int getMetadata(String key, int mask, EhcacheOffHeapBackingMap segment) { return ((EhcachePersistentConcurrentOffHeapClockCache) segment).getMetadata(key, mask); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java index d260fefc7e..0e9054a6dd 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java @@ -38,6 +38,7 @@ import java.io.IOException; +import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.impl.internal.store.disk.OffHeapDiskStore.persistent; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; import static org.hamcrest.CoreMatchers.is; @@ -52,7 +53,7 @@ public class EhcachePersistentSegmentTest { public final TemporaryFolder folder = new TemporaryFolder(); private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment() throws IOException { - return 
createTestSegment(Eviction.noAdvice(), mock(EvictionListener.class)); + return createTestSegment(noAdvice(), mock(EvictionListener.class)); } private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(EvictionAdvisor evictionPredicate) throws IOException { @@ -60,10 +61,10 @@ private EhcachePersistentSegmentFactory.EhcachePersistentSegment } private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(EvictionListener evictionListener) throws IOException { - return createTestSegment(Eviction.noAdvice(), evictionListener); + return createTestSegment(noAdvice(), evictionListener); } - private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(final EvictionAdvisor evictionPredicate, EvictionListener evictionListener) throws IOException { + private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(final EvictionAdvisor evictionPredicate, EvictionListener evictionListener) throws IOException { try { HeuristicConfiguration configuration = new HeuristicConfiguration(1024 * 1024); SerializationProvider serializationProvider = new DefaultSerializationProvider(null); @@ -154,4 +155,4 @@ public void testEvictionFiresEvent() throws IOException { segment.destroy(); } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java index 89c8f6137e..dd45857b1c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java @@ -49,6 +49,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.Semaphore; +import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.MatcherAssert.assertThat; @@ -95,7 +96,7 @@ public String apply(String mappedKey) { public void testFaultsDoNotGetToEvictionAdvisor() throws StoreAccessException { final Semaphore semaphore = new Semaphore(0); - final OnHeapStoreForTests store = newStore(SystemTimeSource.INSTANCE, Eviction.noAdvice()); + final OnHeapStoreForTests store = newStore(SystemTimeSource.INSTANCE, noAdvice()); ExecutorService executor = Executors.newCachedThreadPool(); try { @@ -129,7 +130,7 @@ public String value() { public void testEvictionCandidateLimits() throws Exception { TestTimeSource timeSource = new TestTimeSource(); StoreConfigurationImpl configuration = new StoreConfigurationImpl( - String.class, String.class, Eviction.noAdvice(), + String.class, String.class, noAdvice(), getClass().getClassLoader(), Expirations.noExpiration(), heap(1).build(), 1, null, null); TestStoreEventDispatcher eventDispatcher = new TestStoreEventDispatcher(); final String firstKey = "daFirst"; From 2f0095a9996703c5d73a4719bea459e245d0cbba Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 8 Sep 2016 14:32:57 +0200 Subject: [PATCH 015/218] :shirt: #1430 Clean up generics issue in BaseCacheConfiguration When we changed the builder to take the exact key and value type on first step, some of the wildcards were left in but are really not needed. 
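The cleanup described above is easier to follow with the generics written out; a stripped-down sketch of the resulting shape (the original wildcard bounds are assumed to have been ? extends K and ? extends V, and the CacheConfiguration interface plus unrelated members are omitted):

    public class BaseCacheConfigurationSketch<K, V> {

      private final Class<K> keyType;     // previously a wildcard-bounded Class field
      private final Class<V> valueType;

      public BaseCacheConfigurationSketch(Class<K> keyType, Class<V> valueType) {
        this.keyType = keyType;
        this.valueType = valueType;
      }

      public Class<K> getKeyType() {
        return keyType;                   // no narrowing cast needed any more
      }

      public Class<V> getValueType() {
        return valueType;
      }
    }

Because CacheConfigurationBuilder now starts from the exact key and value classes, a plain Class<K>/Class<V> pair expresses everything the wildcard fields did, minus the casts in the getters.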
--- .../ehcache/core/config/BaseCacheConfiguration.java | 10 +++++----- .../config/builders/CacheConfigurationBuilder.java | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java b/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java index 7cb77a5f33..63c66cc299 100644 --- a/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java +++ b/core/src/main/java/org/ehcache/core/config/BaseCacheConfiguration.java @@ -32,8 +32,8 @@ */ public class BaseCacheConfiguration implements CacheConfiguration { - private final Class keyType; - private final Class valueType; + private final Class keyType; + private final Class valueType; private final EvictionAdvisor evictionAdvisor; private final Collection> serviceConfigurations; private final ClassLoader classLoader; @@ -51,7 +51,7 @@ public class BaseCacheConfiguration implements CacheConfiguration { * @param resourcePools the resource pools * @param serviceConfigurations the service configurations */ - public BaseCacheConfiguration(Class keyType, Class valueType, + public BaseCacheConfiguration(Class keyType, Class valueType, EvictionAdvisor evictionAdvisor, ClassLoader classLoader, Expiry expiry, ResourcePools resourcePools, ServiceConfiguration... serviceConfigurations) { @@ -81,7 +81,7 @@ public Collection> getServiceConfigurations() { */ @Override public Class getKeyType() { - return (Class) keyType; + return keyType; } /** @@ -89,7 +89,7 @@ public Class getKeyType() { */ @Override public Class getValueType() { - return (Class) valueType; + return valueType; } /** diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java b/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java index 442c2e7d91..9c80cbc40a 100644 --- a/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java @@ -63,8 +63,8 @@ public class CacheConfigurationBuilder implements Builder evictionAdvisor; private ResourcePools resourcePools; - private Class keyType; - private Class valueType; + private Class keyType; + private Class valueType; /** * Creates a new instance ready to produce a {@link CacheConfiguration} with key type {@code } and with value type @@ -102,7 +102,7 @@ private CacheConfigurationBuilder(Class keyType, Class valueType, Resource this.resourcePools = resourcePools; } - private CacheConfigurationBuilder(CacheConfigurationBuilder other) { + private CacheConfigurationBuilder(CacheConfigurationBuilder other) { this.keyType = other.keyType; this.valueType = other.valueType; this.expiry = other.expiry; From 0f9fd72f882e2c3c172e13d880a7026b469cca4d Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 8 Sep 2016 22:02:57 +0200 Subject: [PATCH 016/218] :shirt: #1430 Fix CacheEventListener generics Since the configuration takes types with super bound, the accepted CacheEvent in the method signature must take extends bounds. 
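Spelling the rule out: registration methods take a CacheEventListener<? super K, ? super V>, so the only event parameter every such listener can safely accept is a covariant CacheEvent<? extends K, ? extends V>. A small illustrative listener written against the post-change signature (the bounds are inferred from the commit message rather than copied from the hunk):

    import org.ehcache.event.CacheEvent;
    import org.ehcache.event.CacheEventListener;

    // A listener over broad types (Number) can be registered on a narrower
    // Cache<Long, Long> precisely because onEvent only reads from the event
    // through extends-bounded wildcards.
    class LoggingListener implements CacheEventListener<Number, Number> {
      @Override
      public void onEvent(CacheEvent<? extends Number, ? extends Number> event) {
        System.out.println(event.getType() + " for key " + event.getKey());
      }
    }

With the old invariant CacheEvent parameter, handing an event produced for a Cache<Long, Long> to a CacheEventListener<? super Long, ? super Long> would not have type-checked without an unchecked cast.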
--- .../ehcache/jsr107/Eh107CacheEntryEvent.java | 10 +++++----- .../ehcache/jsr107/EventListenerAdaptors.java | 8 ++++---- .../pany/ehcache/TestCacheEventListener.java | 4 ++-- .../org/ehcache/event/CacheEventListener.java | 2 +- .../client/internal/store/ClusteredStore.java | 1 + ...ortedCombinationsWIthClusteredCacheTest.java | 2 +- .../core/EhcacheRuntimeConfiguration.java | 6 +++--- .../internal/events/EventListenerWrapper.java | 10 +++++----- .../ehcache/impl/events/CacheEventAdapter.java | 2 +- .../impl/events/CacheEventDispatcherImpl.java | 17 +++++++++-------- .../ehcache/impl/events/EventDispatchTask.java | 6 +++--- .../org/ehcache/docs/plugs/ListenerObject.java | 2 +- .../DefaultCacheEventListenerProviderTest.java | 4 ++-- .../integration/EventNotificationTest.java | 4 ++-- .../ehcache/integration/ExpiryEventsTest.java | 2 +- .../integration/TestCacheEventListener.java | 6 +++--- .../TestSecondCacheEventListener.java | 6 +++--- 17 files changed, 47 insertions(+), 45 deletions(-) diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java b/107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java index 98fe478d2d..85d1725031 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java +++ b/107/src/main/java/org/ehcache/jsr107/Eh107CacheEntryEvent.java @@ -27,11 +27,11 @@ abstract class Eh107CacheEntryEvent extends CacheEntryEvent { private static final long serialVersionUID = 8460535666272347345L; - private final CacheEvent ehEvent; + private final CacheEvent ehEvent; private final boolean hasOldValue; - Eh107CacheEntryEvent(Cache source, EventType eventType, CacheEvent ehEvent, + Eh107CacheEntryEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { super(source, eventType); this.ehEvent = ehEvent; @@ -63,7 +63,7 @@ public boolean isOldValueAvailable() { static class NormalEvent extends Eh107CacheEntryEvent { - public NormalEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { + public NormalEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { super(source, eventType, ehEvent, hasOldValue); } @@ -75,7 +75,7 @@ public V getValue() { static class RemovingEvent extends Eh107CacheEntryEvent { - public RemovingEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { + public RemovingEvent(Cache source, EventType eventType, CacheEvent ehEvent, boolean hasOldValue) { super(source, eventType, ehEvent, hasOldValue); } @@ -85,4 +85,4 @@ public V getValue() { } } -} \ No newline at end of file +} diff --git a/107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java b/107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java index 770904798d..d3b80baec8 100644 --- a/107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java +++ b/107/src/main/java/org/ehcache/jsr107/EventListenerAdaptors.java @@ -95,7 +95,7 @@ org.ehcache.event.EventType getEhcacheEventType() { @SuppressWarnings("unchecked") @Override - public void onEvent(org.ehcache.event.CacheEvent ehEvent) { + public void onEvent(org.ehcache.event.CacheEvent ehEvent) { Eh107CacheEntryEvent event = new Eh107CacheEntryEvent.NormalEvent(source, EventType.UPDATED, ehEvent, requestsOld); if (filter.evaluate(event)) { Set events = Collections.singleton(event); @@ -121,7 +121,7 @@ org.ehcache.event.EventType getEhcacheEventType() { @SuppressWarnings("unchecked") @Override - public void onEvent(org.ehcache.event.CacheEvent ehEvent) { + public void 
onEvent(org.ehcache.event.CacheEvent ehEvent) { Eh107CacheEntryEvent event = new Eh107CacheEntryEvent.RemovingEvent(source, EventType.REMOVED, ehEvent, requestsOld); if (filter.evaluate(event)) { Set events = Collections.singleton(event); @@ -147,7 +147,7 @@ org.ehcache.event.EventType getEhcacheEventType() { @SuppressWarnings("unchecked") @Override - public void onEvent(org.ehcache.event.CacheEvent ehEvent) { + public void onEvent(org.ehcache.event.CacheEvent ehEvent) { Eh107CacheEntryEvent event = new Eh107CacheEntryEvent.RemovingEvent(source, EventType.EXPIRED, ehEvent, requestsOld); if (filter.evaluate(event)) { Set events = Collections.singleton(event); @@ -173,7 +173,7 @@ org.ehcache.event.EventType getEhcacheEventType() { @SuppressWarnings("unchecked") @Override - public void onEvent(org.ehcache.event.CacheEvent ehEvent) { + public void onEvent(org.ehcache.event.CacheEvent ehEvent) { Eh107CacheEntryEvent event = new Eh107CacheEntryEvent.NormalEvent(source, EventType.CREATED, ehEvent, requestsOld); if (filter.evaluate(event)) { Set events = Collections.singleton(event); diff --git a/107/src/test/java/com/pany/ehcache/TestCacheEventListener.java b/107/src/test/java/com/pany/ehcache/TestCacheEventListener.java index c64d3db509..ca1e85e784 100644 --- a/107/src/test/java/com/pany/ehcache/TestCacheEventListener.java +++ b/107/src/test/java/com/pany/ehcache/TestCacheEventListener.java @@ -26,10 +26,10 @@ * TestCacheEventListener */ public class TestCacheEventListener implements CacheEventListener { - public static List> seen = new ArrayList>(); + public static List> seen = new ArrayList>(); @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { seen.add(event); } } diff --git a/api/src/main/java/org/ehcache/event/CacheEventListener.java b/api/src/main/java/org/ehcache/event/CacheEventListener.java index f12f1658f7..4cc8a38475 100644 --- a/api/src/main/java/org/ehcache/event/CacheEventListener.java +++ b/api/src/main/java/org/ehcache/event/CacheEventListener.java @@ -38,6 +38,6 @@ public interface CacheEventListener { * * @param event the actual {@code CacheEvent} */ - void onEvent(CacheEvent event); + void onEvent(CacheEvent event); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index a7fc5fb5e5..7e6de4e150 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -59,6 +59,7 @@ import org.ehcache.spi.service.ServiceConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.terracotta.context.ContextManager; import org.terracotta.context.annotations.ContextAttribute; import org.terracotta.statistics.StatisticsManager; import org.terracotta.statistics.observer.OperationObserver; diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java index 2af0633f05..4a0f36fcfe 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/UnSupportedCombinationsWIthClusteredCacheTest.java @@ -184,7 +184,7 @@ public void 
deleteAll(Iterable keys) throws BulkCacheWritingExce private static class TestEventListener implements CacheEventListener { @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { } } diff --git a/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java b/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java index 7622339cc3..3786523c61 100644 --- a/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java +++ b/core/src/main/java/org/ehcache/core/EhcacheRuntimeConfiguration.java @@ -126,13 +126,13 @@ public synchronized void deregisterCacheEventListener(CacheEventListener listener, EventOrdering ordering, EventFiring firing, Set forEventTypes) { - EventListenerWrapper listenerWrapper = new EventListenerWrapper(listener, firing, ordering, EnumSet.copyOf(forEventTypes)); + EventListenerWrapper listenerWrapper = new EventListenerWrapper(listener, firing, ordering, EnumSet.copyOf(forEventTypes)); fireCacheConfigurationChange(CacheConfigurationProperty.ADD_LISTENER, listenerWrapper, listenerWrapper); } @Override public void registerCacheEventListener(CacheEventListener listener, EventOrdering ordering, EventFiring firing, EventType eventType, EventType... eventTypes) { - EventListenerWrapper listenerWrapper = new EventListenerWrapper(listener, firing, ordering, EnumSet.of(eventType, eventTypes)); + EventListenerWrapper listenerWrapper = new EventListenerWrapper(listener, firing, ordering, EnumSet.of(eventType, eventTypes)); fireCacheConfigurationChange(CacheConfigurationProperty.ADD_LISTENER, listenerWrapper, listenerWrapper); } @@ -185,4 +185,4 @@ public String readableString() { "expiry: " + ((expiry != null) ? expiry.getClass().getSimpleName() : "") + "\n" + "resourcePools: " + "\n " + ((resourcePools instanceof HumanReadable) ? ((HumanReadable)resourcePools).readableString() : "").replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java b/core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java index 13ee9968f6..e8474f8e1f 100644 --- a/core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java +++ b/core/src/main/java/org/ehcache/core/internal/events/EventListenerWrapper.java @@ -27,20 +27,20 @@ /** * Internal wrapper for {@link CacheEventListener} and their configuration. 
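A reconstructed view of the generified wrapper is useful at this point; the bounds below are inferred from the registration signatures rather than copied from the hunk, and the firing/ordering/event-type members are left out:

    import org.ehcache.event.CacheEvent;
    import org.ehcache.event.CacheEventListener;

    final class EventListenerWrapperSketch<K, V> implements CacheEventListener<K, V> {

      private final CacheEventListener<? super K, ? super V> listener;

      EventListenerWrapperSketch(CacheEventListener<? super K, ? super V> listener) {
        if (listener == null) {
          throw new NullPointerException("listener cannot be null");
        }
        this.listener = listener;
      }

      @Override
      public void onEvent(CacheEvent<? extends K, ? extends V> event) {
        // A CacheEvent<? extends K, ? extends V> is acceptable to a listener
        // declared over ? super K / ? super V, so the delegation needs no cast.
        listener.onEvent(event);
      }
    }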
*/ -public final class EventListenerWrapper implements CacheEventListener { - private final CacheEventListener listener; +public final class EventListenerWrapper implements CacheEventListener { + private final CacheEventListener listener; private final EventFiring firing; private final EventOrdering ordering; private final EnumSet forEvents; - public EventListenerWrapper(CacheEventListener listener) { + public EventListenerWrapper(CacheEventListener listener) { this.listener = listener; this.firing = null; this.ordering = null; this.forEvents = null; } - public EventListenerWrapper(CacheEventListener listener, final EventFiring firing, final EventOrdering ordering, + public EventListenerWrapper(CacheEventListener listener, final EventFiring firing, final EventOrdering ordering, final EnumSet forEvents) { if (listener == null) { throw new NullPointerException("listener cannot be null"); @@ -78,7 +78,7 @@ public boolean equals(Object other) { } @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { listener.onEvent(event); } diff --git a/impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java b/impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java index 7a6246c19e..ba6c72280d 100644 --- a/impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java +++ b/impl/src/main/java/org/ehcache/impl/events/CacheEventAdapter.java @@ -32,7 +32,7 @@ public abstract class CacheEventAdapter implements CacheEventListener event) { + public final void onEvent(CacheEvent event) { switch (event.getType()) { case CREATED: onCreation(event.getKey(), event.getNewValue()); diff --git a/impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java b/impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java index c1c6a8c66f..20de35f4b1 100644 --- a/impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java +++ b/impl/src/main/java/org/ehcache/impl/events/CacheEventDispatcherImpl.java @@ -60,8 +60,8 @@ public class CacheEventDispatcherImpl implements CacheEventDispatcher syncListenersList = new CopyOnWriteArrayList(); - private final List aSyncListenersList = new CopyOnWriteArrayList(); + private final List> syncListenersList = new CopyOnWriteArrayList>(); + private final List> aSyncListenersList = new CopyOnWriteArrayList>(); private final StoreEventListener eventListener = new StoreListener(); private volatile Cache listenerSource; @@ -85,7 +85,7 @@ public CacheEventDispatcherImpl(ExecutorService unOrderedExecutor, ExecutorServi @Override public void registerCacheEventListener(CacheEventListener listener, EventOrdering ordering, EventFiring firing, EnumSet forEventTypes) { - EventListenerWrapper wrapper = new EventListenerWrapper(listener, firing, ordering, forEventTypes); + EventListenerWrapper wrapper = new EventListenerWrapper(listener, firing, ordering, forEventTypes); registerCacheEventListener(wrapper); } @@ -96,7 +96,7 @@ public void registerCacheEventListener(CacheEventListener * * @param wrapper the listener wrapper to register */ - private synchronized void registerCacheEventListener(EventListenerWrapper wrapper) { + private synchronized void registerCacheEventListener(EventListenerWrapper wrapper) { if(aSyncListenersList.contains(wrapper) || syncListenersList.contains(wrapper)) { throw new IllegalStateException("Cache Event Listener already registered: " + wrapper.getListener()); } @@ -126,7 +126,7 @@ private synchronized void registerCacheEventListener(EventListenerWrapper wrappe */ @Override public 
void deregisterCacheEventListener(CacheEventListener listener) { - EventListenerWrapper wrapper = new EventListenerWrapper(listener); + EventListenerWrapper wrapper = new EventListenerWrapper(listener); if (!removeWrapperFromList(wrapper, aSyncListenersList)) { if (!removeWrapperFromList(wrapper, syncListenersList)) { @@ -141,7 +141,7 @@ public void deregisterCacheEventListener(CacheEventListener listenersList) { + private synchronized boolean removeWrapperFromList(EventListenerWrapper wrapper, List> listenersList) { int index = listenersList.indexOf(wrapper); if (index != -1) { EventListenerWrapper containedWrapper = listenersList.remove(index); @@ -201,15 +201,16 @@ void onEvent(CacheEvent event) { * {@inheritDoc} */ @Override + @SuppressWarnings("unchecked") public List getConfigurationChangeListeners() { List configurationChangeListenerList = new ArrayList(); configurationChangeListenerList.add(new CacheConfigurationChangeListener() { @Override public void cacheConfigurationChange(final CacheConfigurationChangeEvent event) { if (event.getProperty().equals(CacheConfigurationProperty.ADD_LISTENER)) { - registerCacheEventListener((EventListenerWrapper)event.getNewValue()); + registerCacheEventListener((EventListenerWrapper)event.getNewValue()); } else if (event.getProperty().equals(CacheConfigurationProperty.REMOVE_LISTENER)) { - CacheEventListener oldListener = (CacheEventListener)event.getOldValue(); + CacheEventListener oldListener = (CacheEventListener)event.getOldValue(); deregisterCacheEventListener(oldListener); } } diff --git a/impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java b/impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java index 73c4025c38..79897ca453 100644 --- a/impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java +++ b/impl/src/main/java/org/ehcache/impl/events/EventDispatchTask.java @@ -24,9 +24,9 @@ class EventDispatchTask implements Runnable { private static final Logger LOGGER = LoggerFactory.getLogger(EventDispatchTask.class); private final CacheEvent cacheEvent; - private final Iterable listenerWrappers; + private final Iterable> listenerWrappers; - EventDispatchTask(CacheEvent cacheEvent, Iterable listener) { + EventDispatchTask(CacheEvent cacheEvent, Iterable> listener) { if (cacheEvent == null) { throw new NullPointerException("cache event cannot be null"); } @@ -39,7 +39,7 @@ class EventDispatchTask implements Runnable { @Override public void run() { - for(EventListenerWrapper listenerWrapper : listenerWrappers) { + for(EventListenerWrapper listenerWrapper : listenerWrappers) { if (listenerWrapper.isForEventType(cacheEvent.getType())) { try { listenerWrapper.onEvent(cacheEvent); diff --git a/impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java b/impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java index 4e11805814..87b088be32 100644 --- a/impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java +++ b/impl/src/test/java/org/ehcache/docs/plugs/ListenerObject.java @@ -28,7 +28,7 @@ public class ListenerObject implements CacheEventListener { private int evicted; @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { Logger logger = LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "GettingStarted"); logger.info(event.getType().toString()); if(event.getType() == EventType.EVICTED){ diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java 
b/impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java index 7b99a844ad..5eff70c206 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/event/DefaultCacheEventListenerProviderTest.java @@ -86,8 +86,8 @@ public String toString() { }; @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { //noop } } -} \ No newline at end of file +} diff --git a/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java b/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java index a5a7b53761..752034b871 100644 --- a/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/EventNotificationTest.java @@ -404,7 +404,7 @@ public static class Listener implements CacheEventListener { private HashMap eventTypeHashMap = new HashMap(); @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { Logger logger = LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EventNotificationTest"); logger.info(event.getType().toString()); eventTypeHashMap.put(event.getType(), eventCounter.get()); @@ -440,7 +440,7 @@ private void resetLatchCount(int operations) { } @Override - public void onEvent(final CacheEvent event) { + public void onEvent(final CacheEvent event) { Logger logger = LoggerFactory.getLogger(EventNotificationTest.class + "-" + "EventNotificationTest"); logger.info(event.getType().toString()); if(event.getType() == EventType.EVICTED){ diff --git a/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java b/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java index db67960437..38e358f145 100644 --- a/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java @@ -179,7 +179,7 @@ private void performActualTest(Cache testCache) { testCache.getRuntimeConfiguration().registerCacheEventListener(new CacheEventListener() { @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { expiredKeys.add(event.getKey()); } }, EventOrdering.ORDERED, EventFiring.SYNCHRONOUS, EnumSet.of(EventType.EXPIRED)); diff --git a/xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java b/xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java index 0ec20efd59..cab91b7183 100644 --- a/xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java +++ b/xml/src/test/java/com/pany/ehcache/integration/TestCacheEventListener.java @@ -24,9 +24,9 @@ */ public class TestCacheEventListener implements CacheEventListener { - public static CacheEvent FIRED_EVENT; + public static CacheEvent FIRED_EVENT; @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { FIRED_EVENT = event; } -} \ No newline at end of file +} diff --git a/xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java b/xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java index 4b4651cbcd..a041328b4b 100644 --- a/xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java +++ b/xml/src/test/java/com/pany/ehcache/integration/TestSecondCacheEventListener.java @@ -24,9 +24,9 @@ */ public 
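Most hunks in this patch trade raw casts such as (OffHeapDiskStore) resource or (Ehcache.PutAllFunction) remappingFunction for wildcard- or parameter-carrying forms; the exact type arguments are not reproduced here. The language rule those fixes rely on, shown as a tiny standalone sketch rather than as Ehcache code:

    import java.util.ArrayList;
    import java.util.Arrays;

    final class WildcardCastDemo {

      // A cast to a type whose arguments are all unbounded wildcards is fully
      // checked at runtime, so javac raises no "unchecked" warning for it,
      // unlike a cast to the raw ArrayList or to a concrete ArrayList<String>.
      static int sizeOf(Object resource) {
        ArrayList<?> list = (ArrayList<?>) resource;
        return list.size();
      }

      public static void main(String[] args) {
        System.out.println(sizeOf(new ArrayList<String>(Arrays.asList("a", "b"))));
      }
    }

Where a concrete type argument genuinely cannot be proven, the later patches in this series scope a @SuppressWarnings("unchecked") to the smallest possible element instead.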
class TestSecondCacheEventListener implements CacheEventListener { - public static CacheEvent SECOND_LISTENER_FIRED_EVENT; + public static CacheEvent SECOND_LISTENER_FIRED_EVENT; @Override - public void onEvent(CacheEvent event) { + public void onEvent(CacheEvent event) { SECOND_LISTENER_FIRED_EVENT = event; } -} \ No newline at end of file +} From ef507c5eb8d0d592dad20fb6f2fd1e2fe43a5f19 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Sep 2016 08:51:43 +0200 Subject: [PATCH 017/218] :shirt: #1430 Various generic fixes around Store implementations --- .../client/internal/store/ClusteredStore.java | 4 ++-- .../impl/internal/store/disk/OffHeapDiskStore.java | 10 ++++------ .../ehcache/impl/internal/store/heap/OnHeapStore.java | 3 ++- .../impl/internal/store/offheap/OffHeapStore.java | 8 +++++--- 4 files changed, 13 insertions(+), 12 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index 7e6de4e150..e6eaa1c9a0 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -410,7 +410,7 @@ public Map> bulkCompute(final Set keys, final Fun throws StoreAccessException { Map> valueHolderMap = new HashMap>(); if(remappingFunction instanceof Ehcache.PutAllFunction) { - Ehcache.PutAllFunction putAllFunction = (Ehcache.PutAllFunction)remappingFunction; + Ehcache.PutAllFunction putAllFunction = (Ehcache.PutAllFunction)remappingFunction; Map entriesToRemap = putAllFunction.getEntriesToRemap(); for(Map.Entry entry: entriesToRemap.entrySet()) { PutStatus putStatus = silentPut(entry.getKey(), entry.getValue()); @@ -420,7 +420,7 @@ public Map> bulkCompute(final Set keys, final Fun } } } else if(remappingFunction instanceof Ehcache.RemoveAllFunction) { - Ehcache.RemoveAllFunction removeAllFunction = (Ehcache.RemoveAllFunction)remappingFunction; + Ehcache.RemoveAllFunction removeAllFunction = (Ehcache.RemoveAllFunction)remappingFunction; for (K key : keys) { boolean removed = silentRemove(key); if(removed) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index 3ee3c0befe..8f9ad0751a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -226,10 +226,7 @@ private EhcachePersistentConcurrentOffHeapClockCache> r 64, evictionAdvisor, mapEvictionListener, false); - EhcachePersistentConcurrentOffHeapClockCache m = new EhcachePersistentConcurrentOffHeapClockCache>(input, evictionAdvisor, factory); - - - + EhcachePersistentConcurrentOffHeapClockCache> m = new EhcachePersistentConcurrentOffHeapClockCache>(input, evictionAdvisor, factory); m.bootstrap(input); return m; @@ -375,7 +372,7 @@ public void releaseStore(Store resource) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } try { - close((OffHeapDiskStore)resource); + close((OffHeapDiskStore)resource); } catch (IOException e) { throw new RuntimeException(e); } @@ -404,7 +401,7 @@ public void initStore(Store resource) { if (identifier == null) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } - 
OffHeapDiskStore diskStore = (OffHeapDiskStore) resource; + OffHeapDiskStore diskStore = (OffHeapDiskStore) resource; Serializer keySerializer = diskStore.keySerializer; if (keySerializer instanceof StatefulSerializer) { @@ -470,6 +467,7 @@ public void initAuthoritativeTier(AuthoritativeTier resource) { * This is kind of a hack, but it's safe to use this if the regular portability * is stateless. */ + @SuppressWarnings("unchecked") public static PersistentPortability persistent(final Portability normal) { final Class normalKlazz = normal.getClass(); Class[] delegateInterfaces = normalKlazz.getInterfaces(); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index d1f42de145..399174168c 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -162,6 +162,7 @@ public void onInvalidation(Object key, ValueHolder valueHolder) { private final Expiry expiry; private final TimeSource timeSource; private final StoreEventDispatcher storeEventDispatcher; + @SuppressWarnings("unchecked") private volatile InvalidationListener invalidationListener = NULL_INVALIDATION_LISTENER; private CacheConfigurationChangeListener cacheConfigurationChangeListener = new CacheConfigurationChangeListener() { @@ -298,7 +299,7 @@ private OnHeapValueHolder internalGet(final K key, final boolean updateAccess } } - private OnHeapValueHolder getQuiet(final K key) throws StoreAccessException { + private OnHeapValueHolder getQuiet(final K key) throws StoreAccessException { try { OnHeapValueHolder mapping = map.get(key); if (mapping == null) { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java index 4bc5f4b1a0..eafa055766 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java @@ -171,7 +171,8 @@ public void releaseStore(Store resource) { close((OffHeapStore)resource); } - static void close(final OffHeapStore resource) {EhcacheConcurrentOffHeapClockCache> localMap = resource.map; + static void close(final OffHeapStore resource) { + EhcacheConcurrentOffHeapClockCache localMap = resource.map; if (localMap != null) { resource.map = null; localMap.destroy(); @@ -185,7 +186,7 @@ public void initStore(Store resource) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } - OffHeapStore offHeapStore = (OffHeapStore) resource; + OffHeapStore offHeapStore = (OffHeapStore) resource; Serializer keySerializer = offHeapStore.keySerializer; if (keySerializer instanceof StatefulSerializer) { ((StatefulSerializer)keySerializer).init(new TransientStateRepository()); @@ -195,7 +196,7 @@ public void initStore(Store resource) { ((StatefulSerializer)valueSerializer).init(new TransientStateRepository()); } - init((OffHeapStore)resource); + init(offHeapStore); } static void init(final OffHeapStore resource) { @@ -234,6 +235,7 @@ public LowerCachingTier createCachingTier(Configuration store } @Override + @SuppressWarnings("unchecked") public void releaseCachingTier(LowerCachingTier resource) { if (!createdStores.contains(resource)) { throw new IllegalArgumentException("Given caching tier is not managed by this provider : " + resource); From 
5e13921a84abc59a3d8864e68f9f9860c1c80fdf Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Sep 2016 18:29:49 +0200 Subject: [PATCH 018/218] :shirt: #1430 Static method trick for SerializingCopier This trick allows usage of the class as a Class> in configuration. --- .../java/org/ehcache/jsr107/ConfigurationMerger.java | 8 ++++---- .../config/builders/CacheConfigurationBuilder.java | 4 ++-- .../config/builders/UserManagedCacheBuilder.java | 4 ++-- .../org/ehcache/impl/copy/SerializingCopier.java | 12 ++++++++++++ .../config/builders/CacheManagerBuilderTest.java | 2 +- .../config/serializer/SerializerCountingTest.java | 12 ++++++------ .../internal/spi/copy/DefaultCopyProviderTest.java | 6 +++--- .../internal/store/heap/OnHeapStoreByValueTest.java | 4 ++-- .../org/ehcache/integration/CacheCopierTest.java | 2 +- .../org/ehcache/integration/ExpiryEventsTest.java | 2 +- .../org/ehcache/integration/SerializersTest.java | 2 +- .../test/java/org/ehcache/osgi/SimpleOsgiTest.java | 2 +- .../ehcache/transactions/xa/internal/XAStore.java | 4 ++-- 13 files changed, 38 insertions(+), 26 deletions(-) diff --git a/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java b/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java index eef5c3f319..b7d941887b 100644 --- a/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java +++ b/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java @@ -165,12 +165,12 @@ private CacheConfigurationBuilder handleStoreByValue(Eh107CompleteC if (defaults.containsKey(jsr107Configuration.getKeyType())) { matchingDefault = true; } else { - builder = builder.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)); + builder = builder.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); } if (defaults.containsKey(jsr107Configuration.getValueType())) { matchingDefault = true; } else { - builder = builder.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)); + builder = builder.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } if (matchingDefault) { LOG.info("CacheManager level copier configuration overwriting JSR-107 by-value semantics for cache {}", cacheName); @@ -198,13 +198,13 @@ private static CacheConfigurationBuilder addDefaultCopiers(CacheCon if (immutableTypes.contains(keyType)) { builder = builder.add(new DefaultCopierConfiguration((Class)Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.KEY)); } else { - builder = builder.add(new DefaultCopierConfiguration((Class)SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)); + builder = builder.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); } if (immutableTypes.contains(valueType)) { builder = builder.add(new DefaultCopierConfiguration((Class)Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.VALUE)); } else { - builder = builder.add(new DefaultCopierConfiguration((Class)SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)); + builder = builder.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } return builder; } diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java b/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java index 9c80cbc40a..df7317580f 100644 --- 
a/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java @@ -342,7 +342,7 @@ public CacheConfigurationBuilder withLoaderWriter(Class withKeySerializingCopier() { CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder(this); removeExistingCopierConfigFor(DefaultCopierConfiguration.Type.KEY, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)); + otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); return otherBuilder; } @@ -356,7 +356,7 @@ public CacheConfigurationBuilder withKeySerializingCopier() { public CacheConfigurationBuilder withValueSerializingCopier() { CacheConfigurationBuilder otherBuilder = new CacheConfigurationBuilder(this); removeExistingCopierConfigFor(DefaultCopierConfiguration.Type.VALUE, otherBuilder); - otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)); + otherBuilder.serviceConfigurations.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); return otherBuilder; } diff --git a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java b/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java index 98625450a0..9b3cbe411f 100644 --- a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java @@ -191,12 +191,12 @@ T build(ServiceLocator serviceLocator) throws IllegalStateException { if (keyCopier != null) { serviceConfigsList.add(new DefaultCopierConfiguration(keyCopier, DefaultCopierConfiguration.Type.KEY)); } else if (useKeySerializingCopier) { - serviceConfigsList.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)); + serviceConfigsList.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); } if (valueCopier != null) { serviceConfigsList.add(new DefaultCopierConfiguration(valueCopier, DefaultCopierConfiguration.Type.VALUE)); } else if (useValueSerializingCopier) { - serviceConfigsList.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)); + serviceConfigsList.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } CacheConfiguration cacheConfig = new BaseCacheConfiguration(keyType, valueType, evictionAdvisor, diff --git a/impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java b/impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java index 2cf92db17d..8864a4177d 100644 --- a/impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java +++ b/impl/src/main/java/org/ehcache/impl/copy/SerializingCopier.java @@ -16,6 +16,7 @@ package org.ehcache.impl.copy; +import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.spi.serialization.Serializer; @@ -26,6 +27,17 @@ public final class SerializingCopier extends ReadWriteCopier { private final Serializer serializer; + /** + * Convenience method allowing to represent this copier's class as the expected type in configuration. 
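The helper introduced here reads better with its signature written out; a hedged reconstruction (the exact generic declaration and return type are an assumption here):

    // Inside org.ehcache.impl.copy.SerializingCopier<T>:
    @SuppressWarnings("unchecked")
    public static <T> Class<SerializingCopier<T>> asCopierClass() {
      // One unchecked cast, confined to this method, instead of a raw
      // (Class) SerializingCopier.class cast repeated at every call site.
      return (Class<SerializingCopier<T>>) (Class<?>) SerializingCopier.class;
    }

Configuration code can then write, for example, new DefaultCopierConfiguration<Person>(SerializingCopier.<Person>asCopierClass(), DefaultCopierConfiguration.Type.VALUE), with Person standing in for whatever value type the cache uses; that substitution is what the remaining hunks of this patch perform.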
+ * + * @param The type to work on + * @return the proper type + */ + @SuppressWarnings("unchecked") + public static Class> asCopierClass() { + return (Class) SerializingCopier.class; + } + /** * Creates a new copier that will using the provided {@link Serializer}. * diff --git a/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java b/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java index 9db60743bb..6c6fdc572d 100644 --- a/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java @@ -58,7 +58,7 @@ public CacheManagerBuilder builder(final CacheManagerBui public void testCanOverrideCopierInConfig() { CacheManagerBuilder managerBuilder = newCacheManagerBuilder() .withCopier(Long.class, (Class) IdentityCopier.class); - assertNotNull(managerBuilder.withCopier(Long.class, (Class) SerializingCopier.class)); + assertNotNull(managerBuilder.withCopier(Long.class, SerializingCopier.asCopierClass())); } @Test diff --git a/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java b/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java index 7c19f07aa2..023778f1ec 100644 --- a/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java +++ b/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java @@ -79,8 +79,8 @@ public void tearDown() { public void testOnHeapPutGet() { Cache cache = cacheManager.createCache("onHeap", newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .build()); cache.put(42L, "TheAnswer!"); @@ -121,8 +121,8 @@ public void testOffHeapPutGet() { public void testOffHeapOnHeapCopyPutGet() { Cache cache = cacheManager.createCache("offHeap", newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .build() ); @@ -145,8 +145,8 @@ public void testOffHeapOnHeapCopyPutGet() { public void testDiskOffHeapOnHeapCopyPutGet() { Cache cache = cacheManager.createCache("offHeap", newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(2, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB).disk(100, MemoryUnit.MB)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)) - .add(new DefaultCopierConfiguration(SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), 
DefaultCopierConfiguration.Type.VALUE)) .build() ); diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java index b9cda2869a..8bd018301a 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java @@ -59,7 +59,7 @@ public void testCreateKeyCopierWithoutConfig() { public void testCreateKeyCopierWithSerializer() { DefaultCopyProvider copyProvider = new DefaultCopyProvider(null); DefaultCopierConfiguration config = new DefaultCopierConfiguration( - (Class)SerializingCopier.class, DefaultCopierConfiguration.Type.KEY); + SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY); assertThat(copyProvider.createKeyCopier(Long.class, mock(Serializer.class), config), instanceOf(SerializingCopier.class)); } @@ -85,7 +85,7 @@ public void testCreateValueCopierWithoutConfig() { public void testCreateValueCopierWithSerializer() { DefaultCopyProvider copyProvider = new DefaultCopyProvider(null); DefaultCopierConfiguration config = new DefaultCopierConfiguration( - (Class)SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE); + SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE); assertThat(copyProvider.createValueCopier(Long.class, mock(Serializer.class), config), instanceOf(SerializingCopier.class)); } @@ -130,4 +130,4 @@ public T copy(final T obj) { return obj; } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java index b2a34f938a..d72ecb6723 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueTest.java @@ -160,8 +160,8 @@ public void testStoreByValue() { CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(false); cacheManager.init(); - DefaultCopierConfiguration copierConfiguration = new DefaultCopierConfiguration( - SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE); + DefaultCopierConfiguration copierConfiguration = new DefaultCopierConfiguration( + SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE); final Cache cache1 = cacheManager.createCache("cache1", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(1)) .build()); diff --git a/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java b/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java index 812079d8ca..d7c56ba0c5 100644 --- a/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/CacheCopierTest.java @@ -131,7 +131,7 @@ public void testIdentityCopier() throws Exception { @Test public void testSerializingCopier() throws Exception { CacheConfiguration cacheConfiguration = baseConfig - .add(new DefaultCopierConfiguration((Class)SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .add(new DefaultSerializerConfiguration(PersonSerializer.class, DefaultSerializerConfiguration.Type.VALUE)) .build(); diff --git 
a/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java b/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java index 38e358f145..2fddbd6d2d 100644 --- a/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/ExpiryEventsTest.java @@ -64,7 +64,7 @@ public class ExpiryEventsTest { private static final CacheConfigurationBuilder byValueCacheConfigBuilder = byRefCacheConfigBuilder.add(new DefaultCopierConfiguration( - (Class)SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE));; + SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE));; private static final TestTimeSource testTimeSource = new TestTimeSource(); diff --git a/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java b/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java index a8f6d5153b..94d8c97334 100644 --- a/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/SerializersTest.java @@ -94,7 +94,7 @@ private void testSerializerWithByValueHeapCache(Serializer serializer) thr newCacheManagerBuilder() .withCache("heapByValueCache", newCacheConfigurationBuilder(Long.class, Person.class, newResourcePoolsBuilder().heap(10)) - .withKeyCopier((Class)SerializingCopier.class) + .withKeyCopier(SerializingCopier.asCopierClass()) .withKeySerializer(serializer) ); cmBuilder.build(true); diff --git a/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java b/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java index d217eb1fc1..d22fce3748 100644 --- a/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java +++ b/osgi-test/src/test/java/org/ehcache/osgi/SimpleOsgiTest.java @@ -74,7 +74,7 @@ public void testEhcache3AsBundle() { public void testEhcache3WithSerializationAndClientClass() { CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("myCache", newCacheConfigurationBuilder(Long.class, Person.class, heap(10)) - .add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)) + .add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)) .withClassLoader(getClass().getClassLoader()) .build()) .build(true); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java index ddd1a59824..e2e9ec1486 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java @@ -878,14 +878,14 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o // force-in a key copier if none is configured if (keyCopierConfig == null) { - underlyingServiceConfigs.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.KEY)); + underlyingServiceConfigs.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY)); } else { underlyingServiceConfigs.add(keyCopierConfig); } // force-in a value copier if none is configured, or wrap the configured one in a soft lock copier if (valueCopierConfig == null) { - underlyingServiceConfigs.add(new DefaultCopierConfiguration((Class) SerializingCopier.class, DefaultCopierConfiguration.Type.VALUE)); + 
underlyingServiceConfigs.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } else { CopyProvider copyProvider = serviceProvider.getService(CopyProvider.class); Copier valueCopier = copyProvider.createValueCopier(storeConfig.getValueType(), storeConfig.getValueSerializer(), valueCopierConfig); From 030e57307555bd1b0b08dcbf3cd1dfb67790172a Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Sep 2016 18:32:45 +0200 Subject: [PATCH 019/218] :shirt: #1430 Adding suppress warnings where no alternative --- .../impl/config/copy/DefaultCopyProviderConfiguration.java | 3 ++- .../persistence/CacheManagerPersistenceConfiguration.java | 1 + .../config/persistence/UserManagedPersistenceContext.java | 1 + .../impl/internal/classes/ClassInstanceConfiguration.java | 4 +++- .../impl/internal/events/AbstractStoreEventDispatcher.java | 4 +++- 5 files changed, 10 insertions(+), 3 deletions(-) diff --git a/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java b/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java index 8f15d56368..9c0b8fedfa 100644 --- a/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java +++ b/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java @@ -16,6 +16,7 @@ package org.ehcache.impl.config.copy; +import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; import org.ehcache.impl.internal.classes.ClassInstanceProviderConfiguration; import org.ehcache.spi.copy.Copier; import org.ehcache.spi.copy.CopyProvider; @@ -93,7 +94,7 @@ public DefaultCopyProviderConfiguration addCopierFor(Class clazz, Class(copierClass)); return this; } } diff --git a/impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java b/impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java index da46c4a831..679938fc46 100644 --- a/impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java +++ b/impl/src/main/java/org/ehcache/impl/config/persistence/CacheManagerPersistenceConfiguration.java @@ -44,6 +44,7 @@ public CacheManagerPersistenceConfiguration(final File rootDirectory) { * Transforms the builder received in one that returns a {@link PersistentCacheManager}. */ @Override + @SuppressWarnings("unchecked") public CacheManagerBuilder builder(final CacheManagerBuilder other) { return (CacheManagerBuilder)other.using(this); } diff --git a/impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java b/impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java index 6e155c9c25..a0d42a79c2 100644 --- a/impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java +++ b/impl/src/main/java/org/ehcache/impl/config/persistence/UserManagedPersistenceContext.java @@ -46,6 +46,7 @@ public UserManagedPersistenceContext(String identifier, LocalPersistenceService * Transforms the builder received in one that returns a {@link PersistentUserManagedCache}. 
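The hunks in this patch follow one recurring pattern for unchecked casts that cannot be avoided: rather than annotating a whole method or field, the cast is pulled into a local variable and only that declaration carries @SuppressWarnings, so the suppression scope stays as narrow as possible. A minimal, self-contained sketch of the pattern (the class and method names below are illustrative, not taken from the patch):

    import java.util.ArrayList;
    import java.util.List;

    final class NarrowSuppressionExample {

      static <T> List<T>[] newListArray(int size) {
        // Arrays of generic element types cannot be created directly, so the
        // unavoidable unchecked conversion is confined to this single local
        // declaration instead of suppressing warnings for the whole method.
        @SuppressWarnings("unchecked")
        List<T>[] lists = new List[size];
        for (int i = 0; i < lists.length; i++) {
          lists[i] = new ArrayList<T>();
        }
        return lists;
      }
    }

The AbstractStoreEventDispatcher hunk further down applies the same idea to the array of ordered event queues.
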
*/ @Override + @SuppressWarnings("unchecked") public UserManagedCacheBuilder> builder(UserManagedCacheBuilder> builder) { return (UserManagedCacheBuilder>) builder.identifier(identifier).using(persistenceService); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java b/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java index ef5e3ffca3..e7cea1f690 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java +++ b/impl/src/main/java/org/ehcache/impl/internal/classes/ClassInstanceConfiguration.java @@ -37,7 +37,9 @@ public ClassInstanceConfiguration(Class clazz, Object... arguments) public ClassInstanceConfiguration(T instance) { this.instance = instance; - this.clazz = (Class) instance.getClass(); + @SuppressWarnings("unchecked") + Class instanceClass = (Class) instance.getClass(); + this.clazz = instanceClass; this.arguments = null; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java b/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java index 56e5a8e179..00f325c62c 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java +++ b/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java @@ -83,7 +83,9 @@ protected AbstractStoreEventDispatcher(int dispatcherConcurrency) { if (dispatcherConcurrency <= 0) { throw new IllegalArgumentException("Dispatcher concurrency must be an integer greater than 0"); } - orderedQueues = new LinkedBlockingQueue[dispatcherConcurrency]; + @SuppressWarnings("unchecked") + LinkedBlockingQueue>[] queues = new LinkedBlockingQueue[dispatcherConcurrency]; + orderedQueues = queues; for (int i = 0; i < orderedQueues.length; i++) { orderedQueues[i] = new LinkedBlockingQueue>(10000); } From 2cab9e49fb1610c68f7b7723e9a784803efd9402 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Sep 2016 18:49:57 +0200 Subject: [PATCH 020/218] :shirt: #1430 Fixes around default InvalidationListener in Store --- .../impl/internal/store/heap/OnHeapStore.java | 6 +++--- .../store/offheap/AbstractOffHeapStore.java | 13 ++++++++----- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index 399174168c..a38f15839f 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -141,9 +141,9 @@ public int compare(ValueHolder t, ValueHolder u) { } }; - private static final InvalidationListener NULL_INVALIDATION_LISTENER = new InvalidationListener() { + private static final InvalidationListener NULL_INVALIDATION_LISTENER = new InvalidationListener() { @Override - public void onInvalidation(Object key, ValueHolder valueHolder) { + public void onInvalidation(Object key, ValueHolder valueHolder) { // Do nothing } }; @@ -163,7 +163,7 @@ public void onInvalidation(Object key, ValueHolder valueHolder) { private final TimeSource timeSource; private final StoreEventDispatcher storeEventDispatcher; @SuppressWarnings("unchecked") - private volatile InvalidationListener invalidationListener = NULL_INVALIDATION_LISTENER; + private volatile InvalidationListener invalidationListener = (InvalidationListener) NULL_INVALIDATION_LISTENER; private CacheConfigurationChangeListener 
cacheConfigurationChangeListener = new CacheConfigurationChangeListener() { @Override diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java index 35cf66c031..1594b2597e 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java @@ -63,9 +63,9 @@ public abstract class AbstractOffHeapStore implements AuthoritativeTier NULL_INVALIDATION_LISTENER = new CachingTier.InvalidationListener() { @Override - public void onInvalidation(Object key, ValueHolder valueHolder) { + public void onInvalidation(Object key, ValueHolder valueHolder) { // Do nothing } }; @@ -103,7 +103,8 @@ public void onInvalidation(Object key, ValueHolder valueHolder) { private volatile InvalidationValve valve; protected BackingMapEvictionListener mapEvictionListener; - private volatile CachingTier.InvalidationListener invalidationListener = NULL_INVALIDATION_LISTENER; + @SuppressWarnings("unchecked") + private volatile CachingTier.InvalidationListener invalidationListener = (CachingTier.InvalidationListener) NULL_INVALIDATION_LISTENER; public AbstractOffHeapStore(String statisticsTag, Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher) { keyType = config.getKeyType(); @@ -1281,7 +1282,9 @@ static class BackingMapEvictionListener implements EhcacheSegmentFactory.E private BackingMapEvictionListener(StoreEventDispatcher eventDispatcher, OperationObserver evictionObserver) { this.eventDispatcher = eventDispatcher; this.evictionObserver = evictionObserver; - this.invalidationListener = NULL_INVALIDATION_LISTENER; + @SuppressWarnings("unchecked") + CachingTier.InvalidationListener nullInvalidationListener = (CachingTier.InvalidationListener) NULL_INVALIDATION_LISTENER; + this.invalidationListener = nullInvalidationListener; } public void setInvalidationListener(CachingTier.InvalidationListener invalidationListener) { @@ -1314,4 +1317,4 @@ private static final class OffHeapStoreStatsSettings { this.authoritativeTier = store; } } -} \ No newline at end of file +} From 63bd7358b2faee5103c954787f45bf2fe1421b05 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Sep 2016 19:01:33 +0200 Subject: [PATCH 021/218] :shirt: #1430 Generics fixes around NO_OP_EVENT_SINK --- .../internal/events/AbstractStoreEventDispatcher.java | 10 +++++----- .../internal/events/ScopedStoreEventDispatcher.java | 4 +++- .../events/ThreadLocalStoreEventDispatcher.java | 4 +++- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java b/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java index 00f325c62c..78a11d7cc8 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java +++ b/impl/src/main/java/org/ehcache/impl/internal/events/AbstractStoreEventDispatcher.java @@ -32,7 +32,7 @@ */ abstract class AbstractStoreEventDispatcher implements StoreEventDispatcher { - protected static final StoreEventSink NO_OP_EVENT_SINK = new CloseableStoreEventSink() { + protected static final StoreEventSink NO_OP_EVENT_SINK = new CloseableStoreEventSink() { @Override public void close() { // Do nothing @@ -49,17 +49,17 @@ public void reset() { } @Override - public void removed(Object key, ValueSupplier value) { + public void 
removed(Object key, ValueSupplier value) { // Do nothing } @Override - public void updated(Object key, ValueSupplier oldValue, Object newValue) { + public void updated(Object key, ValueSupplier oldValue, Object newValue) { // Do nothing } @Override - public void expired(Object key, ValueSupplier value) { + public void expired(Object key, ValueSupplier value) { // Do nothing } @@ -69,7 +69,7 @@ public void created(Object key, Object value) { } @Override - public void evicted(Object key, ValueSupplier value) { + public void evicted(Object key, ValueSupplier value) { // Do nothing } }; diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java b/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java index 6c498f1d02..3f67c65b1f 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java +++ b/impl/src/main/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcher.java @@ -31,7 +31,9 @@ public ScopedStoreEventDispatcher(int dispatcherConcurrency) { @Override public StoreEventSink eventSink() { if (getListeners().isEmpty()) { - return NO_OP_EVENT_SINK; + @SuppressWarnings("unchecked") + StoreEventSink noOpEventSink = (StoreEventSink) NO_OP_EVENT_SINK; + return noOpEventSink; } else { return new InvocationScopedEventSink(getFilters(), isEventOrdering(), getOrderedQueues(), getListeners()); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java b/impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java index 86ff36f808..b5c3222ad8 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java +++ b/impl/src/main/java/org/ehcache/impl/internal/events/ThreadLocalStoreEventDispatcher.java @@ -33,7 +33,9 @@ public ThreadLocalStoreEventDispatcher(int dispatcherConcurrency) { @Override public StoreEventSink eventSink() { if (getListeners().isEmpty()) { - return NO_OP_EVENT_SINK; + @SuppressWarnings("unchecked") + StoreEventSink noOpEventSink = (StoreEventSink) NO_OP_EVENT_SINK; + return noOpEventSink; } else { StoreEventSink eventSink = tlEventSink.get(); if (eventSink == null) { From 84624cb6598373aeb9336e68985f03a0ada08fe6 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Sep 2016 19:03:37 +0200 Subject: [PATCH 022/218] :shirt: #1430 Various fixes in write-behind operation * Remove generics warnings * Delete dead code * Rename method following dead code deletion --- .../BatchingLocalHeapWriteBehindQueue.java | 2 +- .../NonBatchingLocalHeapWriteBehindQueue.java | 2 +- .../operations/BatchOperation.java | 2 +- .../operations/DeleteAllOperation.java | 2 +- .../operations/DeleteOperation.java | 17 ++-------- .../operations/SingleOperation.java | 13 +------- .../operations/WriteAllOperation.java | 2 +- .../operations/WriteOperation.java | 33 ++----------------- 8 files changed, 11 insertions(+), 62 deletions(-) diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java index 033fa7f8e2..1d74f90a0c 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/BatchingLocalHeapWriteBehindQueue.java @@ -197,7 +197,7 @@ public void run() { // execute the batch 
operations for (BatchOperation batch : batches) { try { - batch.performBatchOperation(cacheLoaderWriter); + batch.performOperation(cacheLoaderWriter); } catch (Exception e) { LOGGER.warn("Exception while bulk processing in write behind queue", e); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java index 7b274f252d..cabf8dd1e3 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/NonBatchingLocalHeapWriteBehindQueue.java @@ -69,7 +69,7 @@ protected void addOperation(final SingleOperation operation) { @Override public void run() { try { - operation.performSingleOperation(cacheLoaderWriter); + operation.performOperation(cacheLoaderWriter); } catch (Exception e) { LOGGER.warn("Exception while processing key '{}' write behind queue : {}", operation.getKey(), e); } finally { diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java index 295e8ccd84..5497cd557b 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/BatchOperation.java @@ -31,6 +31,6 @@ public interface BatchOperation { * Perform the batch operation for a particular batch writer * */ - void performBatchOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception; + void performOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java index ad7988a095..2c2ba93c42 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteAllOperation.java @@ -41,7 +41,7 @@ public DeleteAllOperation(Iterable entries) { /** * {@inheritDoc} */ - public void performBatchOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception { + public void performOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception { cacheLoaderWriter.deleteAll(entries); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java index 7f6a35dca0..6d92cad624 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/DeleteOperation.java @@ -15,9 +15,6 @@ */ package org.ehcache.impl.internal.loaderwriter.writebehind.operations; -import java.util.ArrayList; -import java.util.List; - import org.ehcache.spi.loaderwriter.CacheLoaderWriter; /** @@ -49,18 +46,10 @@ public DeleteOperation(K key, long creationTime) { this.creationTime = creationTime; } - 
public void performSingleOperation(CacheLoaderWriter cacheLoaderWriter) throws Exception { + public void performOperation(CacheLoaderWriter cacheLoaderWriter) throws Exception { cacheLoaderWriter.delete(key); } - public BatchOperation createBatchOperation(List> operations) { - final List keys = new ArrayList(); - for (KeyBasedOperation operation : operations) { - keys.add(operation.getKey()); - } - return new DeleteAllOperation(keys); - } - @Override public K getKey() { return this.key; @@ -78,8 +67,8 @@ public int hashCode() { @Override public boolean equals(Object other) { - if (other instanceof DeleteOperation) { - return getCreationTime() == ((DeleteOperation) other).getCreationTime() && getKey().equals(((DeleteOperation) other).getKey()); + if (other instanceof DeleteOperation) { + return getCreationTime() == ((DeleteOperation) other).getCreationTime() && getKey().equals(((DeleteOperation) other).getKey()); } else { return false; } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java index 33ffa9efba..23ea82ecb9 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/SingleOperation.java @@ -15,8 +15,6 @@ */ package org.ehcache.impl.internal.loaderwriter.writebehind.operations; -import java.util.List; - import org.ehcache.spi.loaderwriter.CacheLoaderWriter; /** @@ -30,15 +28,6 @@ public interface SingleOperation extends KeyBasedOperation { * Perform this operation as a single execution with the provided cache writer * */ - void performSingleOperation(CacheLoaderWriter cacheLoaderWriter) throws Exception; + void performOperation(CacheLoaderWriter cacheLoaderWriter) throws Exception; - /** - * Creates a batch operation that corresponds to the operation type of this single operation. - *

- * This batch operation will not be stored in the queue anymore and is solely used for structuring. - * The data from the single operation will already be processed in the final form that will be expected by the - * {@code CacheWriter} that will be used to execute the batch operation. - * - */ - BatchOperation createBatchOperation(List> operations); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java index e1aa26b634..bccb6806b7 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteAllOperation.java @@ -39,7 +39,7 @@ public WriteAllOperation(Iterable> this.entries = entries; } - public void performBatchOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception { + public void performOperation(CacheLoaderWriter cacheLoaderWriter) throws BulkCacheWritingException, Exception { cacheLoaderWriter.writeAll(entries); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java index bb181c60cf..0386ce9b0a 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java +++ b/impl/src/main/java/org/ehcache/impl/internal/loaderwriter/writebehind/operations/WriteOperation.java @@ -15,10 +15,6 @@ */ package org.ehcache.impl.internal.loaderwriter.writebehind.operations; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - import org.ehcache.spi.loaderwriter.CacheLoaderWriter; /** @@ -54,35 +50,10 @@ public WriteOperation(K k, V v, long creationTime) { } @Override - public void performSingleOperation(CacheLoaderWriter cacheWriter) throws Exception { + public void performOperation(CacheLoaderWriter cacheWriter) throws Exception { cacheWriter.write(key, value); } - @Override - public BatchOperation createBatchOperation(List> operations) { - final List> entries = new ArrayList>(); - for (final KeyBasedOperation operation : operations) { - entries.add(new Map.Entry() { - - @Override - public K getKey() { - return ((WriteOperation)operation).key; - } - - @Override - public V getValue() { - return ((WriteOperation)operation).value; - } - - @Override - public V setValue(V value) { - throw new UnsupportedOperationException("Not Supported."); - } - }); - } - return new WriteAllOperation(entries); - } - @Override public K getKey() { return this.key; @@ -107,7 +78,7 @@ public int hashCode() { @Override public boolean equals(Object other) { if (other instanceof WriteOperation) { - return getCreationTime() == ((WriteOperation) other).getCreationTime() && getKey().equals(((WriteOperation) other).getKey()); + return getCreationTime() == ((WriteOperation) other).getCreationTime() && getKey().equals(((WriteOperation) other).getKey()); } else { return false; } From 62717666a3d306f0f4e58f9e1df714ca888aaed7 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Sep 2016 21:13:15 +0200 Subject: [PATCH 023/218] :shirt: #1430 Fixes in sizeof code --- .../org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java | 2 +- .../impl/internal/sizeof/DefaultSizeOfEngineProvider.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff 
--git a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java b/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java index 19592634df..8962df8e1f 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java +++ b/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngine.java @@ -43,7 +43,7 @@ public DefaultSizeOfEngine(long maxObjectGraphSize, long maxObjectSize) { this.maxObjectGraphSize = maxObjectGraphSize; this.maxObjectSize = maxObjectSize; this.sizeOf = SizeOf.newInstance(filterSource.getFilters()); - this.onHeapKeyOffset = sizeOf.deepSizeOf(new CopiedOnHeapKey(new Object(), new IdentityCopier())); + this.onHeapKeyOffset = sizeOf.deepSizeOf(new CopiedOnHeapKey(new Object(), new IdentityCopier())); this.chmTreeBinOffset = sizeOf.deepSizeOf(ConcurrentHashMap.FAKE_TREE_BIN); } diff --git a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java b/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java index f549c75661..f879cf8963 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java +++ b/impl/src/main/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineProvider.java @@ -55,7 +55,7 @@ public SizeOfEngine createSizeOfEngine(ResourceUnit resourceUnit, ServiceConfigu if(!isByteSized) { return new NoopSizeOfEngine(); // Noop Size of Engine } - DefaultSizeOfEngineConfiguration config = ServiceLocator.findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, serviceConfigs); + DefaultSizeOfEngineConfiguration config = ServiceLocator.findSingletonAmongst(DefaultSizeOfEngineConfiguration.class, (Object[]) serviceConfigs); if(config != null) { long maxSize = config.getUnit().toBytes(config.getMaxObjectSize()); return new DefaultSizeOfEngine(config.getMaxObjectGraphSize(), maxSize); From 62aaf280fd00ba06fc0c4a4d6c07715febab7197 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Sep 2016 21:44:50 +0200 Subject: [PATCH 024/218] :shirt: #1430 Generic fixes in copy provider --- .../ehcache/impl/internal/spi/copy/DefaultCopyProvider.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java b/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java index 8cf3a07440..765d42b68b 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java +++ b/impl/src/main/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProvider.java @@ -40,6 +40,7 @@ public class DefaultCopyProvider extends ClassInstanceProvider, Copier< private static final Logger LOG = LoggerFactory.getLogger(DefaultCopyProvider.class); + @SuppressWarnings("unchecked") public DefaultCopyProvider(DefaultCopyProviderConfiguration configuration) { super(configuration, (Class) DefaultCopierConfiguration.class); } @@ -87,9 +88,12 @@ private Copier createCopier(Type type, Class clazz, } private Copier createCopier(Class clazz, DefaultCopierConfiguration config, Type type) { + @SuppressWarnings("unchecked") Copier copier = (Copier) newInstance(clazz, config); if (copier == null) { - copier = (Copier) newInstance(clazz, new DefaultCopierConfiguration((Class) IdentityCopier.class, type)); + @SuppressWarnings("unchecked") + Copier defaultInstance = (Copier) newInstance(clazz, new DefaultCopierConfiguration((Class) IdentityCopier.class, type)); + copier = defaultInstance; } return copier; } From 
b74466e6f6b340105294213c578b393e51a51cc4 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Fri, 16 Sep 2016 15:53:49 +0530 Subject: [PATCH 025/218] Put operation for passive in serverstore impl --- .../clustered/server/ServerStoreImpl.java | 4 +++ .../server/offheap/OffHeapChainMap.java | 17 +++++++++++ .../server/offheap/OffHeapServerStore.java | 29 +++++++++++++++++++ .../server/offheap/ChainMapTest.java | 17 +++++++++++ .../offheap/OffHeapServerStoreTest.java | 1 - 5 files changed, 67 insertions(+), 1 deletion(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java index 895434972f..2f2724e395 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java @@ -78,6 +78,10 @@ public void replaceAtHead(long key, Chain expect, Chain update) { store.replaceAtHead(key, expect, update); } + public void put(long key, Chain chain) { + store.put(key, chain); + } + @Override public void clear() { store.clear(); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java index f81b8a4f8c..37e9f04af5 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java @@ -177,6 +177,23 @@ public void replaceAtHead(K key, Chain expected, Chain replacement) { } } + public void put(K key, Chain chain) { + final Lock lock = heads.writeLock(); + lock.lock(); + try { + InternalChain current = heads.get(key); + if (current != null) { + replaceAtHead(key, current.detach(), chain); + } else { + for (Element x : chain) { + append(key, x.getPayload()); + } + } + } finally { + lock.unlock(); + } + } + public void clear() { heads.writeLock().lock(); try { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java index 7c46959403..4d173de3f2 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java @@ -147,6 +147,35 @@ public void replaceAtHead(long key, Chain expect, Chain update) { } } + public void put(long key, Chain chain) { + try { + segmentFor(key).put(key, chain); + } catch (OversizeMappingException e) { + if (handleOversizeMappingException(key)) { + try { + segmentFor(key).put(key, chain); + } catch (OversizeMappingException ex) { + //ignore + } + } + + writeLockAll(); + try { + do { + try { + segmentFor(key).put(key, chain); + } catch (OversizeMappingException ex) { + e = ex; + } + } while (handleOversizeMappingException(key)); + throw e; + } finally { + writeUnlockAll(); + } + } + } + + @Override public void clear() { for (OffHeapChainMap segment : segments) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java index cfd588b88e..25fbb33faf 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java +++ 
b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java @@ -323,6 +323,23 @@ public void testContinualAppendCausingEvictionIsStable() { } } + @Test + public void testPutWhenKeyIsNotNull() { + OffHeapChainMap map = new OffHeapChainMap(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); + map.append("key", buffer(3)); + map.put("key", chain(buffer(1), buffer(2))); + + assertThat(map.get("key"), contains(element(1), element(2))); + } + + @Test + public void testPutWhenKeyIsNull() { + OffHeapChainMap map = new OffHeapChainMap(new UnlimitedPageSource(new OffHeapBufferSource()), StringPortability.INSTANCE, minPageSize, maxPageSize, steal); + map.put("key", chain(buffer(1), buffer(2))); + + assertThat(map.get("key"), contains(element(1), element(2))); + } + private static ByteBuffer buffer(int i) { ByteBuffer buffer = ByteBuffer.allocate(i); while (buffer.hasRemaining()) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java index c23fcdab5f..19f4a34760 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java @@ -103,7 +103,6 @@ public Object answer(InvocationOnMock invocation) throws Throwable { }); when(store.handleOversizeMappingException(anyLong())).thenReturn(true); - ByteBuffer payload = createPayload(1L); store.append(1L, payload); From d8d02f9a78ccaf07263494c5a76602303dad50de Mon Sep 17 00:00:00 2001 From: Ludovic Orban Date: Thu, 7 Jul 2016 13:38:19 +0200 Subject: [PATCH 026/218] q&d --- .../config/ClusteredStoreConfiguration.java | 30 ++++++- .../ClusteredStoreConfigurationBuilder.java | 10 ++- .../ClusteringServiceConfigurationParser.java | 2 +- .../service/DefaultClusteringService.java | 5 +- .../client/internal/store/ClusteredStore.java | 11 ++- .../client/BasicClusteredCacheExpiryTest.java | 2 +- .../client/BasicClusteredCacheTest.java | 4 +- .../client/ClusteredCacheDestroyTest.java | 2 +- .../clustered/client/docs/GettingStarted.java | 8 +- .../service/DefaultClusteringServiceTest.java | 68 ++++++++-------- .../internal/store/ClusteredStoreTest.java | 2 +- .../store/EventualServerStoreProxyTest.java | 2 +- .../NoInvalidationServerStoreProxyTest.java | 2 +- .../store/StrongServerStoreProxyTest.java | 2 +- .../internal/ServerStoreConfiguration.java | 46 +++++------ .../src/test/java/org/ehcache/Clust.java | 54 +++++++++++++ .../clustered/BasicClusteredCacheOpsTest.java | 4 +- .../clustered/server/ServerStoreImpl.java | 4 +- .../server/EhcacheActiveEntityTest.java | 8 +- .../server/ServerStoreCompatibilityTest.java | 78 ++++++++++--------- 20 files changed, 222 insertions(+), 122 deletions(-) create mode 100644 clustered/integration-test/src/test/java/org/ehcache/Clust.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java b/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java index 7faa760d85..f30724c5d7 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java @@ -25,13 +25,17 @@ */ public class 
ClusteredStoreConfiguration implements ServiceConfiguration { + public static final Consistency DEFAULT_CONSISTENCY = Consistency.EVENTUAL; + public static final int DEFAULT_CONCURRENCY = 16; + private final Consistency consistency; + private final int concurrency; /** * Creates a new configuration with consistency set to {@link Consistency#EVENTUAL EVENTUAL}. */ public ClusteredStoreConfiguration() { - this(Consistency.EVENTUAL); + this(DEFAULT_CONSISTENCY, DEFAULT_CONCURRENCY); } /** @@ -40,7 +44,27 @@ public ClusteredStoreConfiguration() { * @param consistency the {@code Consistency} */ public ClusteredStoreConfiguration(Consistency consistency) { + this(consistency, DEFAULT_CONCURRENCY); + } + + /** + * Creates a new configuration with the provided concurrency. + * + * @param concurrency the concurrency + */ + public ClusteredStoreConfiguration(int concurrency) { + this(DEFAULT_CONSISTENCY, concurrency); + } + + /** + * Creates a new configuration with the provided {@link Consistency} and concurrency. + * + * @param consistency the {@code Consistency} + * @param concurrency the concurrency + */ + public ClusteredStoreConfiguration(Consistency consistency, int concurrency) { this.consistency = consistency; + this.concurrency = concurrency; } /** @@ -59,4 +83,8 @@ public Class getServiceType() { public Consistency getConsistency() { return consistency; } + + public int getConcurrency() { + return concurrency; + } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java b/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java index 66c2a12d65..8cb147a747 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java @@ -26,6 +26,7 @@ public class ClusteredStoreConfigurationBuilder implements Builder { private final Consistency consistency; + private final int concurrency; /** * Creates a new builder instance with the provided {@link Consistency} configured. 
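The new two-argument constructor pairs the existing consistency setting with a server store concurrency hint (defaulting to EVENTUAL and 16). A minimal sketch of a cache definition picking it up, mirroring the test updates later in this patch; the package names follow the module layout in the diffs, while the example class name and the concurrency value of 4 are illustrative:

    import org.ehcache.clustered.client.config.ClusteredStoreConfiguration;
    import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder;
    import org.ehcache.clustered.common.Consistency;
    import org.ehcache.config.CacheConfiguration;
    import org.ehcache.config.builders.CacheConfigurationBuilder;
    import org.ehcache.config.builders.ResourcePoolsBuilder;
    import org.ehcache.config.units.MemoryUnit;

    final class ConcurrencyConfigExample {

      static CacheConfiguration<Long, String> strongCacheConfig() {
        return CacheConfigurationBuilder
            .newCacheConfigurationBuilder(Long.class, String.class,
                ResourcePoolsBuilder.newResourcePoolsBuilder()
                    .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB)))
            // consistency plus the new per-store concurrency hint
            .add(new ClusteredStoreConfiguration(Consistency.STRONG, 4))
            .build();
      }
    }

The tests in this patch exercise the equivalent builder call, ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(...), which builds the same configuration.
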
@@ -33,12 +34,13 @@ public class ClusteredStoreConfigurationBuilder implements Builder parseServiceConfiguration(Element fragment) { if (CLUSTERED_STORE_ELEMENT_NAME.equals(fragment.getLocalName())) { if (fragment.hasAttribute(CONSISTENCY_ATTRIBUTE_NAME)) { - return new ClusteredStoreConfiguration(Consistency.valueOf(fragment.getAttribute("consistency").toUpperCase())); + return new ClusteredStoreConfiguration(Consistency.valueOf(fragment.getAttribute("consistency").toUpperCase()), 16); } else { return new ClusteredStoreConfiguration(); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 694e3500bc..4c770e3cda 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -340,7 +340,7 @@ protected boolean isStarted() { @Override public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifier cacheIdentifier, final Store.Configuration storeConfig, - Consistency configuredConsistency) throws CachePersistenceException { + Consistency configuredConsistency, int configuredConcurrency) throws CachePersistenceException { final String cacheId = cacheIdentifier.getId(); if (configuredConsistency == null) { @@ -372,7 +372,8 @@ public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifie null, // TODO: Need actual value type -- cache wrappers can wrap key/value types (storeConfig.getKeySerializer() == null ? null : storeConfig.getKeySerializer().getClass().getName()), (storeConfig.getValueSerializer() == null ? 
null : storeConfig.getValueSerializer().getClass().getName()), - configuredConsistency + configuredConsistency, + configuredConcurrency ); try { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index e6eaa1c9a0..89d73a3f68 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -557,7 +557,8 @@ public ClusteredStore createStore(final Configuration storeCo ClusteredStore store = new ClusteredStore(codec, resolver, timeSource); StatisticsManager.associate(store.clusteredStoreStatsSettings).withParent(store); - createdStores.put(store, new StoreConfig(cacheId, storeConfig, clusteredStoreConfiguration.getConsistency())); + createdStores.put(store, new StoreConfig(cacheId, storeConfig, clusteredStoreConfiguration.getConsistency(), clusteredStoreConfiguration.getConcurrency())); + return store; } @@ -686,11 +687,13 @@ private static class StoreConfig { private final ClusteredCacheIdentifier cacheIdentifier; private final Store.Configuration storeConfig; private final Consistency consistency; + private final int concurrency; - StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration storeConfig, Consistency consistency) { + StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration storeConfig, Consistency consistency, int concurrency) { this.cacheIdentifier = cacheIdentifier; this.storeConfig = storeConfig; this.consistency = consistency; + this.concurrency = concurrency; } public Configuration getStoreConfig() { @@ -704,6 +707,10 @@ public ClusteredCacheIdentifier getCacheIdentifier() { public Consistency getConsistency() { return consistency; } + + public int getConcurrency() { + return concurrency; + } } private static final class ClusteredStoreStatsSettings { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java index 62babd8882..992b0965ae 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java @@ -55,7 +55,7 @@ public class BasicClusteredCacheExpiryTest { ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .withExpiry(Expirations.timeToLiveExpiration(new Duration(1L, TimeUnit.MILLISECONDS))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); + .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 16))); @Before public void definePassthroughServer() throws Exception { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java index c08fd5dac8..ea6e3eb196 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java @@ -91,7 +91,7 @@ public void testClusteredCacheTwoClients() throws Exception { .withCache("clustered-cache", 
newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))) + .add(new ClusteredStoreConfiguration(Consistency.STRONG, 1))) ; final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); @@ -120,7 +120,7 @@ public void testClustered3TierCacheTwoClients() throws Exception { .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).offheap(1, MemoryUnit.MB) .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))) + .add(new ClusteredStoreConfiguration(Consistency.STRONG, 1))) ; final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java index 8e6b78909f..de3eb3adc8 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java @@ -64,7 +64,7 @@ public class ClusteredCacheDestroyTest { .withCache(CLUSTERED_CACHE, newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); + .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 1))); @Before public void definePassthroughServer() throws Exception { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java b/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java index 0e6de35959..66030740cf 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java @@ -133,7 +133,7 @@ public void explicitConsistencyConfiguration() throws Exception { CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) // <1> + .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 16)) // <1> .build(); Cache cache = cacheManager.createCache("clustered-cache", config); @@ -162,7 +162,7 @@ public void clusteredCacheTieredExample() throws Exception { ResourcePoolsBuilder.newResourcePoolsBuilder() .heap(2, MemoryUnit.MB) // <1> .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) // <2> - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 1)) .build(); Cache cache = cacheManager.createCache("clustered-cache", config); @@ -227,7 +227,7 @@ public void unknownClusteredCacheExample() CacheConfiguration 
cacheConfigDedicated = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) // <2> - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 1)) .build(); Cache cacheDedicated = cacheManager1.createCache("my-dedicated-cache", cacheConfigDedicated); // <3> @@ -243,7 +243,7 @@ public void unknownClusteredCacheExample() CacheConfiguration cacheConfigUnspecified = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clustered())) // <5> - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 1)) .build(); Cache cacheUnspecified = cacheManager2.createCache("my-dedicated-cache", cacheConfigUnspecified); // <6> diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java index b1169a3c72..a76c6d847c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java @@ -582,7 +582,7 @@ public void testGetServerStoreProxySharedAutoCreate() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = service.getServerStoreProxy( - getClusteredCacheIdentifier(service, cacheAlias), storeConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(service, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); @@ -643,7 +643,7 @@ public void testGetServerStoreProxySharedNoAutoCreateNonExistent() throws Except try { accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(accessService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); fail("Expecting CachePersistenceException"); } catch (CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString(" does not exist")); @@ -691,7 +691,7 @@ public void testGetServerStoreProxySharedNoAutoCreateExists() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy creationServerStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL); + getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL, 1); assertThat(creationServerStoreProxy.getCacheId(), is(cacheAlias)); creationService.stop(); @@ -723,7 +723,7 @@ public void testGetServerStoreProxySharedNoAutoCreateExists() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy accessServerStoreProxy = accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfiguration, 
Consistency.EVENTUAL); + getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfiguration, Consistency.EVENTUAL, 1); assertThat(accessServerStoreProxy.getCacheId(), is(cacheAlias)); activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -772,7 +772,7 @@ public void testGetServerStoreProxySharedAutoCreateTwice() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy firstServerStoreProxy = firstService.getServerStoreProxy( - getClusteredCacheIdentifier(firstService, cacheAlias), firstSharedStoreConfig, Consistency.EVENTUAL); + getClusteredCacheIdentifier(firstService, cacheAlias), firstSharedStoreConfig, Consistency.EVENTUAL, 1); assertThat(firstServerStoreProxy.getCacheId(), is(cacheAlias)); DefaultClusteringService secondService = new DefaultClusteringService(configuration); @@ -782,7 +782,7 @@ public void testGetServerStoreProxySharedAutoCreateTwice() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy secondServerStoreProxy = secondService.getServerStoreProxy( - getClusteredCacheIdentifier(secondService, cacheAlias), secondSharedStoreConfig, Consistency.EVENTUAL); + getClusteredCacheIdentifier(secondService, cacheAlias), secondSharedStoreConfig, Consistency.EVENTUAL, 1); assertThat(secondServerStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -837,7 +837,7 @@ public void testReleaseServerStoreProxyShared() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -887,7 +887,7 @@ public void testGetServerStoreProxyDedicatedAutoCreate() throws Exception { getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = service.getServerStoreProxy( - getClusteredCacheIdentifier(service, cacheAlias), storeConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(service, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); @@ -950,7 +950,7 @@ public void testGetServerStoreProxyDedicatedNoAutoCreateNonExistent() throws Exc try { accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(accessService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); fail("Expecting CachePersistenceException"); } catch (CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString(" does not exist")); @@ -1000,7 +1000,7 @@ public void testGetServerStoreProxyDedicatedNoAutoCreateExists() throws Exceptio getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy creationServerStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL); + getClusteredCacheIdentifier(creationService, 
cacheAlias), creationStoreConfig, Consistency.EVENTUAL, 1); assertThat(creationServerStoreProxy.getCacheId(), is(cacheAlias)); creationService.stop(); @@ -1032,7 +1032,7 @@ public void testGetServerStoreProxyDedicatedNoAutoCreateExists() throws Exceptio getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy accessServerStoreProxy = accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfiguration, Consistency.EVENTUAL, 1); assertThat(accessServerStoreProxy.getCacheId(), is(cacheAlias)); activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -1083,7 +1083,7 @@ public void testGetServerStoreProxyDedicatedAutoCreateTwice() throws Exception { getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy firstServerStoreProxy = firstService.getServerStoreProxy( - getClusteredCacheIdentifier(firstService, cacheAlias), firstSharedStoreConfig, Consistency.EVENTUAL); + getClusteredCacheIdentifier(firstService, cacheAlias), firstSharedStoreConfig, Consistency.EVENTUAL, 1); assertThat(firstServerStoreProxy.getCacheId(), is(cacheAlias)); DefaultClusteringService secondService = new DefaultClusteringService(configuration); @@ -1093,7 +1093,7 @@ public void testGetServerStoreProxyDedicatedAutoCreateTwice() throws Exception { getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy secondServerStoreProxy = secondService.getServerStoreProxy( - getClusteredCacheIdentifier(secondService, cacheAlias), secondSharedStoreConfig, Consistency.EVENTUAL); + getClusteredCacheIdentifier(secondService, cacheAlias), secondSharedStoreConfig, Consistency.EVENTUAL, 1); assertThat(secondServerStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -1153,7 +1153,7 @@ public void testReleaseServerStoreProxyDedicated() throws Exception { getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -1231,7 +1231,7 @@ public void testGetServerStoreProxySharedDestroy() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -1285,7 +1285,7 @@ public void testGetServerStoreProxyDedicatedDestroy() throws Exception { getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = creationService.getServerStoreProxy( - 
getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -1375,14 +1375,14 @@ public void testFullDestroyAll() throws Exception { getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, String.class); ServerStoreProxy sharedProxy = createService.getServerStoreProxy( - getClusteredCacheIdentifier(createService, "sharedCache"), sharedStoreConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(createService, "sharedCache"), sharedStoreConfiguration, Consistency.EVENTUAL, 1); assertThat(sharedProxy.getCacheId(), is("sharedCache")); Store.Configuration storeConfiguration = getDedicatedStoreConfig("serverResource2", serializationProvider, Long.class, String.class); ServerStoreProxy dedicatedProxy = createService.getServerStoreProxy( - getClusteredCacheIdentifier(createService, "dedicatedCache"), storeConfiguration, Consistency.EVENTUAL); + getClusteredCacheIdentifier(createService, "dedicatedCache"), storeConfiguration, Consistency.EVENTUAL, 1); assertThat(dedicatedProxy.getCacheId(), is("dedicatedCache")); createService.stop(); @@ -1474,7 +1474,7 @@ public void testStoreValidation_autoCreateConfigGood_autoCreateConfigBad() throw ClusteredCacheIdentifier clusteredCacheIdentifier = getClusteredCacheIdentifier(creationService, cacheAlias); - creationService.getServerStoreProxy(clusteredCacheIdentifier, createStoreConfig, Consistency.EVENTUAL); + creationService.getServerStoreProxy(clusteredCacheIdentifier, createStoreConfig, Consistency.EVENTUAL,1 ); creationService.stop(); @@ -1485,7 +1485,7 @@ public void testStoreValidation_autoCreateConfigGood_autoCreateConfigBad() throw getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, Long.class);//ValueType is invalid try { - accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfigBad, Consistency.EVENTUAL); + accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfigBad, Consistency.EVENTUAL, 1); fail("Expecting CachePersistenceException"); } catch(CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString("Existing ServerStore configuration is not compatible with the desired configuration")); @@ -1533,14 +1533,14 @@ public void testStoreValidation_autoCreateConfigGood_autoCreateConfigGood() thro Store.Configuration storeConfig = getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, String.class); - creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), storeConfig, Consistency.EVENTUAL); + creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), storeConfig, Consistency.EVENTUAL, 1); creationService.stop(); DefaultClusteringService accessService = new DefaultClusteringService(config); accessService.start(null); - accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), storeConfig, Consistency.EVENTUAL); + accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), storeConfig, Consistency.EVENTUAL, 1); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); assertThat(activeEntities.size(), is(1)); 
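All of these call sites reflect the widened ClusteringService contract: a server store proxy is now requested with both the consistency and the concurrency carried by the store configuration. Restated as a small sketch for readability (the wrapper class, method, and variable names are illustrative; only getServerStoreProxy and its parameters come from the diffs above):

    import org.ehcache.CachePersistenceException;
    import org.ehcache.clustered.client.internal.store.ServerStoreProxy;
    import org.ehcache.clustered.client.service.ClusteringService;
    import org.ehcache.clustered.client.service.ClusteringService.ClusteredCacheIdentifier;
    import org.ehcache.clustered.common.Consistency;
    import org.ehcache.core.spi.store.Store;

    final class ServerStoreProxyWiring {

      // Mirrors the call sites updated in the hunks above: the concurrency hint
      // travels alongside the consistency down to the clustering service, which
      // forwards both in the ServerStoreConfiguration it sends to the server.
      static <K, V> ServerStoreProxy proxyFor(ClusteringService service,
                                              ClusteredCacheIdentifier cacheIdentifier,
                                              Store.Configuration<K, V> storeConfig,
                                              Consistency consistency,
                                              int concurrency) throws CachePersistenceException {
        return service.getServerStoreProxy(cacheIdentifier, storeConfig, consistency, concurrency);
      }
    }
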
@@ -1586,7 +1586,7 @@ public void testStoreValidation_autoCreateConfigBad() throws Exception { ClusteredCacheIdentifier clusteredCacheIdentifier = getClusteredCacheIdentifier(creationService, cacheAlias); try { - creationService.getServerStoreProxy(clusteredCacheIdentifier, storeConfig, Consistency.EVENTUAL); + creationService.getServerStoreProxy(clusteredCacheIdentifier, storeConfig, Consistency.EVENTUAL, 1); fail("Expecting CachePersistenceException"); } catch(CachePersistenceException e) { //Expected @@ -1632,7 +1632,7 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigBad() thr Store.Configuration creationStoreConfig = getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, String.class); - creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL); + creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL, 1); ClusteringServiceConfiguration noAutoConfig = ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) @@ -1650,7 +1650,7 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigBad() thr getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, Long.class); try { - accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL); + accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL, 1); fail("Expecting CachePersistenceException"); } catch(CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString("Existing ServerStore configuration is not compatible with the desired configuration")); @@ -1699,7 +1699,7 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigGood() th Store.Configuration storeConfig = getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, String.class); - creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), storeConfig, Consistency.EVENTUAL); + creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), storeConfig, Consistency.EVENTUAL, 1); ClusteringServiceConfiguration noAutoConfig = @@ -1714,7 +1714,7 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigGood() th DefaultClusteringService accessService = new DefaultClusteringService(noAutoConfig); accessService.start(null); - accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), storeConfig, Consistency.EVENTUAL); + accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), storeConfig, Consistency.EVENTUAL, 1); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); assertThat(activeEntities.size(), is(1)); @@ -1759,7 +1759,7 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredDedicatedValidateS getDedicatedStoreConfig("serverResource1", serializationProvider, Long.class, String.class); creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, "cacheAlias"), createStoreConfig, Consistency.EVENTUAL); + getClusteredCacheIdentifier(creationService, "cacheAlias"), createStoreConfig, Consistency.EVENTUAL, 1); creationService.stop(); @@ -1779,7 +1779,7 @@ public void 
testStoreValidation_MismatchedPoolTypes_ConfiguredDedicatedValidateS try { accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL); + getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL, 1); fail("Expecting CachePersistenceException"); } catch (CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString("Existing ServerStore configuration is not compatible with the desired configuration")); @@ -1827,7 +1827,7 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredSharedValidateDedi getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, String.class); creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), createStoreConfig, Consistency.EVENTUAL); + getClusteredCacheIdentifier(creationService, cacheAlias), createStoreConfig, Consistency.EVENTUAL, 1); creationService.stop(); @@ -1847,7 +1847,7 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredSharedValidateDedi try { accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL); + getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL, 1); fail("Expecting CachePersistenceException"); } catch (CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString("Existing ServerStore configuration is not compatible with the desired configuration")); @@ -1931,7 +1931,7 @@ public void testGetServerStoreProxyReturnsEventualStore() throws Exception { when(storeConfig.getKeyType()).thenReturn(String.class); when(storeConfig.getValueType()).thenReturn(Object.class); - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.EVENTUAL); + ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.EVENTUAL, 1); assertThat(serverStoreProxy, instanceOf(EventualServerStoreProxy.class)); } @@ -1954,7 +1954,7 @@ public void testGetServerStoreProxyReturnsEventualStoreByDefault() throws Except when(storeConfig.getKeyType()).thenReturn(String.class); when(storeConfig.getValueType()).thenReturn(Object.class); - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.EVENTUAL); + ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.EVENTUAL, 1); assertThat(serverStoreProxy, instanceOf(EventualServerStoreProxy.class)); } @@ -1977,7 +1977,7 @@ public void testGetServerStoreProxyReturnsStrongStore() throws Exception { when(storeConfig.getKeyType()).thenReturn(String.class); when(storeConfig.getValueType()).thenReturn(Object.class); - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); + ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG, 1); assertThat(serverStoreProxy, instanceOf(StrongServerStoreProxy.class)); } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java index 824ac1e872..adff0716a9 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java +++ 
b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java @@ -99,7 +99,7 @@ public void setup() throws Exception { Long.class.getName(), String.class.getName(), Long.class.getName(), String.class.getName(), LongSerializer.class.getName(), StringSerializer.class.getName(), - null + null, 1 ); clientEntity.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); ServerStoreMessageFactory factory = new ServerStoreMessageFactory(CACHE_IDENTIFIER); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java index 347621cf9f..22bd27bf08 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java @@ -81,7 +81,7 @@ public static void setUp() throws Exception { ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), Consistency.STRONG); + .getName(), Consistency.STRONG, 1); clientEntity1.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); // required to attach the store to the client diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java index 1f9b9fe01c..567c9af212 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java @@ -71,7 +71,7 @@ public static void setUp() throws Exception { clientEntity.createCache(CACHE_IDENTIFIER, new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), null)); + .getName(), null, 1)); serverStoreProxy = new NoInvalidationServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity); } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java index cc0be0eec9..e4c390e405 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java @@ -89,7 +89,7 @@ public static void setUp() throws Exception { ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), Consistency.STRONG); + .getName(), Consistency.STRONG, 1); clientEntity1.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); // required to attach the store to the client diff --git 
a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java index 0e30d7ef94..f4ab359deb 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java @@ -39,6 +39,7 @@ public class ServerStoreConfiguration implements Serializable { private final String keySerializerType; private final String valueSerializerType; private final Consistency consistency; + private final int concurrency; // TODO: Loader/Writer configuration ... public ServerStoreConfiguration(PoolAllocation poolAllocation, @@ -48,7 +49,8 @@ public ServerStoreConfiguration(PoolAllocation poolAllocation, String actualValueType, String keySerializerType, String valueSerializerType, - Consistency consistency) { + Consistency consistency, + int concurrency) { this.poolAllocation = poolAllocation; this.storedKeyType = storedKeyType; this.storedValueType = storedValueType; @@ -57,6 +59,7 @@ public ServerStoreConfiguration(PoolAllocation poolAllocation, this.keySerializerType = keySerializerType; this.valueSerializerType = valueSerializerType; this.consistency = consistency; + this.concurrency = concurrency; } public PoolAllocation getPoolAllocation() { @@ -91,17 +94,20 @@ public Consistency getConsistency() { return consistency; } - public boolean isCompatible(ServerStoreConfiguration otherConfiguration, StringBuilder sb) { + public int getConcurrency() { + return concurrency; + } + public boolean isCompatible(ServerStoreConfiguration otherConfiguration, StringBuilder sb) { boolean isCompatible; PoolAllocation otherPoolAllocation = otherConfiguration.getPoolAllocation(); isCompatible = comparePoolAllocationType(sb, otherPoolAllocation); - if(isCompatible) { - if( !(otherPoolAllocation instanceof PoolAllocation.Unknown) ) { + if (isCompatible) { + if (!(otherPoolAllocation instanceof PoolAllocation.Unknown)) { if (poolAllocation instanceof PoolAllocation.Dedicated) { - PoolAllocation.Dedicated serverDedicatedAllocation = (PoolAllocation.Dedicated)poolAllocation; - PoolAllocation.Dedicated clientDedicatedAllocation = (PoolAllocation.Dedicated)otherPoolAllocation; + PoolAllocation.Dedicated serverDedicatedAllocation = (PoolAllocation.Dedicated) poolAllocation; + PoolAllocation.Dedicated clientDedicatedAllocation = (PoolAllocation.Dedicated) otherPoolAllocation; if (compareField(sb, "resourcePoolDedicatedResourceName", serverDedicatedAllocation.getResourceName(), clientDedicatedAllocation.getResourceName())) { @@ -114,8 +120,8 @@ public boolean isCompatible(ServerStoreConfiguration otherConfiguration, StringB } } else if (poolAllocation instanceof PoolAllocation.Shared) { isCompatible &= compareField(sb, "resourcePoolSharedPoolName", - ((PoolAllocation.Shared)poolAllocation).getResourcePoolName(), - ((PoolAllocation.Shared)otherPoolAllocation).getResourcePoolName()); + ((PoolAllocation.Shared) poolAllocation).getResourcePoolName(), + ((PoolAllocation.Shared) otherPoolAllocation).getResourcePoolName()); } } } @@ -125,13 +131,13 @@ public boolean isCompatible(ServerStoreConfiguration otherConfiguration, StringB isCompatible &= compareField(sb, "actualValueType", actualValueType, otherConfiguration.getActualValueType()); isCompatible &= compareField(sb, "keySerializerType", keySerializerType, otherConfiguration.getKeySerializerType()); isCompatible 
&= compareField(sb, "valueSerializerType", valueSerializerType, otherConfiguration.getValueSerializerType()); - isCompatible &= compareConsistencyField(sb, consistency, otherConfiguration.getConsistency()); + isCompatible &= compareField(sb, "consistency", consistency, otherConfiguration.getConsistency()); + isCompatible &= compareField(sb, "concurrency", concurrency, otherConfiguration.getConcurrency()); return isCompatible; } - private boolean comparePoolAllocationType(StringBuilder sb, PoolAllocation clientPoolAllocation) { - + private boolean comparePoolAllocationType(StringBuilder sb, PoolAllocation clientPoolAllocation) { if (clientPoolAllocation instanceof PoolAllocation.Unknown || poolAllocation.getClass().getName().equals(clientPoolAllocation.getClass().getName())) { return true; } @@ -140,25 +146,15 @@ private boolean comparePoolAllocationType(StringBuilder sb, PoolAllocation clien return false; } - private String getClassName(Object obj) { - if(obj != null) { + private static String getClassName(Object obj) { + if (obj != null) { return obj.getClass().getName(); } else { return null; } } - private boolean compareConsistencyField(StringBuilder sb, Consistency serverConsistencyValue, Consistency clientConsistencyValue) { - if((serverConsistencyValue == null && clientConsistencyValue == null) - || (serverConsistencyValue != null && serverConsistencyValue.equals(clientConsistencyValue))) { - return true; - } - - appendFault(sb, "consistencyType", serverConsistencyValue, clientConsistencyValue); - return false; - } - - private boolean compareField(StringBuilder sb, String fieldName, String serverConfigValue, String clientConfigValue) { + private static boolean compareField(StringBuilder sb, String fieldName, Object serverConfigValue, Object clientConfigValue) { if ((serverConfigValue == null && clientConfigValue == null) || (serverConfigValue != null && serverConfigValue.equals(clientConfigValue))) { return true; @@ -168,7 +164,7 @@ private boolean compareField(StringBuilder sb, String fieldName, String serverCo return false; } - private void appendFault(StringBuilder sb, String fieldName, Object serverConfigValue, Object clientConfigValue) { + private static void appendFault(StringBuilder sb, String fieldName, Object serverConfigValue, Object clientConfigValue) { sb.append("\n\t").append(fieldName) .append(" existing: ").append(serverConfigValue) .append(", desired: ").append(clientConfigValue); diff --git a/clustered/integration-test/src/test/java/org/ehcache/Clust.java b/clustered/integration-test/src/test/java/org/ehcache/Clust.java new file mode 100644 index 0000000000..ba9847270a --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/Clust.java @@ -0,0 +1,54 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache; + +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.Test; + +import java.net.URI; + +/** + * @author Ludovic Orban + */ +public class Clust { + + @Test + public void works() throws Exception { + CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("clustered-cache-works", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 16, MemoryUnit.MB)) + ) +// .add(new ClusteredStoreConfiguration(Consistency.STRONG, 1024)) + ) + .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost:9510/my-application")) + .autoCreate() +// .defaultServerResource("primary-server-resource") +// .resourcePool("resource-pool-a", 28, MemoryUnit.MB) + ); + + PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); + + cacheManager.close(); + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java index 4fd8dcb7e2..e191400143 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java @@ -113,7 +113,7 @@ public void basicCacheCAS() throws Exception { .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))); + .add(new ClusteredStoreConfiguration(Consistency.STRONG, 1))); final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); try { @@ -147,7 +147,7 @@ public void basicClusteredBulk() throws Exception { .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))); + .add(new ClusteredStoreConfiguration(Consistency.STRONG, 1))); final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); try { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java index 2f2724e395..47c05c76b5 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java @@ -29,8 +29,6 @@ @CommonComponent public class ServerStoreImpl implements ServerStore { - private static final int OFFHEAP_CHAIN_SEGMENTS = 16; - private final ServerStoreConfiguration storeConfiguration; private final PageSource pageSource; private 
final OffHeapServerStore store; @@ -38,7 +36,7 @@ public class ServerStoreImpl implements ServerStore { public ServerStoreImpl(ServerStoreConfiguration storeConfiguration, PageSource pageSource) { this.storeConfiguration = storeConfiguration; this.pageSource = pageSource; - this.store = new OffHeapServerStore(pageSource, OFFHEAP_CHAIN_SEGMENTS); + this.store = new OffHeapServerStore(pageSource, storeConfiguration.getConcurrency()); } public void setEvictionListener(ServerStoreEvictionListener listener) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 822d13256b..682b7bcadf 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -2590,8 +2590,14 @@ private static final class ServerStoreConfigBuilder { private String keySerializerType; private String valueSerializerType; private Consistency consistency; + private int concurrency; + ServerStoreConfigBuilder concurrency(int concurrency) { + this.concurrency = concurrency; + return this; + } + ServerStoreConfigBuilder consistency(Consistency consistency) { this.consistency = consistency; return this; @@ -2644,7 +2650,7 @@ ServerStoreConfigBuilder setValueSerializerType(Class valueSerializerType) { ServerStoreConfiguration build() { return new ServerStoreConfiguration(poolAllocation, storedKeyType, storedValueType, - actualKeyType, actualValueType, keySerializerType, valueSerializerType, consistency); + actualKeyType, actualValueType, keySerializerType, valueSerializerType, consistency, concurrency); } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java index 24049b0f5d..33476bbf03 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java @@ -17,16 +17,13 @@ package org.ehcache.clustered.server; import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.PoolAllocation.Dedicated; import org.ehcache.clustered.common.PoolAllocation.Shared; -import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; import org.ehcache.clustered.common.PoolAllocation.Unknown; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; import org.junit.Test; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; @@ -58,7 +55,8 @@ public void testStoredKeyTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, String.class.getName(), @@ -67,7 +65,8 @@ public void testStoredKeyTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - 
Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -88,7 +87,8 @@ public void testStoredValueTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -97,7 +97,8 @@ public void testStoredValueTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -118,7 +119,8 @@ public void testStoredActualKeyTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -127,7 +129,8 @@ public void testStoredActualKeyTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -148,7 +151,8 @@ public void testStoredActualValueTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -157,7 +161,8 @@ public void testStoredActualValueTypeMismatch() { Long.class.getName(), KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -178,7 +183,8 @@ public void testKeySerializerTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -187,7 +193,8 @@ public void testKeySerializerTypeMismatch() { ACTUAL_VALUE_TYPE, Double.class.getName(), VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -208,7 +215,8 @@ public void testValueSerializerTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, + 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -217,7 +225,7 @@ public void testValueSerializerTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, Double.class.getName(), - Consistency.EVENTUAL); + Consistency.EVENTUAL, 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -238,7 +246,7 @@ public void testConsitencyMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -247,7 +255,7 @@ public void testConsitencyMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreCompatibility serverStoreCompatibility = new 
ServerStoreCompatibility(); @@ -268,7 +276,7 @@ public void testDedicatedPoolResourceTooBig() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Dedicated("primary",8), STORED_KEY_TYPE, @@ -277,7 +285,7 @@ public void testDedicatedPoolResourceTooBig() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -298,7 +306,7 @@ public void testDedicatedPoolResourceTooSmall() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Dedicated("primary",2), STORED_KEY_TYPE, @@ -307,7 +315,7 @@ public void testDedicatedPoolResourceTooSmall() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -328,7 +336,7 @@ public void testDedicatedPoolResourceNameMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Dedicated("primaryBad",4), STORED_KEY_TYPE, @@ -337,7 +345,7 @@ public void testDedicatedPoolResourceNameMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -358,7 +366,7 @@ public void testSharedPoolResourceNameMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Shared("sharedPoolBad"), STORED_KEY_TYPE, @@ -367,7 +375,7 @@ public void testSharedPoolResourceNameMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -389,7 +397,7 @@ public void testAllResourceParametersMatch() throws Exception ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -398,7 +406,7 @@ public void testAllResourceParametersMatch() throws Exception ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL); + Consistency.EVENTUAL, 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -414,7 +422,7 @@ public void testPoolResourceTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(SHARED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -423,7 +431,7 @@ public void testPoolResourceTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -444,7 +452,7 @@ public void testClientStoreConfigurationUnknownPoolResource() 
throws InvalidServ ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(UNKNOWN_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -453,7 +461,7 @@ public void testClientStoreConfigurationUnknownPoolResource() throws InvalidServ ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -470,7 +478,7 @@ public void testServerStoreConfigurationUnknownPoolResourceInvalidKeyType() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(UNKNOWN_POOL_ALLOCATION, String.class.getName(), @@ -479,7 +487,7 @@ public void testServerStoreConfigurationUnknownPoolResourceInvalidKeyType() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG); + Consistency.STRONG, 1); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility();

From c7dd08f1d463f9f7eb463d7e3bc9f1f213b68d21 Mon Sep 17 00:00:00 2001
From: Ludovic Orban
Date: Wed, 13 Jul 2016 20:37:02 +0200
Subject: [PATCH 027/218] Prototype of improved stats support: sampled value stats + stat name discriminator

- move tiering stats creation into the store providers
- add a getAndFault TIMEOUT result
- clean up stats tags, discriminator and settings object
- expose a tier eviction stat
- add a cluster eviction stat that combines server-side eviction and invalidation
- add cluster resource usage stats

---
 build.gradle | 4 +-
 .../client/internal/store/ClusteredStore.java | 118 ++++++-
 .../main/java/org/ehcache/core/Ehcache.java | 31 +-
 .../AuthoritativeTierOperationOutcomes.java | 6 +-
 .../statistics/TierOperationStatistic.java | 234 ++++++++++++++
 .../internal/store/disk/OffHeapDiskStore.java | 52 +++-
 .../impl/internal/store/heap/OnHeapStore.java | 150 +++++++--
 .../store/offheap/AbstractOffHeapStore.java | 41 ++-
 .../internal/store/offheap/OffHeapStore.java | 74 ++++-
 .../internal/store/tiering/TieredStore.java | 22 +-
 .../java/org/ehcache/docs/GettingStarted.java | 17 +
 .../statistics/EhcacheStatistics.java | 291 ------------------
 .../statistics/EhcacheStatisticsProvider.java | 8 +-
 .../statistics/StandardEhcacheStatistics.java | 182 +++++++++++
 .../StandardOperationStatistic.java | 130 --------
 .../java/org/ehcache/docs/ManagementTest.java | 34 +-
 .../EhcacheStatisticsProviderTest.java | 4 +-
 17 files changed, 862 insertions(+), 536 deletions(-)
 create mode 100644 core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java
 delete mode 100644 management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatistics.java
 create mode 100644 management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java
 delete mode 100755 management/src/main/java/org/ehcache/management/providers/statistics/StandardOperationStatistic.java

diff --git a/build.gradle b/build.gradle
index 379f474f6e..effe4b6fda 100644
--- a/build.gradle
+++ b/build.gradle
@@ -20,7 +20,8 @@ ext { // Third parties offheapVersion = '2.2.2' - statisticVersion = '1.1.0' + managementVersion = '5.0.0.beta' + statisticVersion = '1.1-SNAPSHOT' jcacheVersion = '1.0.0' slf4jVersion = '1.7.7' sizeofVersion = '0.3.0'
@@ -58,6 +59,7 @@ subprojects { targetCompatibility = 1.6 repositories {
mavenLocal() mavenCentral() maven { url "http://repo.terracotta.org/maven2" } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index 89d73a3f68..f807f5e5d5 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -46,9 +46,11 @@ import org.ehcache.core.spi.store.events.StoreEventSource; import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; +import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; +import org.ehcache.core.statistics.TierOperationStatistic; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.spi.persistence.StateRepository; @@ -65,6 +67,8 @@ import org.terracotta.statistics.observer.OperationObserver; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -72,11 +76,13 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.Callable; import java.util.concurrent.TimeoutException; import static java.util.Collections.singleton; import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; +import static org.ehcache.core.statistics.TierOperationStatistic.set; import static org.terracotta.statistics.StatisticBuilder.operation; /** @@ -84,7 +90,7 @@ */ public class ClusteredStore implements AuthoritativeTier { - private static final String STATISTICS_TAG = "clustered-store"; + private static final String STATISTICS_TAG = "Clustered"; private final OperationsCodec codec; private final ChainResolver resolver; @@ -94,6 +100,7 @@ public class ClusteredStore implements AuthoritativeTier { private volatile ServerStoreProxy storeProxy; private volatile InvalidationValve invalidationValve; + private final ClusteredStoreStatsSettings clusteredStoreStatsSettings; private final OperationObserver getObserver; private final OperationObserver putObserver; private final OperationObserver removeObserver; @@ -103,13 +110,15 @@ public class ClusteredStore implements AuthoritativeTier { private final OperationObserver conditionalReplaceObserver; // Needed for JSR-107 compatibility even if unused private final OperationObserver evictionObserver; + private final OperationObserver getAndFaultObserver; - private final ClusteredStoreStatsSettings clusteredStoreStatsSettings; private ClusteredStore(final OperationsCodec codec, final ChainResolver resolver, TimeSource timeSource) { this.codec = codec; this.resolver = resolver; this.timeSource = timeSource; + this.clusteredStoreStatsSettings = new ClusteredStoreStatsSettings(this); + StatisticsManager.associate(clusteredStoreStatsSettings).withParent(this); this.getObserver = operation(StoreOperationOutcomes.GetOutcome.class).of(this).named("get").tag(STATISTICS_TAG).build(); this.putObserver = 
operation(StoreOperationOutcomes.PutOutcome.class).of(this).named("put").tag(STATISTICS_TAG).build(); @@ -119,8 +128,36 @@ private ClusteredStore(final OperationsCodec codec, final ChainResolver tags = new HashSet(Arrays.asList(STATISTICS_TAG, "tier")); + Map properties = new HashMap(); + properties.put("discriminator", STATISTICS_TAG); + StatisticsManager.createPassThroughStatistic(this, "mappings", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + return -1L; + } + }); + StatisticsManager.createPassThroughStatistic(this, "maxMappings", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + return -1L; + } + }); + StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + return -1L; + } + }); + StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + return -1L; + } + }); - this.clusteredStoreStatsSettings = new ClusteredStoreStatsSettings(this); } /** @@ -476,7 +513,21 @@ public List getConfigurationChangeListeners() @Override public ValueHolder getAndFault(K key) throws StoreAccessException { - return get(key); + getAndFaultObserver.begin(); + V value; + try { + value = getInternal(key); + } catch (TimeoutException e) { + getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT); + return null; + } + if(value == null) { + getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); + return null; + } else { + getAndFaultObserver.end(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT); + return new ClusteredValueHolder(value); + } } @Override @@ -514,16 +565,38 @@ public static class Provider implements Store.Provider, AuthoritativeTier.Provid private volatile ClusteringService clusteringService; private final Map, StoreConfig> createdStores = new ConcurrentWeakIdentityHashMap, StoreConfig>(); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); @Override public ClusteredStore createStore(final Configuration storeConfig, final ServiceConfiguration... 
serviceConfigs) { + ClusteredStore store = createStoreInternal(storeConfig, serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS)); + }}, "get", 1000, "get"); + StatisticsManager.associate(get).withParent(store); + tieredOps.add(get); + + TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + }}, "eviction", 1000, "eviction"); + StatisticsManager.associate(evict).withParent(store); + tieredOps.add(evict); + + tierOperationStatistics.put(store, tieredOps); + return store; + } - DefaultCacheLoaderWriterConfiguration loaderWriterConfiguration = findSingletonAmongst(DefaultCacheLoaderWriterConfiguration.class, (Object[])serviceConfigs); + private ClusteredStore createStoreInternal(Configuration storeConfig, Object[] serviceConfigs) { + DefaultCacheLoaderWriterConfiguration loaderWriterConfiguration = findSingletonAmongst(DefaultCacheLoaderWriterConfiguration.class, serviceConfigs); if (loaderWriterConfiguration != null) { throw new IllegalStateException("CacheLoaderWriter is not supported with clustered tiers"); } - CacheEventListenerConfiguration eventListenerConfiguration = findSingletonAmongst(CacheEventListenerConfiguration.class, (Object[])serviceConfigs); + CacheEventListenerConfiguration eventListenerConfiguration = findSingletonAmongst(CacheEventListenerConfiguration.class, serviceConfigs); if (eventListenerConfiguration != null) { throw new IllegalStateException("CacheEventListener is not supported with clustered tiers"); } @@ -543,11 +616,11 @@ public ClusteredStore createStore(final Configuration storeCo throw new IllegalStateException(Provider.class.getCanonicalName() + ".createStore can not create clustered tier with multiple clustered resources"); } - ClusteredStoreConfiguration clusteredStoreConfiguration = findSingletonAmongst(ClusteredStoreConfiguration.class, (Object[])serviceConfigs); + ClusteredStoreConfiguration clusteredStoreConfiguration = findSingletonAmongst(ClusteredStoreConfiguration.class, serviceConfigs); if (clusteredStoreConfiguration == null) { clusteredStoreConfiguration = new ClusteredStoreConfiguration(); } - ClusteredCacheIdentifier cacheId = findSingletonAmongst(ClusteredCacheIdentifier.class, (Object[]) serviceConfigs); + ClusteredCacheIdentifier cacheId = findSingletonAmongst(ClusteredCacheIdentifier.class, serviceConfigs); TimeSource timeSource = serviceProvider.getService(TimeSourceService.class).getTimeSource(); @@ -610,6 +683,8 @@ public void initStore(final Store resource) { clusteredStore.storeProxy.addInvalidationListener(new ServerStoreProxy.InvalidationListener() { @Override public void onInvalidateHash(long hash) { + Enum result = StoreOperationOutcomes.EvictionOutcome.SUCCESS; + clusteredStore.evictionObserver.begin(); if 
(clusteredStore.invalidationValve != null) { try { LOGGER.debug("CLIENT: calling invalidation valve for hash {}", hash); @@ -617,8 +692,10 @@ public void onInvalidateHash(long hash) { } catch (StoreAccessException sae) { //TODO: what should be done here? delegate to resilience strategy? LOGGER.error("Error invalidating hash {}", hash, sae); + result = StoreOperationOutcomes.EvictionOutcome.FAILURE; } } + clusteredStore.evictionObserver.end(result); } @Override @@ -668,7 +745,27 @@ public void stop() { @Override public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - return createStore(storeConfig, serviceConfigs); + ClusteredStore authoritativeTier = createStoreInternal(storeConfig, serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, authoritativeTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS)); + }}, "get", 1000, "getAndFault"); + StatisticsManager.associate(get).withParent(authoritativeTier); + tieredOps.add(get); + + + TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, authoritativeTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + }}, "eviction", 1000, "eviction"); + StatisticsManager.associate(evict).withParent(authoritativeTier); + tieredOps.add(evict); + + + tierOperationStatistics.put(authoritativeTier, tieredOps); + return authoritativeTier; } @Override @@ -720,6 +817,9 @@ private static final class ClusteredStoreStatsSettings { ClusteredStoreStatsSettings(ClusteredStore store) { this.authoritativeTier = store; } + + //@ContextAttribute("tags") private final Set tags = new HashSet(Arrays.asList("store")); + @ContextAttribute("discriminator") private final String discriminator = STATISTICS_TAG; } } diff --git a/core/src/main/java/org/ehcache/core/Ehcache.java b/core/src/main/java/org/ehcache/core/Ehcache.java index c7537bcc46..92d8b30b13 100644 --- a/core/src/main/java/org/ehcache/core/Ehcache.java +++ b/core/src/main/java/org/ehcache/core/Ehcache.java @@ -45,13 +45,13 @@ import org.ehcache.core.spi.store.Store.ReplaceStatus; import org.ehcache.core.statistics.CacheOperationOutcomes.ConditionalRemoveOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.GetAllOutcome; -import org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.PutAllOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.PutIfAbsentOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.PutOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveAllOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.ReplaceOutcome; +import org.ehcache.core.statistics.StoreOperationOutcomes; 
import org.ehcache.expiry.Expiry; import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; @@ -92,7 +92,7 @@ public class Ehcache implements InternalCache { private final Jsr107CacheImpl jsr107Cache; protected final Logger logger; - private final OperationObserver getObserver = operation(GetOutcome.class).named("get").of(this).tag("cache").build(); + private final OperationObserver getObserver = operation(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.class).named("get").of(this).tag("cache").build(); private final OperationObserver getAllObserver = operation(GetAllOutcome.class).named("getAll").of(this).tag("cache").build(); private final OperationObserver putObserver = operation(PutOutcome.class).named("put").of(this).tag("cache").build(); private final OperationObserver putAllObserver = operation(PutAllOutcome.class).named("putAll").of(this).tag("cache").build(); @@ -120,6 +120,7 @@ public Ehcache(CacheConfiguration configuration, final Store store, this.store = store; runtimeConfiguration.addCacheConfigurationListener(store.getConfigurationChangeListeners()); StatisticsManager.associate(store).withParent(this); + if (store instanceof RecoveryCache) { this.resilienceStrategy = new LoggingRobustResilienceStrategy(castToRecoveryCache(store)); } else { @@ -168,17 +169,17 @@ public V get(final K key) { // Check for expiry first if (valueHolder == null) { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); return null; } else { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER); return valueHolder.value(); } } catch (StoreAccessException e) { try { return resilienceStrategy.getFailure(key, e); } finally { - getObserver.end(GetOutcome.FAILURE); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); } } } @@ -735,9 +736,9 @@ public void compute(K key, final BiFunction c @Override public V apply(K mappedKey, V mappedValue) { if (mappedValue == null) { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); } else { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER); } V newValue = computeFunction.apply(mappedKey, mappedValue); @@ -786,17 +787,17 @@ public V apply(K mappedKey, V mappedValue) { } }); } catch (StoreAccessException e) { - getObserver.end(GetOutcome.FAILURE); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); removeObserver.end(RemoveOutcome.FAILURE); throw new RuntimeException(e); } V returnValue = existingValue.get(); if (returnValue != null) { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER); removeObserver.end(RemoveOutcome.SUCCESS); } else { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); } return returnValue; } @@ -821,17 +822,17 @@ public V apply(K mappedKey, V mappedValue) { } }); } catch (StoreAccessException e) { - getObserver.end(GetOutcome.FAILURE); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); putObserver.end(PutOutcome.FAILURE); throw new 
RuntimeException(e); } V returnValue = existingValue.get(); if (returnValue != null) { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER); putObserver.end(PutOutcome.UPDATED); } else { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); putObserver.end(PutOutcome.PUT); } return returnValue; @@ -902,12 +903,12 @@ public Entry next() { if (!quiet) getObserver.begin(); if (nextException == null) { - if (!quiet) getObserver.end(GetOutcome.HIT_NO_LOADER); + if (!quiet) getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER); current = next; advance(); return new ValueHolderBasedEntry(current); } else { - if (!quiet) getObserver.end(GetOutcome.FAILURE); + if (!quiet) getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); StoreAccessException cae = nextException; nextException = null; return resilienceStrategy.iteratorFailure(cae); diff --git a/core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java b/core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java index 8c044e2143..500c1eda2e 100644 --- a/core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java +++ b/core/src/main/java/org/ehcache/core/statistics/AuthoritativeTierOperationOutcomes.java @@ -31,7 +31,11 @@ enum GetAndFaultOutcome implements AuthoritativeTierOperationOutcomes { /** * miss */ - MISS + MISS, + /** + * timeout + */ + TIMEOUT } /** diff --git a/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java b/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java new file mode 100644 index 0000000000..9adcb17160 --- /dev/null +++ b/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java @@ -0,0 +1,234 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.ehcache.core.statistics;
+
+import org.terracotta.context.ContextManager;
+import org.terracotta.context.TreeNode;
+import org.terracotta.context.annotations.ContextAttribute;
+import org.terracotta.context.query.Matcher;
+import org.terracotta.context.query.Matchers;
+import org.terracotta.context.query.Query;
+import org.terracotta.statistics.OperationStatistic;
+import org.terracotta.statistics.ValueStatistic;
+import org.terracotta.statistics.observer.ChainedOperationObserver;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import static org.terracotta.context.query.Matchers.attributes;
+import static org.terracotta.context.query.Matchers.context;
+import static org.terracotta.context.query.Matchers.hasAttribute;
+import static org.terracotta.context.query.Matchers.identifier;
+import static org.terracotta.context.query.Matchers.subclassOf;
+import static org.terracotta.context.query.Queries.self;
+import static org.terracotta.context.query.QueryBuilder.queryBuilder;
+
+/**
+ * @author Ludovic Orban
+ */
+@ContextAttribute("this")
+public class TierOperationStatistic<S extends Enum<S>, D extends Enum<D>> implements OperationStatistic<D> {
+
+  @ContextAttribute("name") public final String name;
+  @ContextAttribute("tags") public final Set<String> tags;
+  @ContextAttribute("properties") public final Map<String, Object> properties;
+  @ContextAttribute("type") public final Class<D> type;
+
+  private final Class<D> aliasing;
+  private final OperationStatistic<S> operationStatistic;
+  private final HashMap<D, Set<S>> xlatMap;
+
+  public TierOperationStatistic(Class<D> aliasing, Class<S> aliased, Object tier, HashMap<D, Set<S>> xlatMap, String sourceOperationName, int priority, String targetOperationName) {
+    this.aliasing = aliasing;
+    this.operationStatistic = TierOperationStatistic.findOperationStat(tier, targetOperationName);
+    this.xlatMap = xlatMap;
+    this.name = sourceOperationName;
+    this.tags = new HashSet<String>();
+    this.tags.add("tier");
+    this.properties = new HashMap<String, Object>();
+    this.properties.put("priority", priority);
+    String discriminator = TierOperationStatistic.findDiscriminator(tier);
+    if (discriminator != null) {
+      this.properties.put("discriminator", discriminator);
+    }
+    this.type = aliasing;
+
+    EnumSet<D> ds = EnumSet.allOf(aliasing);
+    for (D d : ds) {
+      if (!xlatMap.containsKey(d)) {
+        throw new IllegalArgumentException("xlatMap does not contain key " + d);
+      }
+    }
+
+    Set<S> allAliasedValues = new HashSet<S>();
+    Collection<Set<S>> values = xlatMap.values();
+    for (Set<S> value : values) {
+      allAliasedValues.addAll(value);
+    }
+    Set<S> allMissingValues = new HashSet<S>(EnumSet.allOf(aliased));
+    allMissingValues.removeAll(allAliasedValues);
+    if (!allMissingValues.isEmpty()) {
+      throw new IllegalArgumentException("xlatMap does not contain values " + allMissingValues);
+    }
+  }
+
+  @Override
+  public Class<D> type() {
+    return aliasing;
+  }
+
+  @Override
+  public ValueStatistic<Long> statistic(D result) {
+    return operationStatistic.statistic(xlatMap.get(result));
+  }
+
+  @Override
+  public ValueStatistic<Long> statistic(Set<D> results) {
+    Set<S> xlated = new HashSet<S>();
+    for (D result : results) {
+      xlated.addAll(xlatMap.get(result));
+    }
+    return operationStatistic.statistic(xlated);
+  }
+
+  @Override
+  public long count(D type) {
+    long value = 0L;
+    Set<S> s = xlatMap.get(type);
+    for (S s1 : s) {
+      value += operationStatistic.count(s1);
+    }
+    return value;
+  }
+
+  @Override
+  public long sum(Set<D> types) {
+    Set<S> xlated = new HashSet<S>();
+    for (D type : types) {
+      xlated.addAll(xlatMap.get(type));
+    }
+    return operationStatistic.sum(xlated);
+  }
+
+  @Override
+  public long sum() {
+    return operationStatistic.sum();
+  }
+
+  @Override
+  public void addDerivedStatistic(final ChainedOperationObserver<? super D> derived) {
+    operationStatistic.addDerivedStatistic(new ChainedOperationObserver<S>() {
+      @Override
+      public void begin(long time) {
+        derived.begin(time);
+      }
+
+      @Override
+      public void end(long time, S result) {
+        derived.end(time, (D) result);
+      }
+
+      @Override
+      public void end(long time, S result, long... parameters) {
+        derived.end(time, (D) result, parameters);
+      }
+    });
+  }
+
+  @Override
+  public void removeDerivedStatistic(ChainedOperationObserver<? super D> derived) {
+    operationStatistic.removeDerivedStatistic((ChainedOperationObserver<? super S>) derived);
+  }
+
+  @Override
+  public void begin() {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void end(D result) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void end(D result, long... parameters) {
+    throw new UnsupportedOperationException();
+  }
+
+  private static String findDiscriminator(Object rootNode) {
+    Set<TreeNode> results = queryBuilder().chain(self())
+        .children().filter(
+            context(attributes(Matchers.allOf(
+                hasAttribute("discriminator", new Matcher<Object>() {
+                  @Override
+                  protected boolean matchesSafely(Object object) {
+                    return object instanceof String;
+                  }
+                }))))).build().execute(Collections.singleton(ContextManager.nodeFor(rootNode)));
+
+    if (results.size() > 1) {
+      throw new IllegalStateException("More than one discriminator attribute found");
+    } else if (results.isEmpty()) {
+      return null;
+    } else {
+      TreeNode node = results.iterator().next();
+      return (String) node.getContext().attributes().get("discriminator");
+    }
+  }
+
+  private static OperationStatistic findOperationStat(Object rootNode, final String statName) {
+    Query q = queryBuilder().chain(self())
+        .descendants().filter(context(identifier(subclassOf(OperationStatistic.class)))).build();
+
+    Set<TreeNode> operationStatisticNodes = q.execute(Collections.singleton(ContextManager.nodeFor(rootNode)));
+    Set<TreeNode> result = queryBuilder()
+        .filter(
+            context(attributes(Matchers.<Map<String, Object>>allOf(
+                hasAttribute("name", statName))))).build().execute(operationStatisticNodes);
+
+    if (result.size() != 1) {
+      throw new RuntimeException("a single stat was expected; found " + result.size());
+    }
+
+    TreeNode node = result.iterator().next();
+    return (OperationStatistic) node.getContext().attributes().get("this");
+  }
+
+  public static <X> Set<X> set(X...
xs) { + return new HashSet(Arrays.asList(xs)); + } + + public static class TierOperationOutcomes { + + public enum GetOutcome { + HIT, + MISS, + } + + public enum EvictionOutcome { + SUCCESS, + FAILURE + } + + } + +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index 8f9ad0751a..bdfdf495a9 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -23,6 +23,9 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourceType; import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.ehcache.core.statistics.TierOperationStatistic; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; @@ -70,9 +73,11 @@ import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; @@ -83,6 +88,7 @@ import static java.lang.Math.max; import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; +import static org.ehcache.core.statistics.TierOperationStatistic.set; import static org.terracotta.offheapstore.util.MemoryUnit.BYTES; /** @@ -92,6 +98,8 @@ public class OffHeapDiskStore extends AbstractOffHeapStore implement private static final Logger LOGGER = LoggerFactory.getLogger(OffHeapDiskStore.class); + private static final String STATISTICS_TAG = "Disk"; + private static final String KEY_TYPE_PROPERTY_NAME = "keyType"; private static final String VALUE_TYPE_PROPERTY_NAME = "valueType"; private static final int DEFAULT_CONCURRENCY = 16; @@ -115,7 +123,7 @@ public class OffHeapDiskStore extends AbstractOffHeapStore implement public OffHeapDiskStore(FileBasedPersistenceContext fileBasedPersistenceContext, ExecutionService executionService, String threadPoolAlias, int writerConcurrency, final Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher, long sizeInBytes) { - super("local-disk", config, timeSource, eventDispatcher); + super(STATISTICS_TAG, config, timeSource, eventDispatcher); this.fileBasedPersistenceContext = fileBasedPersistenceContext; this.executionService = executionService; this.threadPoolAlias = threadPoolAlias; @@ -303,6 +311,8 @@ public static class Provider implements Store.Provider, AuthoritativeTier.Provid private volatile ServiceProvider serviceProvider; private volatile DiskResourceService diskPersistenceService; + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); + public Provider() { this(null); } @@ -323,7 +333,25 @@ public int rankAuthority(ResourceType authorityResource, Collection OffHeapDiskStore createStore(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + OffHeapDiskStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS)); + }}, "get", 1000, "get"); + StatisticsManager.associate(get).withParent(store); + tieredOps.add(get); + + TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + }}, "eviction", 1000, "eviction"); + StatisticsManager.associate(evict).withParent(store); + tieredOps.add(evict); + + tierOperationStatistics.put(store, tieredOps); + return store; } private OffHeapDiskStore createStoreInternal(Configuration storeConfig, StoreEventDispatcher eventDispatcher, ServiceConfiguration... serviceConfigs) { @@ -449,7 +477,25 @@ public void stop() { @Override public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStore(storeConfig, serviceConfigs); + OffHeapDiskStore authoritativeTier = createStore(storeConfig, serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, authoritativeTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT)); + }}, "get", 1000, "getAndFault"); + StatisticsManager.associate(get).withParent(authoritativeTier); + tieredOps.add(get); + + TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, authoritativeTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + }}, "eviction", 1000, "eviction"); + StatisticsManager.associate(evict).withParent(authoritativeTier); + tieredOps.add(evict); + + tierOperationStatistics.put(authoritativeTier, tieredOps); + return authoritativeTier; } @Override diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index a38f15839f..a0885dc7c3 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -30,6 +30,7 @@ import org.ehcache.core.events.StoreEventSink; import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.store.heap.LimitExceededException; +import org.ehcache.core.statistics.TierOperationStatistic; import org.ehcache.expiry.Duration; import org.ehcache.expiry.Expiry; import org.ehcache.core.spi.function.BiFunction; @@ -94,6 +95,7 @@ import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; +import static org.ehcache.core.statistics.TierOperationStatistic.set; import static org.terracotta.statistics.StatisticBuilder.operation; /** @@ -114,6 +116,8 @@ public class OnHeapStore implements Store, HigherCachingTier { private static final Logger LOG = LoggerFactory.getLogger(OnHeapStore.class); + private static final String STATISTICS_TAG = "OnHeap"; + private static final int ATTEMPT_RATIO = 4; private static final int EVICTION_RATIO = 2; @@ -156,6 +160,7 @@ public void onInvalidation(Object key, ValueHolder valueHolder) { private final Copier valueCopier; private final SizeOfEngine sizeOfEngine; + private final boolean byteSized; private volatile long capacity; private final EvictionAdvisor evictionAdvisor; @@ -226,7 +231,7 @@ public OnHeapStore(final Configuration config, final TimeSource timeSource throw new IllegalArgumentException("OnHeap store must be configured with a resource of type 'heap'"); } this.sizeOfEngine = sizeOfEngine; - boolean byteSized = this.sizeOfEngine instanceof NoopSizeOfEngine ? 
false : true; + this.byteSized = this.sizeOfEngine instanceof NoopSizeOfEngine ? false : true; this.capacity = byteSized ? ((MemoryUnit) heapPool.getUnit()).toBytes(heapPool.getSize()) : heapPool.getSize(); this.timeSource = timeSource; if (config.getEvictionAdvisor() == null) { @@ -244,32 +249,66 @@ public OnHeapStore(final Configuration config, final TimeSource timeSource } else { this.map = new KeyCopyBackend(byteSized, keyCopier); } - onHeapStoreStatsSettings = new OnHeapStoreStatsSettings(this); + onHeapStoreStatsSettings = new OnHeapStoreStatsSettings(); StatisticsManager.associate(onHeapStoreStatsSettings).withParent(this); - getObserver = operation(StoreOperationOutcomes.GetOutcome.class).named("get").of(this).tag("onheap-store").build(); - putObserver = operation(StoreOperationOutcomes.PutOutcome.class).named("put").of(this).tag("onheap-store").build(); - removeObserver = operation(StoreOperationOutcomes.RemoveOutcome.class).named("remove").of(this).tag("onheap-store").build(); - putIfAbsentObserver = operation(StoreOperationOutcomes.PutIfAbsentOutcome.class).named("putIfAbsent").of(this).tag("onheap-store").build(); - conditionalRemoveObserver = operation(StoreOperationOutcomes.ConditionalRemoveOutcome.class).named("conditionalRemove").of(this).tag("onheap-store").build(); - replaceObserver = operation(StoreOperationOutcomes.ReplaceOutcome.class).named("replace").of(this).tag("onheap-store").build(); - conditionalReplaceObserver = operation(StoreOperationOutcomes.ConditionalReplaceOutcome.class).named("conditionalReplace").of(this).tag("onheap-store").build(); - computeObserver = operation(StoreOperationOutcomes.ComputeOutcome.class).named("compute").of(this).tag("onheap-store").build(); - computeIfAbsentObserver = operation(StoreOperationOutcomes.ComputeIfAbsentOutcome.class).named("computeIfAbsent").of(this).tag("onheap-store").build(); - evictionObserver = operation(StoreOperationOutcomes.EvictionOutcome.class).named("eviction").of(this).tag("onheap-store").build(); - expirationObserver = operation(StoreOperationOutcomes.ExpirationOutcome.class).named("expiration").of(this).tag("onheap-store").build(); - getOrComputeIfAbsentObserver = operation(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class).named("getOrComputeIfAbsent").of(this).tag("onheap-store").build(); - invalidateObserver = operation(CachingTierOperationOutcomes.InvalidateOutcome.class).named("invalidate").of(this).tag("onheap-store").build(); - invalidateAllObserver = operation(CachingTierOperationOutcomes.InvalidateAllOutcome.class).named("invalidateAll").of(this).tag("onheap-store").build(); - invalidateAllWithHashObserver = operation(CachingTierOperationOutcomes.InvalidateAllWithHashOutcome.class).named("invalidateAllWithHash").of(this).tag("onheap-store").build(); - silentInvalidateObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.class).named("silentInvalidate").of(this).tag("onheap-store").build(); - silentInvalidateAllObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.class).named("silentInvalidateAll").of(this).tag("onheap-store").build(); - silentInvalidateAllWithHashObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllWithHashOutcome.class).named("silentInvalidateAllWithHash").of(this).tag("onheap-store").build(); - StatisticsManager.createPassThroughStatistic(this, "mappingsCount", Collections.singleton("onheap-store"), new Callable() { + getObserver = 
operation(StoreOperationOutcomes.GetOutcome.class).named("get").of(this).tag(STATISTICS_TAG).build(); + putObserver = operation(StoreOperationOutcomes.PutOutcome.class).named("put").of(this).tag(STATISTICS_TAG).build(); + removeObserver = operation(StoreOperationOutcomes.RemoveOutcome.class).named("remove").of(this).tag(STATISTICS_TAG).build(); + putIfAbsentObserver = operation(StoreOperationOutcomes.PutIfAbsentOutcome.class).named("putIfAbsent").of(this).tag(STATISTICS_TAG).build(); + conditionalRemoveObserver = operation(StoreOperationOutcomes.ConditionalRemoveOutcome.class).named("conditionalRemove").of(this).tag(STATISTICS_TAG).build(); + replaceObserver = operation(StoreOperationOutcomes.ReplaceOutcome.class).named("replace").of(this).tag(STATISTICS_TAG).build(); + conditionalReplaceObserver = operation(StoreOperationOutcomes.ConditionalReplaceOutcome.class).named("conditionalReplace").of(this).tag(STATISTICS_TAG).build(); + computeObserver = operation(StoreOperationOutcomes.ComputeOutcome.class).named("compute").of(this).tag(STATISTICS_TAG).build(); + computeIfAbsentObserver = operation(StoreOperationOutcomes.ComputeIfAbsentOutcome.class).named("computeIfAbsent").of(this).tag(STATISTICS_TAG).build(); + evictionObserver = operation(StoreOperationOutcomes.EvictionOutcome.class).named("eviction").of(this).tag(STATISTICS_TAG).build(); + expirationObserver = operation(StoreOperationOutcomes.ExpirationOutcome.class).named("expiration").of(this).tag(STATISTICS_TAG).build(); + getOrComputeIfAbsentObserver = operation(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class).named("getOrComputeIfAbsent").of(this).tag(STATISTICS_TAG).build(); + invalidateObserver = operation(CachingTierOperationOutcomes.InvalidateOutcome.class).named("invalidate").of(this).tag(STATISTICS_TAG).build(); + invalidateAllObserver = operation(CachingTierOperationOutcomes.InvalidateAllOutcome.class).named("invalidateAll").of(this).tag(STATISTICS_TAG).build(); + invalidateAllWithHashObserver = operation(CachingTierOperationOutcomes.InvalidateAllWithHashOutcome.class).named("invalidateAllWithHash").of(this).tag(STATISTICS_TAG).build(); + silentInvalidateObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.class).named("silentInvalidate").of(this).tag(STATISTICS_TAG).build(); + silentInvalidateAllObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.class).named("silentInvalidateAll").of(this).tag(STATISTICS_TAG).build(); + silentInvalidateAllWithHashObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllWithHashOutcome.class).named("silentInvalidateAllWithHash").of(this).tag(STATISTICS_TAG).build(); + + Set tags = new HashSet(Arrays.asList(STATISTICS_TAG, "tier")); + Map properties = new HashMap(); + properties.put("discriminator", STATISTICS_TAG); + StatisticsManager.createPassThroughStatistic(this, "mappings", tags, properties, new Callable() { @Override public Number call() throws Exception { return map.mappingCount(); } }); + StatisticsManager.createPassThroughStatistic(this, "maxMappings", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + if (byteSized) { + return -1L; + } else { + return capacity; + } + } + }); + StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + if (byteSized) { + return capacity; + } else { + return -1L; + } + } + }); + 
StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + if (byteSized) { + return map.byteSize(); + } else { + return -1L; + } + } + }); } @Override @@ -1629,6 +1668,7 @@ public static class Provider implements Store.Provider, CachingTier.Provider, Hi private volatile ServiceProvider serviceProvider; private final Map, List> createdStores = new ConcurrentWeakIdentityHashMap, List>(); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); @Override public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { @@ -1642,7 +1682,25 @@ public int rankCachingTier(Set> resourceTypes, Collection OnHeapStore createStore(final Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { - return createStoreInternal(storeConfig, new ScopedStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + OnHeapStore store = createStoreInternal(storeConfig, new ScopedStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS)); + }}, "get", 1000, "get"); + StatisticsManager.associate(get).withParent(store); + tieredOps.add(get); + + TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + }}, "eviction", 1000, "eviction"); + StatisticsManager.associate(evict).withParent(store); + tieredOps.add(evict); + + tierOperationStatistics.put(store, tieredOps); + return store; } public OnHeapStore createStoreInternal(final Configuration storeConfig, final StoreEventDispatcher eventDispatcher, @@ -1720,7 +1778,25 @@ public void stop() { @Override public CachingTier createCachingTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); + OnHeapStore cachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class, cachingTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT)); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS)); + }}, "get", 100, "getOrComputeIfAbsent"); + StatisticsManager.associate(get).withParent(cachingTier); + tieredOps.add(get); + + TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, cachingTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + }}, "eviction", 100, "eviction"); + StatisticsManager.associate(evict).withParent(cachingTier); + tieredOps.add(evict); + + tierOperationStatistics.put(cachingTier, tieredOps); + return cachingTier; } @Override @@ -1741,7 +1817,25 @@ public void initCachingTier(CachingTier resource) { @Override public HigherCachingTier createHigherCachingTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStore(storeConfig, serviceConfigs); + OnHeapStore higherCachingTier = createStore(storeConfig, serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class, higherCachingTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT)); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS)); + }}, "get", 10, "getOrComputeIfAbsent"); + StatisticsManager.associate(get).withParent(higherCachingTier); + tieredOps.add(get); + + TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, higherCachingTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + }}, "eviction", 10, "eviction"); + StatisticsManager.associate(evict).withParent(higherCachingTier); + tieredOps.add(evict); + + tierOperationStatistics.put(higherCachingTier, tieredOps); + return higherCachingTier; } @Override @@ -1757,13 +1851,7 @@ public void initHigherCachingTier(HigherCachingTier resource) { private static final class OnHeapStoreStatsSettings { @ContextAttribute("tags") private final Set tags = new HashSet(Arrays.asList("store")); - @ContextAttribute("cachingTier") private final CachingTier cachingTier; - @ContextAttribute("authoritativeTier") private final OnHeapStore authoritativeTier; - - OnHeapStoreStatsSettings(OnHeapStore onHeapStore) { - this.cachingTier = null; - this.authoritativeTier = onHeapStore; - } + @ContextAttribute("discriminator") private final String discriminator = STATISTICS_TAG; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java index 1594b2597e..353ece1e1e 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java @@ -114,7 +114,7 @@ public AbstractOffHeapStore(String statisticsTag, Configuration config, Ti this.timeSource = timeSource; this.eventDispatcher = eventDispatcher; - this.offHeapStoreStatsSettings = new OffHeapStoreStatsSettings(this); + this.offHeapStoreStatsSettings = new OffHeapStoreStatsSettings(statisticsTag); StatisticsManager.associate(offHeapStoreStatsSettings).withParent(this); this.getObserver = operation(StoreOperationOutcomes.GetOutcome.class).of(this).named("get").tag(statisticsTag).build(); this.putObserver = operation(StoreOperationOutcomes.PutOutcome.class).of(this).named("put").tag(statisticsTag).build(); @@ -138,73 +138,82 @@ public AbstractOffHeapStore(String statisticsTag, Configuration config, Ti this.getAndRemoveObserver= 
operation(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.class).of(this).named("getAndRemove").tag(statisticsTag).build(); this.installMappingObserver= operation(LowerCachingTierOperationsOutcome.InstallMappingOutcome.class).of(this).named("installMapping").tag(statisticsTag).build(); - StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", Collections.singleton(statisticsTag), new Callable() { + Set tags = new HashSet(Arrays.asList(statisticsTag, "tier")); + Map properties = new HashMap(); + properties.put("discriminator", statisticsTag); + StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().allocatedMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().occupiedMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "dataAllocatedMemory", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "dataAllocatedMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().dataAllocatedMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "dataOccupiedMemory", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "dataOccupiedMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().dataOccupiedMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "dataSize", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "dataSize", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().dataSize(); } }); - StatisticsManager.createPassThroughStatistic(this, "dataVitalMemory", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "dataVitalMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().dataVitalMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "longSize", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "mappings", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().longSize(); } }); - StatisticsManager.createPassThroughStatistic(this, "vitalMemory", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "maxMappings", tags, properties, new Callable() { + @Override + public Number call() throws Exception { + return -1L; + } + }); + StatisticsManager.createPassThroughStatistic(this, "vitalMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().vitalMemory(); } }); - StatisticsManager.createPassThroughStatistic(this, "removedSlotCount", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "removedSlotCount", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().removedSlotCount(); } }); - 
StatisticsManager.createPassThroughStatistic(this, "reprobeLength", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "reprobeLength", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().reprobeLength(); } }); - StatisticsManager.createPassThroughStatistic(this, "usedSlotCount", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "usedSlotCount", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().usedSlotCount(); } }); - StatisticsManager.createPassThroughStatistic(this, "tableCapacity", Collections.singleton(statisticsTag), new Callable() { + StatisticsManager.createPassThroughStatistic(this, "tableCapacity", tags, properties, new Callable() { @Override public Number call() throws Exception { return backingMap().tableCapacity(); @@ -1311,10 +1320,10 @@ public void onEviction(K key, OffHeapValueHolder value) { private static final class OffHeapStoreStatsSettings { @ContextAttribute("tags") private final Set tags = new HashSet(Arrays.asList("store")); - @ContextAttribute("authoritativeTier") private final AbstractOffHeapStore authoritativeTier; + @ContextAttribute("discriminator") private final String discriminator; - OffHeapStoreStatsSettings(AbstractOffHeapStore store) { - this.authoritativeTier = store; + OffHeapStoreStatsSettings(String discriminator) { + this.discriminator = discriminator; } } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java index eafa055766..6e9dbd66fb 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java @@ -18,12 +18,15 @@ import org.ehcache.config.SizedResourcePool; import org.ehcache.core.CacheConfigurationChangeListener; -import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourceType; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; +import org.ehcache.core.statistics.LowerCachingTierOperationsOutcome; +import org.ehcache.core.statistics.StoreOperationOutcomes; +import org.ehcache.core.statistics.TierOperationStatistic; import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.events.ThreadLocalStoreEventDispatcher; import org.ehcache.impl.internal.store.offheap.factories.EhcacheSegmentFactory; @@ -32,7 +35,6 @@ import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.impl.serialization.TransientStateRepository; -import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; @@ -55,13 +57,16 @@ import org.terracotta.offheapstore.util.Factory; import org.terracotta.statistics.StatisticsManager; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import static org.ehcache.config.Eviction.noAdvice; +import static 
org.ehcache.core.statistics.TierOperationStatistic.set; import static org.ehcache.impl.internal.store.offheap.OffHeapStoreUtils.getBufferSource; /** @@ -69,6 +74,8 @@ */ public class OffHeapStore extends AbstractOffHeapStore { + private static final String STATISTICS_TAG = "OffHeap"; + private final SwitchableEvictionAdvisor> evictionAdvisor; private final Serializer keySerializer; private final Serializer valueSerializer; @@ -77,7 +84,7 @@ public class OffHeapStore extends AbstractOffHeapStore { private volatile EhcacheConcurrentOffHeapClockCache> map; public OffHeapStore(final Configuration config, TimeSource timeSource, StoreEventDispatcher eventDispatcher, long sizeInBytes) { - super("local-offheap", config, timeSource, eventDispatcher); + super(STATISTICS_TAG, config, timeSource, eventDispatcher); EvictionAdvisor evictionAdvisor = config.getEvictionAdvisor(); if (evictionAdvisor != null) { this.evictionAdvisor = wrap(evictionAdvisor); @@ -129,6 +136,7 @@ public static class Provider implements Store.Provider, AuthoritativeTier.Provid private volatile ServiceProvider serviceProvider; private final Set> createdStores = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap, Boolean>()); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); @Override public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { @@ -142,7 +150,25 @@ public int rankAuthority(ResourceType authorityResource, Collection OffHeapStore createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - return createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + OffHeapStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS)); + }}, "get", 1000, "get"); + StatisticsManager.associate(get).withParent(store); + tieredOps.add(get); + + TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + }}, "eviction", 1000, "eviction"); + StatisticsManager.associate(evict).withParent(store); + tieredOps.add(evict); + + tierOperationStatistics.put(store, tieredOps); + return store; } private OffHeapStore createStoreInternal(Configuration storeConfig, StoreEventDispatcher eventDispatcher, ServiceConfiguration... serviceConfigs) { @@ -216,7 +242,25 @@ public void stop() { @Override public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStore(storeConfig, serviceConfigs); + OffHeapStore authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, authoritativeTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT)); + }}, "get", 1000, "getAndFault"); + StatisticsManager.associate(get).withParent(authoritativeTier); + tieredOps.add(get); + + TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, authoritativeTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + }}, "eviction", 1000, "eviction"); + StatisticsManager.associate(evict).withParent(authoritativeTier); + tieredOps.add(evict); + + tierOperationStatistics.put(authoritativeTier, tieredOps); + return authoritativeTier; } @Override @@ -231,7 +275,25 @@ public void initAuthoritativeTier(AuthoritativeTier resource) { @Override public LowerCachingTier createCachingTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - return createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); + OffHeapStore lowerCachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); + Collection> tieredOps = new ArrayList>(); + + TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.class, lowerCachingTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.HIT_REMOVED)); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS)); + }}, "get", 100, "getAndRemove"); + StatisticsManager.associate(get).withParent(lowerCachingTier); + tieredOps.add(get); + + TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, lowerCachingTier, new HashMap>() {{ + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + }}, "eviction", 100, "eviction"); + StatisticsManager.associate(evict).withParent(lowerCachingTier); + tieredOps.add(evict); + + tierOperationStatistics.put(lowerCachingTier, tieredOps); + return lowerCachingTier; } @Override diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java index c98e5f45d7..2beaa58573 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java @@ -16,7 +16,6 @@ package org.ehcache.impl.internal.store.tiering; import org.ehcache.Cache; -import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; import org.ehcache.core.CacheConfigurationChangeListener; @@ -45,6 +44,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -52,9 +52,7 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicReference; -import static org.ehcache.config.ResourceType.Core.DISK; -import static org.ehcache.config.ResourceType.Core.HEAP; -import static org.ehcache.config.ResourceType.Core.OFFHEAP; +import static org.ehcache.core.statistics.TierOperationStatistic.set; /** * A {@link Store} implementation supporting a tiered caching model. 
@@ -68,9 +66,6 @@ public class TieredStore implements Store { private final CachingTier realCachingTier; private final AuthoritativeTier authoritativeTier; - private final TieringStoreStatsSettings tieringStoreStatsSettings; - - public TieredStore(CachingTier cachingTier, AuthoritativeTier authoritativeTier) { this.cachingTierRef = new AtomicReference>(cachingTier); this.authoritativeTier = authoritativeTier; @@ -99,8 +94,6 @@ public void invalidateAllWithHash(long hash) throws StoreAccessException { StatisticsManager.associate(cachingTier).withParent(this); StatisticsManager.associate(authoritativeTier).withParent(this); - tieringStoreStatsSettings = new TieringStoreStatsSettings(cachingTier, authoritativeTier); - StatisticsManager.associate(tieringStoreStatsSettings).withParent(this); } @@ -517,17 +510,6 @@ public void stop() { } } - private static final class TieringStoreStatsSettings { - @ContextAttribute("tags") private final Set tags = new HashSet(Arrays.asList("store")); - @ContextAttribute("cachingTier") private final CachingTier cachingTier; - @ContextAttribute("authoritativeTier") private final AuthoritativeTier authoritativeTier; - - TieringStoreStatsSettings(CachingTier cachingTier, AuthoritativeTier authoritativeTier) { - this.cachingTier = cachingTier; - this.authoritativeTier = authoritativeTier; - } - } - private static class NoopCachingTier implements CachingTier { private final AuthoritativeTier authoritativeTier; diff --git a/impl/src/test/java/org/ehcache/docs/GettingStarted.java b/impl/src/test/java/org/ehcache/docs/GettingStarted.java index 012afc2325..8f6cdc26a0 100644 --- a/impl/src/test/java/org/ehcache/docs/GettingStarted.java +++ b/impl/src/test/java/org/ehcache/docs/GettingStarted.java @@ -44,11 +44,16 @@ import org.ehcache.event.EventType; import org.ehcache.impl.copy.ReadWriteCopier; import org.junit.Test; +import org.terracotta.context.ContextElement; +import org.terracotta.context.TreeNode; +import org.terracotta.statistics.StatisticsManager; import java.io.File; import java.io.Serializable; import java.net.URISyntaxException; import java.util.EnumSet; +import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; import static java.util.Collections.singletonMap; @@ -87,6 +92,9 @@ public void cachemanagerExample() { myCache.put(1L, "da one!"); // <7> String value = myCache.get(1L); // <8> + System.out.println(StatisticsManager.nodeFor(myCache).toTreeString()); + + cacheManager.removeCache("preConfigured"); // <9> cacheManager.close(); // <10> @@ -120,6 +128,10 @@ public void offheapCacheManager() { ) .build(true); + Cache tieredCache = cacheManager.getCache("tieredCache", Long.class, String.class); + + System.out.println(StatisticsManager.nodeFor(tieredCache).toTreeString()); + cacheManager.close(); // end::offheapCacheManager[] } @@ -138,6 +150,11 @@ public void threeTiersCacheManager() throws Exception { ) ).build(true); + Cache threeTieredCache = persistentCacheManager.getCache("threeTieredCache", Long.class, String.class); + + System.out.println(StatisticsManager.nodeFor(threeTieredCache).toTreeString()); + + persistentCacheManager.close(); // end::threeTiersCacheManager[] } diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatistics.java deleted file mode 100644 index 3dffc5103b..0000000000 --- a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatistics.java +++ /dev/null 
@@ -1,291 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.providers.statistics; - -import org.ehcache.Cache; -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.management.ManagementRegistryServiceConfiguration; -import org.ehcache.management.config.StatisticsProviderConfiguration; -import org.ehcache.management.providers.CacheBinding; -import org.ehcache.management.providers.ExposedCacheBinding; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.context.extended.ExposedStatistic; -import org.terracotta.context.extended.OperationType; -import org.terracotta.context.extended.StatisticsRegistry; -import org.terracotta.context.query.Matcher; -import org.terracotta.context.query.Matchers; -import org.terracotta.context.query.Query; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptorCategory; -import org.terracotta.management.model.stats.NumberUnit; -import org.terracotta.management.model.stats.Sample; -import org.terracotta.management.model.stats.Statistic; -import org.terracotta.management.model.stats.StatisticType; -import org.terracotta.management.model.stats.history.AverageHistory; -import org.terracotta.management.model.stats.history.CounterHistory; -import org.terracotta.management.model.stats.history.DurationHistory; -import org.terracotta.management.model.stats.history.RateHistory; -import org.terracotta.management.model.stats.history.RatioHistory; -import org.terracotta.management.model.stats.primitive.Counter; -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.archive.Timestamped; -import org.terracotta.statistics.extended.Result; -import org.terracotta.statistics.extended.SampledStatistic; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import static org.terracotta.context.query.Matchers.attributes; -import static org.terracotta.context.query.Matchers.context; -import static org.terracotta.context.query.Matchers.hasAttribute; -import static org.terracotta.context.query.Matchers.identifier; -import static org.terracotta.context.query.Matchers.subclassOf; -import static org.terracotta.context.query.QueryBuilder.queryBuilder; - -class EhcacheStatistics extends ExposedCacheBinding { - - private static final Set ALL_CACHE_PUT_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.PutOutcome.class); - private static final Set ALL_CACHE_GET_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.GetOutcome.class); - 
private static final Set ALL_CACHE_MISS_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE, CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER); - private static final Set ALL_CACHE_REMOVE_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.RemoveOutcome.class); - private static final Set GET_WITH_LOADER_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER); - private static final Set GET_NO_LOADER_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); - - private final StatisticsRegistry statisticsRegistry; - private final Map> countStatistics; - - EhcacheStatistics(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding, StatisticsProviderConfiguration statisticsProviderConfiguration, ScheduledExecutorService executor) { - super(registryConfiguration, cacheBinding); - this.countStatistics = discoverCountStatistics(cacheBinding.getCache()); - this.statisticsRegistry = new StatisticsRegistry(StandardOperationStatistic.class, cacheBinding.getCache(), executor, statisticsProviderConfiguration.averageWindowDuration(), - statisticsProviderConfiguration.averageWindowUnit(), statisticsProviderConfiguration.historySize(), statisticsProviderConfiguration.historyInterval(), statisticsProviderConfiguration.historyIntervalUnit(), - statisticsProviderConfiguration.timeToDisable(), statisticsProviderConfiguration.timeToDisableUnit()); - - statisticsRegistry.registerCompoundOperation("AllCacheGet", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_GET, ALL_CACHE_GET_OUTCOMES); - statisticsRegistry.registerCompoundOperation("AllCacheMiss", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_GET, ALL_CACHE_MISS_OUTCOMES); - statisticsRegistry.registerCompoundOperation("AllCachePut", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_PUT, ALL_CACHE_PUT_OUTCOMES); - statisticsRegistry.registerCompoundOperation("AllCacheRemove", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_REMOVE, ALL_CACHE_REMOVE_OUTCOMES); - statisticsRegistry.registerCompoundOperation("GetWithLoader", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_GET, GET_WITH_LOADER_OUTCOMES); - statisticsRegistry.registerCompoundOperation("GetNoLoader", Collections.singleton("cache"), Collections.singletonMap("type", "Result"), StandardOperationStatistic.CACHE_GET, GET_NO_LOADER_OUTCOMES); - statisticsRegistry.registerRatio("Hit", Collections.singleton("cache"), Collections.singletonMap("type", "Ratio"), StandardOperationStatistic.CACHE_GET, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER), ALL_CACHE_GET_OUTCOMES); - } - - @SuppressWarnings("unchecked") - public Map> queryStatistic(String statisticName, long since) { - Collection registrations = statisticsRegistry.getRegistrations(); - for (ExposedStatistic registration : registrations) { - Object type = registration.getProperties().get("type"); - String name = registration.getName(); - - if ("Result".equals(type)) { - Result result = (Result) registration.getStat(); - - // The way ehcache stats computes stats: - // - Durations are in NANOSECONDS - // - Rate are in SECONDS 
and the values are divided by the average window, in SECONDS. - - if ((name + "Count").equals(statisticName)) { - SampledStatistic count = result.count(); - return Collections.singletonMap(statisticName, new CounterHistory(buildHistory(count, since), NumberUnit.COUNT)); - - } else if ((name + "Rate").equals(statisticName)) { - SampledStatistic rate = result.rate(); - return Collections.singletonMap(statisticName, new RateHistory(buildHistory(rate, since), TimeUnit.SECONDS)); - - } else if ((name + "LatencyMinimum").equals(statisticName)) { - SampledStatistic minimum = result.latency().minimum(); - return Collections.singletonMap(statisticName, new DurationHistory(buildHistory(minimum, since), TimeUnit.NANOSECONDS)); - - } else if ((name + "LatencyMaximum").equals(statisticName)) { - SampledStatistic maximum = result.latency().maximum(); - return Collections.singletonMap(statisticName, new DurationHistory(buildHistory(maximum, since), TimeUnit.NANOSECONDS)); - - } else if ((name + "LatencyAverage").equals(statisticName)) { - SampledStatistic average = result.latency().average(); - return Collections.singletonMap(statisticName, new AverageHistory(buildHistory(average, since), TimeUnit.NANOSECONDS)); - - } else if (name.equals(statisticName)) { - Map> resultStats = new HashMap>(); - resultStats.put(statisticName + "Count", new CounterHistory(buildHistory(result.count(), since), NumberUnit.COUNT)); - resultStats.put(statisticName + "Rate", new RateHistory(buildHistory(result.rate(), since), TimeUnit.SECONDS)); - resultStats.put(statisticName + "LatencyMinimum", new DurationHistory(buildHistory(result.latency().minimum(), since), TimeUnit.NANOSECONDS)); - resultStats.put(statisticName + "LatencyMaximum", new DurationHistory(buildHistory(result.latency().maximum(), since), TimeUnit.NANOSECONDS)); - resultStats.put(statisticName + "LatencyAverage", new AverageHistory(buildHistory(result.latency().average(), since), TimeUnit.NANOSECONDS)); - return resultStats; - } - - } else if ("Ratio".equals(type)) { - if ((name + "Ratio").equals(statisticName)) { - SampledStatistic ratio = (SampledStatistic) registration.getStat(); - return Collections.singletonMap(statisticName, new RatioHistory(buildHistory(ratio, since), NumberUnit.RATIO)); - } - } - } - - OperationStatistic operationStatistic = countStatistics.get(statisticName); - if (operationStatistic != null) { - long sum = operationStatistic.sum(); - return Collections.singletonMap(statisticName, new Counter(sum, NumberUnit.COUNT)); - } - - return Collections.emptyMap(); - } - - private List> buildHistory(SampledStatistic sampledStatistic, long since) { - List> result = new ArrayList>(); - - List> history = sampledStatistic.history(); - for (Timestamped timestamped : history) { - if(timestamped.getTimestamp() >= since) { - result.add(new Sample(timestamped.getTimestamp(), timestamped.getSample())); - } - } - - return result; - } - - @Override - public Collection getDescriptors() { - Set capabilities = new HashSet(); - - capabilities.addAll(queryStatisticsRegistry()); - capabilities.addAll(operationStatistics()); - - return capabilities; - } - - private Set operationStatistics() { - Set capabilities = new HashSet(); - - for (String name : countStatistics.keySet()) { - capabilities.add(new StatisticDescriptor(name, StatisticType.COUNTER)); - } - - return capabilities; - } - - private Set queryStatisticsRegistry() { - Set capabilities = new HashSet(); - - Collection registrations = statisticsRegistry.getRegistrations(); - for (ExposedStatistic 
registration : registrations) { - String name = registration.getName(); - Object type = registration.getProperties().get("type"); - if ("Result".equals(type)) { - List statistics = new ArrayList(); - statistics.add(new StatisticDescriptor(name + "Count", StatisticType.COUNTER_HISTORY)); - statistics.add(new StatisticDescriptor(name + "Rate", StatisticType.RATE_HISTORY)); - statistics.add(new StatisticDescriptor(name + "LatencyMinimum", StatisticType.DURATION_HISTORY)); - statistics.add(new StatisticDescriptor(name + "LatencyMaximum", StatisticType.DURATION_HISTORY)); - statistics.add(new StatisticDescriptor(name + "LatencyAverage", StatisticType.AVERAGE_HISTORY)); - - capabilities.add(new StatisticDescriptorCategory(name, statistics)); - } else if ("Ratio".equals(type)) { - capabilities.add(new StatisticDescriptor(name + "Ratio", StatisticType.RATIO_HISTORY)); - } - } - - return capabilities; - } - - public void dispose() { - statisticsRegistry.clearRegistrations(); - } - - - @SuppressWarnings({"unchecked", "rawtypes"}) - private static Map> discoverCountStatistics(Cache cache) { - Map> result = new HashMap>(); - - for (OperationType t : StandardOperationStatistic.class.getEnumConstants()) { - OperationStatistic statistic = findOperationObserver(t, cache); - if (statistic == null) { - if (t.required()) { - throw new IllegalStateException("Required statistic " + t + " not found"); - } - } else { - String key = capitalize(t.operationName()) + "Counter"; - if(!result.containsKey(key)) { - result.put(key, statistic); - } - - } - } - - return result; - } - - private static String capitalize(String s) { - if (s.length() < 2) { - return s.toUpperCase(); - } else { - return s.substring(0, 1).toUpperCase() + s.substring(1); - } - } - - @SuppressWarnings({"unchecked", "rawtypes"}) - private static OperationStatistic findOperationObserver(OperationType statistic, Cache cache) { - Set> results = findOperationObserver(statistic.context(), statistic.type(), statistic.operationName(), statistic.tags(), cache); - switch (results.size()) { - case 0: - return null; - case 1: - return (OperationStatistic) results.iterator().next(); - default: - throw new IllegalStateException("Duplicate statistics found for " + statistic); - } - } - - @SuppressWarnings("unchecked") - private static Set> findOperationObserver(Query contextQuery, Class type, String name, - final Set tags, Cache cache) { - Query q = queryBuilder().chain(contextQuery) - .children().filter(context(identifier(subclassOf(OperationStatistic.class)))).build(); - - Set operationStatisticNodes = q.execute(Collections.singleton(ContextManager.nodeFor(cache))); - Set result = queryBuilder() - .filter( - context(attributes(Matchers.>allOf(hasAttribute("type", type), - hasAttribute("name", name), hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.containsAll(tags); - } - }))))).build().execute(operationStatisticNodes); - - if (result.isEmpty()) { - return Collections.emptySet(); - } else { - Set> statistics = new HashSet>(); - for (TreeNode node : result) { - statistics.add((OperationStatistic) node.getContext().attributes().get("this")); - } - return statistics; - } - } - -} diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java index aeb392b504..fcbaac10e5 100644 --- 
a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java @@ -46,12 +46,12 @@ public EhcacheStatisticsProvider(ManagementRegistryServiceConfiguration configur @Override protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { - return new EhcacheStatistics(registryConfiguration, cacheBinding, statisticsProviderConfiguration, executor); + return new StandardEhcacheStatistics(registryConfiguration, cacheBinding, statisticsProviderConfiguration, executor); } @Override protected void dispose(ExposedObject exposedObject) { - ((EhcacheStatistics) exposedObject).dispose(); + ((StandardEhcacheStatistics) exposedObject).dispose(); } @Override @@ -65,10 +65,10 @@ public Capability getCapability() { @Override public Map> collectStatistics(Context context, Collection statisticNames, long since) { Map> statistics = new HashMap>(statisticNames.size()); - EhcacheStatistics ehcacheStatistics = (EhcacheStatistics) findExposedObject(context); + StandardEhcacheStatistics ehcacheStatistics = (StandardEhcacheStatistics) findExposedObject(context); if (ehcacheStatistics != null) { for (String statisticName : statisticNames) { - statistics.putAll(ehcacheStatistics.queryStatistic(statisticName, since)); + statistics.put(statisticName, ehcacheStatistics.queryStatistic(statisticName, since)); } } return statistics; diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java new file mode 100644 index 0000000000..ec3d512e15 --- /dev/null +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -0,0 +1,182 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
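The EhcacheStatisticsProvider change above also tightens the contract: queryStatistic(name, since) now returns exactly one statistic (the new class throws IllegalArgumentException for unknown names) instead of a map fragment, so collectStatistics simply puts one entry per requested name. A simplified sketch of that collection loop, with a hypothetical StatisticSource interface standing in for StandardEhcacheStatistics; skipping unknown names is a choice of this sketch, the provider above assumes callers only ask for registered names:

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

final class StatisticsCollector {

  // Hypothetical stand-in for StandardEhcacheStatistics.queryStatistic(String, long).
  interface StatisticSource {
    Object queryStatistic(String statisticName, long since);
  }

  static Map<String, Object> collect(StatisticSource source, Collection<String> names, long since) {
    Map<String, Object> statistics = new HashMap<String, Object>(names.size());
    for (String name : names) {
      try {
        statistics.put(name, source.queryStatistic(name, since));  // one entry per requested name
      } catch (IllegalArgumentException unknownStatistic) {
        // unknown names are silently skipped in this sketch
      }
    }
    return statistics;
  }
}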
+ */
+package org.ehcache.management.providers.statistics;
+
+import org.ehcache.core.statistics.CacheOperationOutcomes;
+import org.ehcache.core.statistics.TierOperationStatistic;
+import org.ehcache.management.ManagementRegistryServiceConfiguration;
+import org.ehcache.management.config.StatisticsProviderConfiguration;
+import org.ehcache.management.providers.CacheBinding;
+import org.ehcache.management.providers.ExposedCacheBinding;
+import org.terracotta.context.extended.OperationStatisticDescriptor;
+import org.terracotta.context.extended.RegisteredCompoundStatistic;
+import org.terracotta.context.extended.RegisteredRatioStatistic;
+import org.terracotta.context.extended.RegisteredStatistic;
+import org.terracotta.context.extended.RegisteredValueStatistic;
+import org.terracotta.context.extended.StatisticsRegistry;
+import org.terracotta.context.extended.ValueStatisticDescriptor;
+import org.terracotta.management.model.capabilities.descriptors.Descriptor;
+import org.terracotta.management.model.stats.NumberUnit;
+import org.terracotta.management.model.stats.Sample;
+import org.terracotta.management.model.stats.Statistic;
+import org.terracotta.management.model.stats.history.AverageHistory;
+import org.terracotta.management.model.stats.history.CounterHistory;
+import org.terracotta.management.model.stats.history.DurationHistory;
+import org.terracotta.management.model.stats.history.RateHistory;
+import org.terracotta.management.model.stats.history.RatioHistory;
+import org.terracotta.statistics.archive.Timestamped;
+import org.terracotta.statistics.extended.CompoundOperation;
+import org.terracotta.statistics.extended.SampledStatistic;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+class StandardEhcacheStatistics extends ExposedCacheBinding {
+
+  private static final Set<CacheOperationOutcomes.PutOutcome> ALL_CACHE_PUT_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.PutOutcome.class);
+  private static final Set<CacheOperationOutcomes.GetOutcome> ALL_CACHE_GET_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.GetOutcome.class);
+  private static final Set<CacheOperationOutcomes.GetOutcome> ALL_CACHE_MISS_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE, CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER);
+  private static final Set<CacheOperationOutcomes.RemoveOutcome> ALL_CACHE_REMOVE_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.RemoveOutcome.class);
+  private static final Set<CacheOperationOutcomes.GetOutcome> GET_WITH_LOADER_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER);
+  private static final Set<CacheOperationOutcomes.GetOutcome> GET_NO_LOADER_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER);
+
+  private final StatisticsRegistry statisticsRegistry;
+
+  StandardEhcacheStatistics(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding, StatisticsProviderConfiguration statisticsProviderConfiguration, ScheduledExecutorService executor) {
+    super(registryConfiguration, cacheBinding);
+    this.statisticsRegistry = new StatisticsRegistry(cacheBinding.getCache(), executor, statisticsProviderConfiguration.averageWindowDuration(),
+        statisticsProviderConfiguration.averageWindowUnit(), statisticsProviderConfiguration.historySize(), statisticsProviderConfiguration.historyInterval(),
statisticsProviderConfiguration.historyIntervalUnit(), + statisticsProviderConfiguration.timeToDisable(), statisticsProviderConfiguration.timeToDisableUnit()); + + statisticsRegistry.registerCompoundOperations("Cache:Hit", OperationStatisticDescriptor.descriptor("get", Collections.singleton("cache"), CacheOperationOutcomes.GetOutcome.class), EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + statisticsRegistry.registerCompoundOperations("Cache:Miss", OperationStatisticDescriptor.descriptor("get", Collections.singleton("cache"), CacheOperationOutcomes.GetOutcome.class), EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)); + statisticsRegistry.registerCompoundOperations("Hit", OperationStatisticDescriptor.descriptor("get", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.GetOutcome.class), EnumSet.of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT)); + statisticsRegistry.registerCompoundOperations("Miss", OperationStatisticDescriptor.descriptor("get", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.GetOutcome.class), EnumSet.of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS)); + statisticsRegistry.registerCompoundOperations("Eviction", OperationStatisticDescriptor.descriptor("eviction", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class), EnumSet.allOf(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class)); + statisticsRegistry.registerRatios("HitRatio", OperationStatisticDescriptor.descriptor("get", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.GetOutcome.class), EnumSet.of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT), EnumSet.of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS)); + statisticsRegistry.registerValue("MappingCount", ValueStatisticDescriptor.descriptor("mappings", Collections.singleton("tier"))); + statisticsRegistry.registerValue("MaxMappingCount", ValueStatisticDescriptor.descriptor("maxMappings", Collections.singleton("tier"))); + statisticsRegistry.registerValue("AllocatedBytesCount", ValueStatisticDescriptor.descriptor("allocatedMemory", Collections.singleton("tier"))); + statisticsRegistry.registerValue("OccupiedBytesCount", ValueStatisticDescriptor.descriptor("occupiedMemory", Collections.singleton("tier"))); + + Map registrations = statisticsRegistry.getRegistrations(); + for (RegisteredStatistic registeredStatistic : registrations.values()) { + registeredStatistic.getSupport().setAlwaysOn(true); + } + } + + @SuppressWarnings("unchecked") + public Statistic queryStatistic(String statisticName, long since) { + Map registrations = statisticsRegistry.getRegistrations(); + for (Map.Entry entry : registrations.entrySet()) { + String name = entry.getKey(); + RegisteredStatistic registeredStatistic = entry.getValue(); + + if (registeredStatistic instanceof RegisteredCompoundStatistic) { + RegisteredCompoundStatistic registeredCompoundStatistic = (RegisteredCompoundStatistic) registeredStatistic; + CompoundOperation compoundOperation = registeredCompoundStatistic.getCompoundOperation(); + + if ((name + "Count").equals(statisticName)) { + SampledStatistic count = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).count(); + return new CounterHistory(buildHistory(count, since), NumberUnit.COUNT); + } else if ((name + 
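The constructor above assigns stable names ("Cache:Hit", "Cache:Miss", tier-level "Hit"/"Miss"/"Eviction", "HitRatio", and value statistics such as "MappingCount") to statistics discovered by descriptor (an operation or value name plus a "cache" or "tier" tag), then switches every registration to always-on so history sampling starts immediately. A reduced sketch of such a registry, with hypothetical Registration records rather than the Terracotta types:

import java.util.LinkedHashMap;
import java.util.Map;

final class MiniStatisticsRegistry {

  // Hypothetical registration handle: remembers what was asked for and whether sampling is on.
  static final class Registration {
    final String kind;        // "compound", "ratio" or "value"
    final String descriptor;  // e.g. "get@tier" or "mappings@tier"
    boolean alwaysOn;
    Registration(String kind, String descriptor) { this.kind = kind; this.descriptor = descriptor; }
  }

  private final Map<String, Registration> registrations = new LinkedHashMap<String, Registration>();

  void registerCompound(String name, String operation, String tag) {
    registrations.put(name, new Registration("compound", operation + "@" + tag));
  }

  void registerRatio(String name, String operation, String tag) {
    registrations.put(name, new Registration("ratio", operation + "@" + tag));
  }

  void registerValue(String name, String valueName, String tag) {
    registrations.put(name, new Registration("value", valueName + "@" + tag));
  }

  // Mirrors the loop above calling registeredStatistic.getSupport().setAlwaysOn(true).
  void enableAll() {
    for (Registration registration : registrations.values()) {
      registration.alwaysOn = true;
    }
  }

  Map<String, Registration> getRegistrations() {
    return registrations;
  }
}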
"Rate").equals(statisticName)) { + SampledStatistic rate = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).rate(); + return new RateHistory(buildHistory(rate, since), TimeUnit.SECONDS); + + } else if ((name + "LatencyMinimum").equals(statisticName)) { + SampledStatistic minimum = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().minimum(); + return new DurationHistory(buildHistory(minimum, since), TimeUnit.NANOSECONDS); + + } else if ((name + "LatencyMaximum").equals(statisticName)) { + SampledStatistic maximum = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().maximum(); + return new DurationHistory(buildHistory(maximum, since), TimeUnit.NANOSECONDS); + + } else if ((name + "LatencyAverage").equals(statisticName)) { + SampledStatistic average = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().average(); + return new AverageHistory(buildHistory(average, since), TimeUnit.NANOSECONDS); + } + } else if (registeredStatistic instanceof RegisteredRatioStatistic) { + RegisteredRatioStatistic registeredRatioStatistic = (RegisteredRatioStatistic) registeredStatistic; + CompoundOperation compoundOperation = registeredRatioStatistic.getCompoundOperation(); + + if (name.equals(statisticName)) { + SampledStatistic ratio = (SampledStatistic) compoundOperation.ratioOf((Set) registeredRatioStatistic.getNumerator(), (Set) registeredRatioStatistic.getDenominator()); + return new RatioHistory(buildHistory(ratio, since), NumberUnit.RATIO); + } + } else if (registeredStatistic instanceof RegisteredValueStatistic) { + RegisteredValueStatistic registeredValueStatistic = (RegisteredValueStatistic) registeredStatistic; + + if (name.equals(statisticName)) { + SampledStatistic count = (SampledStatistic) registeredValueStatistic.getSampledStatistic(); + return new CounterHistory(buildHistory(count, since), NumberUnit.COUNT); + } + } else { + throw new UnsupportedOperationException("Cannot handle registered statistic type : " + registeredStatistic); + } + } + + throw new IllegalArgumentException("No registered statistic named '" + statisticName + "'"); + } + + private List> buildHistory(SampledStatistic sampledStatistic, long since) { + List> result = new ArrayList>(); + + List> history = sampledStatistic.history(); + for (Timestamped timestamped : history) { + if(timestamped.getTimestamp() >= since) { + result.add(new Sample(timestamped.getTimestamp(), timestamped.getSample())); + } + } + + return result; + } + + @Override + public Collection getDescriptors() { + Set capabilities = new HashSet(); + + capabilities.addAll(queryStatisticsRegistry()); + capabilities.addAll(operationStatistics()); + + return capabilities; + } + + private Set operationStatistics() { + Set capabilities = new HashSet(); + + return capabilities; + } + + private Set queryStatisticsRegistry() { + Set capabilities = new HashSet(); + + return capabilities; + } + + public void dispose() { + statisticsRegistry.clearRegistrations(); + } + + +} diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardOperationStatistic.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardOperationStatistic.java deleted file mode 100755 index 87b072e945..0000000000 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardOperationStatistic.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright Terracotta, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.management.providers.statistics; - -import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.terracotta.context.extended.OperationType; -import org.terracotta.context.query.Query; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - -import static org.terracotta.context.query.Queries.self; - - -/** - * The Enum OperationType. - */ -enum StandardOperationStatistic implements OperationType { - CACHE_LOADING(false, self(), CacheOperationOutcomes.CacheLoadingOutcome.class, "cacheLoading", "cache"), - - /** - * The cache get. - */ - CACHE_GET(true, self(), CacheOperationOutcomes.GetOutcome.class, "get", "cache"), - - /** - * The cache put. - */ - CACHE_PUT(true, self(), CacheOperationOutcomes.PutOutcome.class, "put", "cache"), - - /** - * The cache remove. - */ - CACHE_REMOVE(true, self(), CacheOperationOutcomes.RemoveOutcome.class, "remove", "cache"), - - /** - * The cache remove(K, V) - */ - CACHE_CONDITIONAL_REMOVE(true, self(), CacheOperationOutcomes.ConditionalRemoveOutcome.class, "conditionalRemove", "cache"), - - /** - * The cache putIfAbsent. - */ - CACHE_PUT_IF_ABSENT(true, self(), CacheOperationOutcomes.PutIfAbsentOutcome.class, "putIfAbsent", "cache"), - - /** - * The cache replace. - */ - CACHE_REPLACE(true, self(), CacheOperationOutcomes.ReplaceOutcome.class, "replace", "cache"), - - ; - - private final boolean required; - private final Query context; - private final Class> type; - private final String name; - private final Set tags; - - StandardOperationStatistic(boolean required, Query context, Class> type, String name, String... tags) { - this.required = required; - this.context = context; - this.type = type; - this.name = name; - this.tags = Collections.unmodifiableSet(new HashSet(Arrays.asList(tags))); - } - - /** - * If this statistic is required. - *
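The StandardOperationStatistic enum deleted above follows a common pattern: each constant carries the metadata (required flag, outcome type, operation name, tags) needed to locate its statistic in the context tree. Stripped of the Terracotta query and outcome types, the pattern looks like this:

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

enum OperationDescriptor {
  GET(true, "get", "cache"),
  PUT(true, "put", "cache"),
  REMOVE(true, "remove", "cache"),
  CACHE_LOADING(false, "cacheLoading", "cache");

  private final boolean required;
  private final String operationName;
  private final Set<String> tags;

  OperationDescriptor(boolean required, String operationName, String... tags) {
    this.required = required;
    this.operationName = operationName;
    this.tags = Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(tags)));
  }

  boolean required() { return required; }            // a missing required statistic is an error
  String operationName() { return operationName; }   // name as found in the statistics context tree
  Set<String> tags() { return tags; }                 // tags expected on the matching node
}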

- * If required and this statistic is not present an exception will be thrown. - * - * @return - */ - public final boolean required() { - return required; - } - - /** - * Query that select context nodes for this statistic. - * - * @return context query - */ - public final Query context() { - return context; - } - - /** - * Operation result type. - * - * @return operation result type - */ - @SuppressWarnings("rawtypes") - public final Class> type() { - return type; - } - - /** - * The name of the statistic as found in the statistics context tree. - * - * @return the statistic name - */ - public final String operationName() { - return name; - } - - /** - * A set of tags that will be on the statistic found in the statistics context tree. - * - * @return the statistic tags - */ - public final Set tags() { - return tags; - } - -} diff --git a/management/src/test/java/org/ehcache/docs/ManagementTest.java b/management/src/test/java/org/ehcache/docs/ManagementTest.java index 9a0c7e429d..13baad2327 100644 --- a/management/src/test/java/org/ehcache/docs/ManagementTest.java +++ b/management/src/test/java/org/ehcache/docs/ManagementTest.java @@ -21,11 +21,12 @@ import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; import org.ehcache.management.ManagementRegistryService; -import org.ehcache.management.registry.DefaultManagementRegistryService; -import org.terracotta.management.registry.ResultSet; import org.ehcache.management.SharedManagementService; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; import org.ehcache.management.registry.DefaultSharedManagementService; import org.hamcrest.Matchers; import org.junit.Assert; @@ -36,8 +37,11 @@ import org.terracotta.management.model.context.Context; import org.terracotta.management.model.context.ContextContainer; import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.history.CounterHistory; import org.terracotta.management.model.stats.primitive.Counter; +import org.terracotta.management.registry.ResultSet; +import java.util.Arrays; import java.util.Collection; import java.util.Iterator; @@ -46,7 +50,8 @@ public class ManagementTest { @Test public void usingManagementRegistry() throws Exception { // tag::usingManagementRegistry[] - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB)) .build(); DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); // <1> @@ -58,23 +63,38 @@ public void usingManagementRegistry() throws Exception { Cache aCache = cacheManager.getCache("aCache", Long.class, String.class); + aCache.put(-1L, "-one"); + aCache.put(0L, "zero"); + aCache.get(-1L); // <4> aCache.get(0L); // <4> aCache.get(0L); aCache.get(0L); + Thread.sleep(1100); + Context context = createContext(managementRegistry); // <5> ContextualStatistics counters = 
managementRegistry.withCapability("StatisticsCapability") // <6> - .queryStatistic("GetCounter") + .queryStatistics(Arrays.asList("OnHeap:HitCount", "OnHeap:EvictionCount", "OffHeap:HitCount", "Cache:HitCount", "OffHeap:OccupiedBytesCount", "OffHeap:MappingCount")) .on(context) .build() .execute() .getSingleResult(); - Assert.assertThat(counters.size(), Matchers.is(1)); - Counter getCounter = counters.getStatistic(Counter.class); + Assert.assertThat(counters.size(), Matchers.is(6)); + CounterHistory onHeapStore_Hit_Count = counters.getStatistic(CounterHistory.class, "OnHeap:HitCount"); + CounterHistory onHeapStore_Eviction_Count = counters.getStatistic(CounterHistory.class, "OnHeap:EvictionCount"); + CounterHistory offHeapStore_Hit_Count = counters.getStatistic(CounterHistory.class, "OffHeap:HitCount"); + CounterHistory cache_Hit_Count = counters.getStatistic(CounterHistory.class, "Cache:HitCount"); + CounterHistory offHeapStore_Mapping_Count = counters.getStatistic(CounterHistory.class, "OffHeap:MappingCount"); + CounterHistory offHeapStore_OccupiedBytes_Count = counters.getStatistic(CounterHistory.class, "OffHeap:OccupiedBytesCount"); + + Assert.assertThat(onHeapStore_Hit_Count.getValue()[0].getValue() + offHeapStore_Hit_Count.getValue()[0].getValue(), Matchers.equalTo(4L)); // <7> + Assert.assertThat(cache_Hit_Count.getValue()[0].getValue(), Matchers.equalTo(4L)); // <7> - Assert.assertThat(getCounter.getValue(), Matchers.equalTo(3L)); // <7> + System.out.println("onheap evictions: " + onHeapStore_Eviction_Count.getValue()[0].getValue()); + System.out.println("offheap mappings: " + offHeapStore_Mapping_Count.getValue()[0].getValue()); + System.out.println("offheap used bytes: " + offHeapStore_OccupiedBytes_Count.getValue()[0].getValue()); cacheManager.close(); // end::usingManagementRegistry[] diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java index f119932510..9873687473 100644 --- a/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java @@ -64,7 +64,7 @@ public void testDescriptions() throws Exception { EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, executor) { @Override protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { - EhcacheStatistics mock = mock(EhcacheStatistics.class); + StandardEhcacheStatistics mock = mock(StandardEhcacheStatistics.class); Set descriptors = new HashSet(); descriptors.add(new StatisticDescriptor("aCounter", StatisticType.COUNTER)); descriptors.add(new StatisticDescriptor("aDuration", StatisticType.DURATION)); @@ -90,7 +90,7 @@ public void testCapabilityContext() throws Exception { EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, executor) { @Override protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { - return mock(EhcacheStatistics.class); + return mock(StandardEhcacheStatistics.class); } }; From 817017217e1ebaebbaf2d5cd1d1fd4496c7b9609 Mon Sep 17 00:00:00 2001 From: geoff gibson Date: Tue, 16 Aug 2016 13:29:39 -0700 Subject: [PATCH 028/218] Issue #1286 adds statistics capability for tiers. Removes stats groups. 
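The reworked ManagementTest above asserts a tier invariant: the cache-level hit count must equal the sum of the hit counts of the individual tiers (here OnHeap plus OffHeap), because every cache hit is served by exactly one tier. As a standalone check, with plain longs instead of CounterHistory samples and an illustrative split of, for example, one on-heap hit and three off-heap hits:

final class TierInvariant {

  // A cache hit is always served by exactly one tier, so per-tier hits must add up
  // to the cache-level hit count.
  static boolean hitsAreConsistent(long cacheHits, long... tierHits) {
    long sum = 0L;
    for (long hits : tierHits) {
      sum += hits;
    }
    return sum == cacheHits;
  }

  public static void main(String[] args) {
    System.out.println(hitsAreConsistent(4L, 1L, 3L));   // prints: true
  }
}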
Adds stats: working stats: Cache:HitCount Cache:HitRatio Cache:MissCount Cache:MissRatio Cache:ClearCount OnHeap:EvictionCount OffHeap:EvictionCount done but need better testing to verify correctness: OnHeap:OccupiedBytesCount OffHeap:OccupiedBytesCount Disk:OccupiedBytesCount Disk:EvictionCount --- .../jsr107/Eh107CacheStatisticsMXBean.java | 61 ++- build.gradle | 6 +- .../config/ClusteredStoreConfiguration.java | 30 +- .../ClusteredStoreConfigurationBuilder.java | 10 +- .../ClusteringServiceConfigurationParser.java | 2 +- .../service/DefaultClusteringService.java | 5 +- .../client/internal/store/ClusteredStore.java | 48 +-- .../client/BasicClusteredCacheExpiryTest.java | 2 +- .../client/BasicClusteredCacheTest.java | 4 +- .../client/ClusteredCacheDestroyTest.java | 2 +- .../clustered/client/docs/GettingStarted.java | 8 +- .../service/DefaultClusteringServiceTest.java | 68 ++-- .../store/ClusteredStoreProviderTest.java | 53 --- .../internal/store/ClusteredStoreTest.java | 2 +- .../store/EventualServerStoreProxyTest.java | 2 +- .../NoInvalidationServerStoreProxyTest.java | 2 +- .../store/StrongServerStoreProxyTest.java | 2 +- .../internal/ServerStoreConfiguration.java | 46 ++- .../src/test/java/org/ehcache/Clust.java | 54 --- .../clustered/BasicClusteredCacheOpsTest.java | 4 +- .../ClusteringManagementServiceTest.java | 27 +- .../clustered/server/ServerStoreImpl.java | 4 +- .../server/EhcacheActiveEntityTest.java | 8 +- .../server/ServerStoreCompatibilityTest.java | 75 ++-- .../main/java/org/ehcache/core/Ehcache.java | 15 +- .../statistics/CacheOperationOutcomes.java | 14 + .../statistics/TierOperationStatistic.java | 73 ++-- .../internal/store/disk/OffHeapDiskStore.java | 21 +- .../impl/internal/store/heap/OnHeapStore.java | 56 +-- .../store/offheap/AbstractOffHeapStore.java | 13 - .../internal/store/offheap/OffHeapStore.java | 21 +- .../java/org/ehcache/docs/GettingStarted.java | 7 - .../disk/OffHeapDiskStoreProviderTest.java | 10 +- .../store/disk/OffHeapDiskStoreTest.java | 2 +- .../offheap/OffHeapStoreProviderTest.java | 179 --------- .../integration/StoreStatisticsTest.java | 12 +- ...hcacheStatisticsProviderConfiguration.java | 10 + .../statistics/EhcacheStatisticsProvider.java | 6 +- .../statistics/StandardEhcacheStatistics.java | 42 +- .../java/org/ehcache/docs/ManagementTest.java | 315 ++++++++------- .../providers/statistics/EvictionTest.java | 179 +++++++++ .../StandardEhcacheStatisticsTest.java | 225 +++++++++++ .../providers/statistics/StatsUtil.java | 71 ++++ .../registry/DefaultCollectorServiceTest.java | 3 +- .../DefaultManagementRegistryServiceTest.java | 375 +++++++++++++----- .../DefaultSharedManagementServiceTest.java | 66 ++- 46 files changed, 1286 insertions(+), 954 deletions(-) delete mode 100644 clustered/integration-test/src/test/java/org/ehcache/Clust.java delete mode 100644 impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderTest.java create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java index 3965fbec02..162c37c119 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java +++ 
b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java @@ -22,11 +22,9 @@ import org.ehcache.core.statistics.BulkOps; import org.terracotta.context.ContextManager; import org.terracotta.context.TreeNode; -import org.terracotta.context.query.Matcher; import org.terracotta.context.query.Matchers; import org.terracotta.context.query.Query; import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.StatisticsManager; import org.terracotta.statistics.derived.LatencySampling; import org.terracotta.statistics.derived.MinMaxAverage; import org.terracotta.statistics.jsr166e.LongAdder; @@ -55,7 +53,7 @@ class Eh107CacheStatisticsMXBean extends Eh107MXBean implements javax.cache.mana private final OperationStatistic putIfAbsent; private final OperationStatistic replace; private final OperationStatistic conditionalRemove; - private final OperationStatistic authorityEviction; + private final OperationStatistic lowestTierEviction; private final Map bulkMethodEntries; private final LatencyMonitor averageGetTime; private final LatencyMonitor averagePutTime; @@ -71,7 +69,7 @@ class Eh107CacheStatisticsMXBean extends Eh107MXBean implements javax.cache.mana putIfAbsent = findCacheStatistic(cache, CacheOperationOutcomes.PutIfAbsentOutcome.class, "putIfAbsent"); replace = findCacheStatistic(cache, CacheOperationOutcomes.ReplaceOutcome.class, "replace"); conditionalRemove = findCacheStatistic(cache, CacheOperationOutcomes.ConditionalRemoveOutcome.class, "conditionalRemove"); - authorityEviction = findAuthoritativeTierStatistic(cache, StoreOperationOutcomes.EvictionOutcome.class, "eviction"); + lowestTierEviction = findLowestTierStatistic(cache, StoreOperationOutcomes.EvictionOutcome.class, "eviction"); averageGetTime = new LatencyMonitor(allOf(CacheOperationOutcomes.GetOutcome.class)); get.addDerivedStatistic(averageGetTime); @@ -139,7 +137,7 @@ public long getCacheRemovals() { @Override public long getCacheEvictions() { - return normalize(authorityEviction.sum(EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)) - compensatingCounters.cacheEvictions); + return normalize(lowestTierEviction.sum(EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)) - compensatingCounters.cacheEvictions); } @Override @@ -204,42 +202,39 @@ static > OperationStatistic findCacheStatistic(Cache return (OperationStatistic) result.iterator().next().getContext().attributes().get("this"); } - > OperationStatistic findAuthoritativeTierStatistic(Cache cache, Class type, String statName) { - Query storeQuery = queryBuilder() - .children() - .children() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.containsAll(Collections.singleton("store")); - } - }))))) - .build(); - - Set storeResult = storeQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); - if (storeResult.size() > 1) { - throw new RuntimeException("store result must be unique"); - } - if (storeResult.isEmpty()) { - throw new RuntimeException("store result must not be null"); - } - Object authoritativeTier = storeResult.iterator().next().getContext().attributes().get("authoritativeTier"); + > OperationStatistic findLowestTierStatistic(Cache cache, Class type, String statName) { Query statQuery = queryBuilder() - .children() + .descendants() .filter(context(attributes(Matchers.>allOf(hasAttribute("name", statName), hasAttribute("type", type))))) .build(); - Set statResult = 
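getCacheEvictions() above also shows how these MXBean statistics support clearing: the underlying OperationStatistic only ever grows, so a snapshot of its value is kept as a compensating counter and subtracted on every read, with normalize(..) presumably guarding the result against going negative. A standalone sketch of that reset-by-baseline idea, using a hypothetical monotonic counter:

import java.util.concurrent.atomic.AtomicLong;

final class ResettableView {

  private final AtomicLong monotonicCounter = new AtomicLong();  // stand-in for OperationStatistic.sum()
  private volatile long baseline = 0L;

  void record() {
    monotonicCounter.incrementAndGet();
  }

  // "Clearing" never touches the underlying counter; it only moves the baseline.
  void clear() {
    baseline = monotonicCounter.get();
  }

  long value() {
    long delta = monotonicCounter.get() - baseline;
    return delta < 0 ? 0 : delta;     // never report a negative count
  }

  public static void main(String[] args) {
    ResettableView evictions = new ResettableView();
    evictions.record();
    evictions.record();
    evictions.clear();
    evictions.record();
    System.out.println(evictions.value());   // prints: 1
  }
}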
statQuery.execute(Collections.singleton(StatisticsManager.nodeFor(authoritativeTier))); - if (statResult.size() > 1) { - throw new RuntimeException("stat result must be unique"); + Set statResult = statQuery.execute(Collections.singleton(ContextManager.nodeFor(cache))); + + if(statResult.size() < 1) { + throw new RuntimeException("Failed to find lowest tier statistic: " + statName + " , valid result Set sizes must 1 or more. Found result Set size of: " + statResult.size()); + } + + //if only 1 store then you don't need to find the lowest tier + if(statResult.size() == 1) { + return (OperationStatistic) statResult.iterator().next().getContext().attributes().get("this"); } - if (statResult.isEmpty()) { - throw new RuntimeException("stat result must not be null"); + + String lowestStoreType = "onheap"; + TreeNode lowestTierNode = null; + for(TreeNode treeNode : statResult) { + if(((Set)treeNode.getContext().attributes().get("tags")).size() != 1) { + throw new RuntimeException("Failed to find lowest tier statistic. \"tags\" set must be size 1"); + } + + String storeType = treeNode.getContext().attributes().get("tags").toString(); + if(storeType.compareToIgnoreCase(lowestStoreType) < 0) { + lowestStoreType = treeNode.getContext().attributes().get("tags").toString(); + lowestTierNode = treeNode; + } } - return (OperationStatistic) statResult.iterator().next().getContext().attributes().get("this"); + return (OperationStatistic)lowestTierNode.getContext().attributes().get("this"); } class CompensatingCounters { diff --git a/build.gradle b/build.gradle index effe4b6fda..da5b5c16d0 100644 --- a/build.gradle +++ b/build.gradle @@ -20,8 +20,7 @@ ext { // Third parties offheapVersion = '2.2.2' - managementVersion = '5.0.0.beta' - statisticVersion = '1.1-SNAPSHOT' + statisticVersion = '1.2.0' jcacheVersion = '1.0.0' slf4jVersion = '1.7.7' sizeofVersion = '0.3.0' @@ -59,7 +58,6 @@ subprojects { targetCompatibility = 1.6 repositories { - mavenLocal() mavenCentral() maven { url "http://repo.terracotta.org/maven2" } } @@ -92,7 +90,7 @@ subprojects { } test { - maxHeapSize = "512m" + maxHeapSize = "1024m" systemProperty 'java.awt.headless', 'true' if (parent.isCloudbees) { systemProperty 'disable.concurrent.tests', 'true' diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java b/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java index f30724c5d7..7faa760d85 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/config/ClusteredStoreConfiguration.java @@ -25,17 +25,13 @@ */ public class ClusteredStoreConfiguration implements ServiceConfiguration { - public static final Consistency DEFAULT_CONSISTENCY = Consistency.EVENTUAL; - public static final int DEFAULT_CONCURRENCY = 16; - private final Consistency consistency; - private final int concurrency; /** * Creates a new configuration with consistency set to {@link Consistency#EVENTUAL EVENTUAL}. */ public ClusteredStoreConfiguration() { - this(DEFAULT_CONSISTENCY, DEFAULT_CONCURRENCY); + this(Consistency.EVENTUAL); } /** @@ -44,27 +40,7 @@ public ClusteredStoreConfiguration() { * @param consistency the {@code Consistency} */ public ClusteredStoreConfiguration(Consistency consistency) { - this(consistency, DEFAULT_CONCURRENCY); - } - - /** - * Creates a new configuration with the provided concurrency. 
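findLowestTierStatistic above picks the eviction statistic of the lowest (authoritative) tier by comparing the single tier tag of each matching node; it relies on the tag names happening to sort in tier order case-insensitively ("disk" < "offheap" < "onheap"). A standalone sketch of that selection rule over plain tag strings:

import java.util.Arrays;
import java.util.List;

final class LowestTierSelector {

  // Returns the tag that sorts first case-insensitively; with Ehcache's tier tags this
  // is the lowest tier ("disk" < "offheap" < "onheap").
  static String lowestTier(List<String> tierTags) {
    String lowest = "onheap";                 // highest possible tier as the starting point
    for (String tag : tierTags) {
      if (tag.compareToIgnoreCase(lowest) < 0) {
        lowest = tag;
      }
    }
    return lowest;
  }

  public static void main(String[] args) {
    System.out.println(lowestTier(Arrays.asList("OnHeap", "OffHeap", "Disk")));  // prints: Disk
  }
}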
- * - * @param concurrency the concurrency - */ - public ClusteredStoreConfiguration(int concurrency) { - this(DEFAULT_CONSISTENCY, concurrency); - } - - /** - * Creates a new configuration with the provided {@link Consistency} and concurrency. - * - * @param consistency the {@code Consistency} - * @param concurrency the concurrency - */ - public ClusteredStoreConfiguration(Consistency consistency, int concurrency) { this.consistency = consistency; - this.concurrency = concurrency; } /** @@ -83,8 +59,4 @@ public Class getServiceType() { public Consistency getConsistency() { return consistency; } - - public int getConcurrency() { - return concurrency; - } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java b/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java index 8cb147a747..66c2a12d65 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/config/builders/ClusteredStoreConfigurationBuilder.java @@ -26,7 +26,6 @@ public class ClusteredStoreConfigurationBuilder implements Builder { private final Consistency consistency; - private final int concurrency; /** * Creates a new builder instance with the provided {@link Consistency} configured. @@ -34,13 +33,12 @@ public class ClusteredStoreConfigurationBuilder implements Builder parseServiceConfiguration(Element fragment) { if (CLUSTERED_STORE_ELEMENT_NAME.equals(fragment.getLocalName())) { if (fragment.hasAttribute(CONSISTENCY_ATTRIBUTE_NAME)) { - return new ClusteredStoreConfiguration(Consistency.valueOf(fragment.getAttribute("consistency").toUpperCase()), 16); + return new ClusteredStoreConfiguration(Consistency.valueOf(fragment.getAttribute("consistency").toUpperCase())); } else { return new ClusteredStoreConfiguration(); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 4c770e3cda..694e3500bc 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -340,7 +340,7 @@ protected boolean isStarted() { @Override public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifier cacheIdentifier, final Store.Configuration storeConfig, - Consistency configuredConsistency, int configuredConcurrency) throws CachePersistenceException { + Consistency configuredConsistency) throws CachePersistenceException { final String cacheId = cacheIdentifier.getId(); if (configuredConsistency == null) { @@ -372,8 +372,7 @@ public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifie null, // TODO: Need actual value type -- cache wrappers can wrap key/value types (storeConfig.getKeySerializer() == null ? null : storeConfig.getKeySerializer().getClass().getName()), (storeConfig.getValueSerializer() == null ? 
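With concurrency gone, the configuration and its builder above reduce to carrying a single Consistency value. The shape of that pair, reduced to a self-contained sketch with a local Consistency enum standing in for the real one:

final class ConsistencyConfigSketch {

  enum Consistency { EVENTUAL, STRONG }

  // Immutable configuration holding only the consistency, defaulting to EVENTUAL.
  static final class StoreConfiguration {
    private final Consistency consistency;
    StoreConfiguration() { this(Consistency.EVENTUAL); }
    StoreConfiguration(Consistency consistency) { this.consistency = consistency; }
    Consistency getConsistency() { return consistency; }
  }

  // Mirrors the ClusteredStoreConfigurationBuilder.withConsistency(..).build() shape.
  static final class Builder {
    private final Consistency consistency;
    private Builder(Consistency consistency) { this.consistency = consistency; }
    static Builder withConsistency(Consistency consistency) { return new Builder(consistency); }
    StoreConfiguration build() { return new StoreConfiguration(consistency); }
  }

  public static void main(String[] args) {
    StoreConfiguration config = Builder.withConsistency(Consistency.STRONG).build();
    System.out.println(config.getConsistency());   // prints: STRONG
  }
}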
null : storeConfig.getValueSerializer().getClass().getName()), - configuredConsistency, - configuredConcurrency + configuredConsistency ); try { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index f807f5e5d5..9dd06ea84b 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -79,7 +79,6 @@ import java.util.concurrent.Callable; import java.util.concurrent.TimeoutException; -import static java.util.Collections.singleton; import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; import static org.ehcache.core.statistics.TierOperationStatistic.set; @@ -91,6 +90,7 @@ public class ClusteredStore implements AuthoritativeTier { private static final String STATISTICS_TAG = "Clustered"; + private static final int TIER_HEIGHT = ClusteredResourceType.Types.UNKNOWN.getTierHeight(); //TierHeight is the same for all ClusteredResourceType.Types private final OperationsCodec codec; private final ChainResolver resolver; @@ -100,7 +100,6 @@ public class ClusteredStore implements AuthoritativeTier { private volatile ServerStoreProxy storeProxy; private volatile InvalidationValve invalidationValve; - private final ClusteredStoreStatsSettings clusteredStoreStatsSettings; private final OperationObserver getObserver; private final OperationObserver putObserver; private final OperationObserver removeObserver; @@ -117,8 +116,6 @@ private ClusteredStore(final OperationsCodec codec, final ChainResolver ClusteredStore createStore(final Configuration storeCo TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS)); - }}, "get", 1000, "get"); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); + }}, "get", TIER_HEIGHT, "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", 1000, "eviction"); + }}, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -629,9 +626,8 @@ private ClusteredStore createStoreInternal(Configuration stor ClusteredStore store = new ClusteredStore(codec, resolver, timeSource); - StatisticsManager.associate(store.clusteredStoreStatsSettings).withParent(store); - createdStores.put(store, new StoreConfig(cacheId, storeConfig, 
clusteredStoreConfiguration.getConsistency(), clusteredStoreConfiguration.getConcurrency())); + createdStores.put(store, new StoreConfig(cacheId, storeConfig, clusteredStoreConfiguration.getConsistency())); return store; } @@ -642,7 +638,8 @@ public void releaseStore(final Store resource) { } ClusteredStore clusteredStore = (ClusteredStore)resource; this.clusteringService.releaseServerStoreProxy(clusteredStore.storeProxy); - StatisticsManager.dissociate(clusteredStore.clusteredStoreStatsSettings).fromParent(clusteredStore); + StatisticsManager.nodeFor(clusteredStore).clean(); + tierOperationStatistics.remove(clusteredStore); } @Override @@ -750,19 +747,17 @@ public AuthoritativeTier createAuthoritativeTier(Configuration get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, authoritativeTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS)); - }}, "get", 1000, "getAndFault"); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT)); + }}, "get", TIER_HEIGHT, "getAndFault", STATISTICS_TAG); StatisticsManager.associate(get).withParent(authoritativeTier); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, authoritativeTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", 1000, "eviction"); + }}, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(authoritativeTier); - tieredOps.add(evict); - + tieredOps.add(get); tierOperationStatistics.put(authoritativeTier, tieredOps); return authoritativeTier; @@ -784,13 +779,11 @@ private static class StoreConfig { private final ClusteredCacheIdentifier cacheIdentifier; private final Store.Configuration storeConfig; private final Consistency consistency; - private final int concurrency; - StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration storeConfig, Consistency consistency, int concurrency) { + StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration storeConfig, Consistency consistency) { this.cacheIdentifier = cacheIdentifier; this.storeConfig = storeConfig; this.consistency = consistency; - this.concurrency = concurrency; } public Configuration getStoreConfig() { @@ -804,22 +797,5 @@ public ClusteredCacheIdentifier getCacheIdentifier() { public Consistency getConsistency() { return consistency; } - - public int getConcurrency() { - return concurrency; - } } - - private static final class ClusteredStoreStatsSettings { - @ContextAttribute("tags") private final Set tags = singleton("store"); - @ContextAttribute("authoritativeTier") private final ClusteredStore authoritativeTier; - - ClusteredStoreStatsSettings(ClusteredStore store) { - this.authoritativeTier = store; - } - - //@ContextAttribute("tags") private final Set tags = new 
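The TierOperationStatistic wiring above translates store-specific outcomes into the two tier-level outcomes (HIT/MISS, or SUCCESS/FAILURE for eviction) and tags the derived statistic with the tier height and tier name so consumers can tell tiers apart. A reduced sketch of that outcome aliasing, with local enums in place of the store and tier outcome types:

import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;

final class TierOutcomeAliasing {

  enum StoreGetOutcome { HIT, MISS, TIMEOUT }          // store-specific outcomes (local stand-in)
  enum TierGetOutcome { HIT, MISS }                    // tier-level view exposed to management

  // e.g. MISS and TIMEOUT at the store level are both reported as a tier-level MISS.
  static final Map<TierGetOutcome, Set<StoreGetOutcome>> ALIASES =
      new EnumMap<TierGetOutcome, Set<StoreGetOutcome>>(TierGetOutcome.class);
  static {
    ALIASES.put(TierGetOutcome.HIT, EnumSet.of(StoreGetOutcome.HIT));
    ALIASES.put(TierGetOutcome.MISS, EnumSet.of(StoreGetOutcome.MISS, StoreGetOutcome.TIMEOUT));
  }

  static TierGetOutcome translate(StoreGetOutcome storeOutcome) {
    for (Map.Entry<TierGetOutcome, Set<StoreGetOutcome>> entry : ALIASES.entrySet()) {
      if (entry.getValue().contains(storeOutcome)) {
        return entry.getKey();
      }
    }
    throw new IllegalArgumentException("Unmapped outcome: " + storeOutcome);
  }

  public static void main(String[] args) {
    System.out.println(translate(StoreGetOutcome.TIMEOUT));   // prints: MISS
  }
}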
HashSet(Arrays.asList("store")); - @ContextAttribute("discriminator") private final String discriminator = STATISTICS_TAG; - } - } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java index 992b0965ae..62babd8882 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheExpiryTest.java @@ -55,7 +55,7 @@ public class BasicClusteredCacheExpiryTest { ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .withExpiry(Expirations.timeToLiveExpiration(new Duration(1L, TimeUnit.MILLISECONDS))) - .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 16))); + .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); @Before public void definePassthroughServer() throws Exception { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java index ea6e3eb196..c08fd5dac8 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/BasicClusteredCacheTest.java @@ -91,7 +91,7 @@ public void testClusteredCacheTwoClients() throws Exception { .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG, 1))) + .add(new ClusteredStoreConfiguration(Consistency.STRONG))) ; final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); @@ -120,7 +120,7 @@ public void testClustered3TierCacheTwoClients() throws Exception { .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).offheap(1, MemoryUnit.MB) .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG, 1))) + .add(new ClusteredStoreConfiguration(Consistency.STRONG))) ; final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java index de3eb3adc8..8e6b78909f 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java @@ -64,7 +64,7 @@ public class ClusteredCacheDestroyTest { .withCache(CLUSTERED_CACHE, newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 1))); + .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG))); @Before public void 
definePassthroughServer() throws Exception { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java b/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java index 66030740cf..0e6de35959 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java @@ -133,7 +133,7 @@ public void explicitConsistencyConfiguration() throws Exception { CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 16)) // <1> + .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) // <1> .build(); Cache cache = cacheManager.createCache("clustered-cache", config); @@ -162,7 +162,7 @@ public void clusteredCacheTieredExample() throws Exception { ResourcePoolsBuilder.newResourcePoolsBuilder() .heap(2, MemoryUnit.MB) // <1> .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) // <2> - .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 1)) + .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) .build(); Cache cache = cacheManager.createCache("clustered-cache", config); @@ -227,7 +227,7 @@ public void unknownClusteredCacheExample() CacheConfiguration cacheConfigDedicated = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) // <2> - .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 1)) + .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) .build(); Cache cacheDedicated = cacheManager1.createCache("my-dedicated-cache", cacheConfigDedicated); // <3> @@ -243,7 +243,7 @@ public void unknownClusteredCacheExample() CacheConfiguration cacheConfigUnspecified = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clustered())) // <5> - .add(ClusteredStoreConfigurationBuilder.withConsistencyAndConcurrency(Consistency.STRONG, 1)) + .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) .build(); Cache cacheUnspecified = cacheManager2.createCache("my-dedicated-cache", cacheConfigUnspecified); // <6> diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java index a76c6d847c..b1169a3c72 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java @@ -582,7 +582,7 @@ public void testGetServerStoreProxySharedAutoCreate() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = service.getServerStoreProxy( - 
getClusteredCacheIdentifier(service, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(service, cacheAlias), storeConfiguration, Consistency.EVENTUAL); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); @@ -643,7 +643,7 @@ public void testGetServerStoreProxySharedNoAutoCreateNonExistent() throws Except try { accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(accessService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); fail("Expecting CachePersistenceException"); } catch (CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString(" does not exist")); @@ -691,7 +691,7 @@ public void testGetServerStoreProxySharedNoAutoCreateExists() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy creationServerStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL); assertThat(creationServerStoreProxy.getCacheId(), is(cacheAlias)); creationService.stop(); @@ -723,7 +723,7 @@ public void testGetServerStoreProxySharedNoAutoCreateExists() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy accessServerStoreProxy = accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfiguration, Consistency.EVENTUAL); assertThat(accessServerStoreProxy.getCacheId(), is(cacheAlias)); activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -772,7 +772,7 @@ public void testGetServerStoreProxySharedAutoCreateTwice() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy firstServerStoreProxy = firstService.getServerStoreProxy( - getClusteredCacheIdentifier(firstService, cacheAlias), firstSharedStoreConfig, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(firstService, cacheAlias), firstSharedStoreConfig, Consistency.EVENTUAL); assertThat(firstServerStoreProxy.getCacheId(), is(cacheAlias)); DefaultClusteringService secondService = new DefaultClusteringService(configuration); @@ -782,7 +782,7 @@ public void testGetServerStoreProxySharedAutoCreateTwice() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy secondServerStoreProxy = secondService.getServerStoreProxy( - getClusteredCacheIdentifier(secondService, cacheAlias), secondSharedStoreConfig, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(secondService, cacheAlias), secondSharedStoreConfig, Consistency.EVENTUAL); assertThat(secondServerStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -837,7 +837,7 @@ public void testReleaseServerStoreProxyShared() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); + 
getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -887,7 +887,7 @@ public void testGetServerStoreProxyDedicatedAutoCreate() throws Exception { getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = service.getServerStoreProxy( - getClusteredCacheIdentifier(service, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(service, cacheAlias), storeConfiguration, Consistency.EVENTUAL); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); @@ -950,7 +950,7 @@ public void testGetServerStoreProxyDedicatedNoAutoCreateNonExistent() throws Exc try { accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(accessService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); fail("Expecting CachePersistenceException"); } catch (CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString(" does not exist")); @@ -1000,7 +1000,7 @@ public void testGetServerStoreProxyDedicatedNoAutoCreateExists() throws Exceptio getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy creationServerStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL); assertThat(creationServerStoreProxy.getCacheId(), is(cacheAlias)); creationService.stop(); @@ -1032,7 +1032,7 @@ public void testGetServerStoreProxyDedicatedNoAutoCreateExists() throws Exceptio getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy accessServerStoreProxy = accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfiguration, Consistency.EVENTUAL); assertThat(accessServerStoreProxy.getCacheId(), is(cacheAlias)); activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -1083,7 +1083,7 @@ public void testGetServerStoreProxyDedicatedAutoCreateTwice() throws Exception { getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy firstServerStoreProxy = firstService.getServerStoreProxy( - getClusteredCacheIdentifier(firstService, cacheAlias), firstSharedStoreConfig, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(firstService, cacheAlias), firstSharedStoreConfig, Consistency.EVENTUAL); assertThat(firstServerStoreProxy.getCacheId(), is(cacheAlias)); DefaultClusteringService secondService = new DefaultClusteringService(configuration); @@ -1093,7 +1093,7 @@ public void testGetServerStoreProxyDedicatedAutoCreateTwice() throws Exception { getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy secondServerStoreProxy = secondService.getServerStoreProxy( - getClusteredCacheIdentifier(secondService, cacheAlias), secondSharedStoreConfig, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(secondService, cacheAlias), 
secondSharedStoreConfig, Consistency.EVENTUAL); assertThat(secondServerStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -1153,7 +1153,7 @@ public void testReleaseServerStoreProxyDedicated() throws Exception { getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -1231,7 +1231,7 @@ public void testGetServerStoreProxySharedDestroy() throws Exception { getSharedStoreConfig(targetPool, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -1285,7 +1285,7 @@ public void testGetServerStoreProxyDedicatedDestroy() throws Exception { getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); ServerStoreProxy serverStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -1375,14 +1375,14 @@ public void testFullDestroyAll() throws Exception { getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, String.class); ServerStoreProxy sharedProxy = createService.getServerStoreProxy( - getClusteredCacheIdentifier(createService, "sharedCache"), sharedStoreConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(createService, "sharedCache"), sharedStoreConfiguration, Consistency.EVENTUAL); assertThat(sharedProxy.getCacheId(), is("sharedCache")); Store.Configuration storeConfiguration = getDedicatedStoreConfig("serverResource2", serializationProvider, Long.class, String.class); ServerStoreProxy dedicatedProxy = createService.getServerStoreProxy( - getClusteredCacheIdentifier(createService, "dedicatedCache"), storeConfiguration, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(createService, "dedicatedCache"), storeConfiguration, Consistency.EVENTUAL); assertThat(dedicatedProxy.getCacheId(), is("dedicatedCache")); createService.stop(); @@ -1474,7 +1474,7 @@ public void testStoreValidation_autoCreateConfigGood_autoCreateConfigBad() throw ClusteredCacheIdentifier clusteredCacheIdentifier = getClusteredCacheIdentifier(creationService, cacheAlias); - creationService.getServerStoreProxy(clusteredCacheIdentifier, createStoreConfig, Consistency.EVENTUAL,1 ); + creationService.getServerStoreProxy(clusteredCacheIdentifier, createStoreConfig, Consistency.EVENTUAL); creationService.stop(); @@ -1485,7 +1485,7 @@ public void 
testStoreValidation_autoCreateConfigGood_autoCreateConfigBad() throw getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, Long.class);//ValueType is invalid try { - accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfigBad, Consistency.EVENTUAL, 1); + accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfigBad, Consistency.EVENTUAL); fail("Expecting CachePersistenceException"); } catch(CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString("Existing ServerStore configuration is not compatible with the desired configuration")); @@ -1533,14 +1533,14 @@ public void testStoreValidation_autoCreateConfigGood_autoCreateConfigGood() thro Store.Configuration storeConfig = getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, String.class); - creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), storeConfig, Consistency.EVENTUAL, 1); + creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), storeConfig, Consistency.EVENTUAL); creationService.stop(); DefaultClusteringService accessService = new DefaultClusteringService(config); accessService.start(null); - accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), storeConfig, Consistency.EVENTUAL, 1); + accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), storeConfig, Consistency.EVENTUAL); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); assertThat(activeEntities.size(), is(1)); @@ -1586,7 +1586,7 @@ public void testStoreValidation_autoCreateConfigBad() throws Exception { ClusteredCacheIdentifier clusteredCacheIdentifier = getClusteredCacheIdentifier(creationService, cacheAlias); try { - creationService.getServerStoreProxy(clusteredCacheIdentifier, storeConfig, Consistency.EVENTUAL, 1); + creationService.getServerStoreProxy(clusteredCacheIdentifier, storeConfig, Consistency.EVENTUAL); fail("Expecting CachePersistenceException"); } catch(CachePersistenceException e) { //Expected @@ -1632,7 +1632,7 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigBad() thr Store.Configuration creationStoreConfig = getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, String.class); - creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL, 1); + creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), creationStoreConfig, Consistency.EVENTUAL); ClusteringServiceConfiguration noAutoConfig = ClusteringServiceConfigurationBuilder.cluster(URI.create(CLUSTER_URI_BASE + "my-application")) @@ -1650,7 +1650,7 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigBad() thr getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, Long.class); try { - accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL, 1); + accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL); fail("Expecting CachePersistenceException"); } catch(CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString("Existing ServerStore configuration is not compatible with the desired 
configuration")); @@ -1699,7 +1699,7 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigGood() th Store.Configuration storeConfig = getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, String.class); - creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), storeConfig, Consistency.EVENTUAL, 1); + creationService.getServerStoreProxy(getClusteredCacheIdentifier(creationService, cacheAlias), storeConfig, Consistency.EVENTUAL); ClusteringServiceConfiguration noAutoConfig = @@ -1714,7 +1714,7 @@ public void testStoreValidation_autoCreateConfigGood_noAutoCreateConfigGood() th DefaultClusteringService accessService = new DefaultClusteringService(noAutoConfig); accessService.start(null); - accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), storeConfig, Consistency.EVENTUAL, 1); + accessService.getServerStoreProxy(getClusteredCacheIdentifier(accessService, cacheAlias), storeConfig, Consistency.EVENTUAL); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); assertThat(activeEntities.size(), is(1)); @@ -1759,7 +1759,7 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredDedicatedValidateS getDedicatedStoreConfig("serverResource1", serializationProvider, Long.class, String.class); creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, "cacheAlias"), createStoreConfig, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(creationService, "cacheAlias"), createStoreConfig, Consistency.EVENTUAL); creationService.stop(); @@ -1779,7 +1779,7 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredDedicatedValidateS try { accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL); fail("Expecting CachePersistenceException"); } catch (CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString("Existing ServerStore configuration is not compatible with the desired configuration")); @@ -1827,7 +1827,7 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredSharedValidateDedi getSharedStoreConfig("sharedPrimary", serializationProvider, Long.class, String.class); creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), createStoreConfig, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(creationService, cacheAlias), createStoreConfig, Consistency.EVENTUAL); creationService.stop(); @@ -1847,7 +1847,7 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredSharedValidateDedi try { accessService.getServerStoreProxy( - getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL, 1); + getClusteredCacheIdentifier(accessService, cacheAlias), accessStoreConfig, Consistency.EVENTUAL); fail("Expecting CachePersistenceException"); } catch (CachePersistenceException e) { assertThat(getRootCause(e).getMessage(), containsString("Existing ServerStore configuration is not compatible with the desired configuration")); @@ -1931,7 +1931,7 @@ public void testGetServerStoreProxyReturnsEventualStore() throws Exception { when(storeConfig.getKeyType()).thenReturn(String.class); when(storeConfig.getValueType()).thenReturn(Object.class); - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, 
Consistency.EVENTUAL, 1); + ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.EVENTUAL); assertThat(serverStoreProxy, instanceOf(EventualServerStoreProxy.class)); } @@ -1954,7 +1954,7 @@ public void testGetServerStoreProxyReturnsEventualStoreByDefault() throws Except when(storeConfig.getKeyType()).thenReturn(String.class); when(storeConfig.getValueType()).thenReturn(Object.class); - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.EVENTUAL, 1); + ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.EVENTUAL); assertThat(serverStoreProxy, instanceOf(EventualServerStoreProxy.class)); } @@ -1977,7 +1977,7 @@ public void testGetServerStoreProxyReturnsStrongStore() throws Exception { when(storeConfig.getKeyType()).thenReturn(String.class); when(storeConfig.getValueType()).thenReturn(Object.class); - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG, 1); + ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); assertThat(serverStoreProxy, instanceOf(StrongServerStoreProxy.class)); } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java index c52a1b7067..f63a5a5a45 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java @@ -25,14 +25,12 @@ import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; -import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.config.ResourcePoolsImpl; import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.core.spi.store.Store; import org.ehcache.expiry.Expirations; import org.ehcache.expiry.Expiry; -import org.ehcache.impl.internal.DefaultTimeSourceService; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.OffHeapStore; @@ -41,32 +39,17 @@ import org.ehcache.impl.serialization.StringSerializer; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.ServiceConfiguration; -import org.junit.Before; import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.context.query.Matcher; -import org.terracotta.context.query.Matchers; -import org.terracotta.context.query.Query; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; - -import static com.sun.corba.se.impl.util.RepositoryId.cache; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.*; import static org.mockito.Mockito.mock; -import static org.terracotta.context.query.Matchers.attributes; -import static org.terracotta.context.query.Matchers.context; -import static 
org.terracotta.context.query.Matchers.hasAttribute; -import static org.terracotta.context.query.QueryBuilder.queryBuilder; /** * Provides basic tests for {@link org.ehcache.clustered.client.internal.store.ClusteredStore.Provider ClusteredStore.Provider}. @@ -140,42 +123,6 @@ public void testAuthoritativeRank() throws Exception { assertThat(provider.rankAuthority(new UnmatchedResourceType(), Collections.EMPTY_LIST), is(0)); } - @Test - public void testStatisticsAssociations() throws Exception { - ClusteredStore.Provider provider = new ClusteredStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator( - new TieredStore.Provider(), - new OnHeapStore.Provider(), - new OffHeapStore.Provider(), - new OffHeapDiskStore.Provider(), - new DefaultTimeSourceService(null), - mock(ClusteringService.class)); - provider.start(serviceLocator); - - ClusteredStore store = provider.createStore(getStoreConfig()); - - Query storeQuery = queryBuilder() - .children() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.containsAll(Collections.singleton("store")); - } - }))))) - .build(); - - Set nodes = Collections.singleton(ContextManager.nodeFor(store)); - - Set storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(false)); - - provider.releaseStore(store); - - storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(true)); - } - private void assertRank(final Store.Provider provider, final int expectedRank, final ResourceType... resources) { final List> serviceConfigs = Collections.emptyList(); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java index adff0716a9..824ac1e872 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java @@ -99,7 +99,7 @@ public void setup() throws Exception { Long.class.getName(), String.class.getName(), Long.class.getName(), String.class.getName(), LongSerializer.class.getName(), StringSerializer.class.getName(), - null, 1 + null ); clientEntity.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); ServerStoreMessageFactory factory = new ServerStoreMessageFactory(CACHE_IDENTIFIER); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java index 22bd27bf08..347621cf9f 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java @@ -81,7 +81,7 @@ public static void setUp() throws Exception { ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), Consistency.STRONG, 1); + .getName(), Consistency.STRONG); clientEntity1.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); // required to attach the store to the client diff --git 
a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java index 567c9af212..1f9b9fe01c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java @@ -71,7 +71,7 @@ public static void setUp() throws Exception { clientEntity.createCache(CACHE_IDENTIFIER, new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), null, 1)); + .getName(), null)); serverStoreProxy = new NoInvalidationServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity); } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java index e4c390e405..cc0be0eec9 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java @@ -89,7 +89,7 @@ public static void setUp() throws Exception { ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), Consistency.STRONG, 1); + .getName(), Consistency.STRONG); clientEntity1.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); // required to attach the store to the client diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java index f4ab359deb..0e30d7ef94 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/ServerStoreConfiguration.java @@ -39,7 +39,6 @@ public class ServerStoreConfiguration implements Serializable { private final String keySerializerType; private final String valueSerializerType; private final Consistency consistency; - private final int concurrency; // TODO: Loader/Writer configuration ... 
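// Editorial sketch, not part of this patch: once the concurrency field is dropped, clients
// build a ServerStoreConfiguration from the eight remaining arguments only. The concrete
// type and serializer names below are illustrative, borrowed from the test changes in this
// same patch series; only the shape of the call is the point.
//
//   ServerStoreConfiguration cfg = new ServerStoreConfiguration(
//       resourcePool.getPoolAllocation(),   // pool allocation (dedicated or shared)
//       Long.class.getName(),               // stored key type
//       String.class.getName(),             // stored value type
//       Long.class.getName(),               // actual key type
//       String.class.getName(),             // actual value type
//       LongSerializer.class.getName(),     // key serializer
//       StringSerializer.class.getName(),   // value serializer
//       Consistency.EVENTUAL);              // consistency only; no trailing concurrency int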
public ServerStoreConfiguration(PoolAllocation poolAllocation, @@ -49,8 +48,7 @@ public ServerStoreConfiguration(PoolAllocation poolAllocation, String actualValueType, String keySerializerType, String valueSerializerType, - Consistency consistency, - int concurrency) { + Consistency consistency) { this.poolAllocation = poolAllocation; this.storedKeyType = storedKeyType; this.storedValueType = storedValueType; @@ -59,7 +57,6 @@ public ServerStoreConfiguration(PoolAllocation poolAllocation, this.keySerializerType = keySerializerType; this.valueSerializerType = valueSerializerType; this.consistency = consistency; - this.concurrency = concurrency; } public PoolAllocation getPoolAllocation() { @@ -94,20 +91,17 @@ public Consistency getConsistency() { return consistency; } - public int getConcurrency() { - return concurrency; - } - public boolean isCompatible(ServerStoreConfiguration otherConfiguration, StringBuilder sb) { + boolean isCompatible; PoolAllocation otherPoolAllocation = otherConfiguration.getPoolAllocation(); isCompatible = comparePoolAllocationType(sb, otherPoolAllocation); - if (isCompatible) { - if (!(otherPoolAllocation instanceof PoolAllocation.Unknown)) { + if(isCompatible) { + if( !(otherPoolAllocation instanceof PoolAllocation.Unknown) ) { if (poolAllocation instanceof PoolAllocation.Dedicated) { - PoolAllocation.Dedicated serverDedicatedAllocation = (PoolAllocation.Dedicated) poolAllocation; - PoolAllocation.Dedicated clientDedicatedAllocation = (PoolAllocation.Dedicated) otherPoolAllocation; + PoolAllocation.Dedicated serverDedicatedAllocation = (PoolAllocation.Dedicated)poolAllocation; + PoolAllocation.Dedicated clientDedicatedAllocation = (PoolAllocation.Dedicated)otherPoolAllocation; if (compareField(sb, "resourcePoolDedicatedResourceName", serverDedicatedAllocation.getResourceName(), clientDedicatedAllocation.getResourceName())) { @@ -120,8 +114,8 @@ public boolean isCompatible(ServerStoreConfiguration otherConfiguration, StringB } } else if (poolAllocation instanceof PoolAllocation.Shared) { isCompatible &= compareField(sb, "resourcePoolSharedPoolName", - ((PoolAllocation.Shared) poolAllocation).getResourcePoolName(), - ((PoolAllocation.Shared) otherPoolAllocation).getResourcePoolName()); + ((PoolAllocation.Shared)poolAllocation).getResourcePoolName(), + ((PoolAllocation.Shared)otherPoolAllocation).getResourcePoolName()); } } } @@ -131,13 +125,13 @@ public boolean isCompatible(ServerStoreConfiguration otherConfiguration, StringB isCompatible &= compareField(sb, "actualValueType", actualValueType, otherConfiguration.getActualValueType()); isCompatible &= compareField(sb, "keySerializerType", keySerializerType, otherConfiguration.getKeySerializerType()); isCompatible &= compareField(sb, "valueSerializerType", valueSerializerType, otherConfiguration.getValueSerializerType()); - isCompatible &= compareField(sb, "consistency", consistency, otherConfiguration.getConsistency()); - isCompatible &= compareField(sb, "concurrency", concurrency, otherConfiguration.getConcurrency()); + isCompatible &= compareConsistencyField(sb, consistency, otherConfiguration.getConsistency()); return isCompatible; } - private boolean comparePoolAllocationType(StringBuilder sb, PoolAllocation clientPoolAllocation) { + private boolean comparePoolAllocationType(StringBuilder sb, PoolAllocation clientPoolAllocation) { + if (clientPoolAllocation instanceof PoolAllocation.Unknown || poolAllocation.getClass().getName().equals(clientPoolAllocation.getClass().getName())) { return true; } @@ -146,15 
+140,25 @@ private boolean comparePoolAllocationType(StringBuilder sb, PoolAllocation clien return false; } - private static String getClassName(Object obj) { - if (obj != null) { + private String getClassName(Object obj) { + if(obj != null) { return obj.getClass().getName(); } else { return null; } } - private static boolean compareField(StringBuilder sb, String fieldName, Object serverConfigValue, Object clientConfigValue) { + private boolean compareConsistencyField(StringBuilder sb, Consistency serverConsistencyValue, Consistency clientConsistencyValue) { + if((serverConsistencyValue == null && clientConsistencyValue == null) + || (serverConsistencyValue != null && serverConsistencyValue.equals(clientConsistencyValue))) { + return true; + } + + appendFault(sb, "consistencyType", serverConsistencyValue, clientConsistencyValue); + return false; + } + + private boolean compareField(StringBuilder sb, String fieldName, String serverConfigValue, String clientConfigValue) { if ((serverConfigValue == null && clientConfigValue == null) || (serverConfigValue != null && serverConfigValue.equals(clientConfigValue))) { return true; @@ -164,7 +168,7 @@ private static boolean compareField(StringBuilder sb, String fieldName, Object s return false; } - private static void appendFault(StringBuilder sb, String fieldName, Object serverConfigValue, Object clientConfigValue) { + private void appendFault(StringBuilder sb, String fieldName, Object serverConfigValue, Object clientConfigValue) { sb.append("\n\t").append(fieldName) .append(" existing: ").append(serverConfigValue) .append(", desired: ").append(clientConfigValue); diff --git a/clustered/integration-test/src/test/java/org/ehcache/Clust.java b/clustered/integration-test/src/test/java/org/ehcache/Clust.java deleted file mode 100644 index ba9847270a..0000000000 --- a/clustered/integration-test/src/test/java/org/ehcache/Clust.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.ehcache; - -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.MemoryUnit; -import org.junit.Test; - -import java.net.URI; - -/** - * @author Ludovic Orban - */ -public class Clust { - - @Test - public void works() throws Exception { - CacheManagerBuilder clusteredCacheManagerBuilder - = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("clustered-cache-works", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder() - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 16, MemoryUnit.MB)) - ) -// .add(new ClusteredStoreConfiguration(Consistency.STRONG, 1024)) - ) - .with(ClusteringServiceConfigurationBuilder.cluster(URI.create("terracotta://localhost:9510/my-application")) - .autoCreate() -// .defaultServerResource("primary-server-resource") -// .resourcePool("resource-pool-a", 28, MemoryUnit.MB) - ); - - PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); - - cacheManager.close(); - } - -} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java index e191400143..4fd8dcb7e2 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java @@ -113,7 +113,7 @@ public void basicCacheCAS() throws Exception { .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG, 1))); + .add(new ClusteredStoreConfiguration(Consistency.STRONG))); final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); try { @@ -147,7 +147,7 @@ public void basicClusteredBulk() throws Exception { .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder() .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG, 1))); + .add(new ClusteredStoreConfiguration(Consistency.STRONG))); final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); try { diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 14d9627a05..74eeae7db3 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -15,6 +15,7 @@ */ package org.ehcache.clustered.management; + import org.ehcache.Cache; import org.ehcache.CacheManager; import 
org.ehcache.Status; @@ -38,8 +39,8 @@ import org.terracotta.management.model.message.Message; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Sample; import org.terracotta.management.model.stats.history.CounterHistory; -import org.terracotta.management.model.stats.primitive.Counter; import java.io.Serializable; import java.util.Arrays; @@ -51,8 +52,8 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.number.OrderingComparison.greaterThanOrEqualTo; import static org.junit.Assert.assertThat; public class ClusteringManagementServiceTest extends AbstractClusteringManagementTest { @@ -128,7 +129,7 @@ public void test_capabilities_exposed() throws Exception { assertThat(capabilities[3].getName(), equalTo("SettingsCapability")); assertThat(capabilities[4].getName(), equalTo("ManagementAgentService")); assertThat(capabilities[0].getDescriptors(), hasSize(4)); - assertThat(capabilities[1].getDescriptors(), hasSize(13)); + assertThat(capabilities[1].getDescriptors(), hasSize(75)); } @Test @@ -180,7 +181,7 @@ public void test_notifs_on_remove_cache() throws Exception { @Test public void test_stats_collection() throws Exception { - sendManagementCallToCollectStats("GetCounter", "InexistingRate", "AllCacheGetCount"); + sendManagementCallToCollectStats("Cache:HitCount"); Cache cache1 = cacheManager.getCache("cache-1", String.class, String.class); cache1.put("key1", "val"); @@ -191,28 +192,24 @@ public void test_stats_collection() throws Exception { // get the stats (we are getting the primitive counter, not the sample history) ContextualStatistics[] stats = waitForNextStats(); + Sample[] samples = stats[0].getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); assertThat(stats.length, equalTo(1)); assertThat(stats[0].getContext().get("cacheName"), equalTo("cache-1")); - assertThat(stats[0].getStatistic(Counter.class, "GetCounter").getValue(), equalTo(2L)); - - // first collect of a sample gives no value because it "triggers" the stat computation - // this is how the internal ehcache's stat framework works: first call to a sample activates it. 
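// Editorial sketch, not part of this patch: the management test now reads the hit count
// from the sampled "Cache:HitCount" history (a CounterHistory of Sample values) instead of
// the removed primitive Counter statistic. The variable name is illustrative; the calls
// mirror the added assertions in this hunk.
//
//   Sample[] samples = stats[0].getStatistic(CounterHistory.class, "Cache:HitCount").getValue();
//   assertThat(samples[samples.length - 1].getValue(), equalTo(4L));  // latest sample holds the count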
- assertThat(stats[0].getContext().get("cacheName"), equalTo("cache-1")); - assertThat(stats[0].getStatistic(CounterHistory.class, "AllCacheGetCount").getValue().length, equalTo(0)); + assertThat(samples[0].getValue(), equalTo(2L)); // do some other operations cache1.get("key1"); cache1.get("key2"); stats = waitForNextStats(); + samples = stats[0].getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); assertThat(stats.length, equalTo(1)); assertThat(stats[0].getContext().get("cacheName"), equalTo("cache-1")); - assertThat(stats[0].getStatistic(Counter.class, "GetCounter").getValue(), equalTo(4L)); - assertThat(stats[0].getContext().get("cacheName"), equalTo("cache-1")); - assertThat(stats[0].getStatistic(CounterHistory.class, "AllCacheGetCount").getValue().length, greaterThanOrEqualTo(1)); - assertThat(stats[0].getStatistic(CounterHistory.class, "AllCacheGetCount").getValue()[0].getValue(), equalTo(4L)); + assertThat(samples.length, greaterThanOrEqualTo(1)); + assertThat(samples[samples.length - 1].getValue(), equalTo(4L)); + } -} +} \ No newline at end of file diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java index 47c05c76b5..2f2724e395 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java @@ -29,6 +29,8 @@ @CommonComponent public class ServerStoreImpl implements ServerStore { + private static final int OFFHEAP_CHAIN_SEGMENTS = 16; + private final ServerStoreConfiguration storeConfiguration; private final PageSource pageSource; private final OffHeapServerStore store; @@ -36,7 +38,7 @@ public class ServerStoreImpl implements ServerStore { public ServerStoreImpl(ServerStoreConfiguration storeConfiguration, PageSource pageSource) { this.storeConfiguration = storeConfiguration; this.pageSource = pageSource; - this.store = new OffHeapServerStore(pageSource, storeConfiguration.getConcurrency()); + this.store = new OffHeapServerStore(pageSource, OFFHEAP_CHAIN_SEGMENTS); } public void setEvictionListener(ServerStoreEvictionListener listener) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 682b7bcadf..822d13256b 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -2590,14 +2590,8 @@ private static final class ServerStoreConfigBuilder { private String keySerializerType; private String valueSerializerType; private Consistency consistency; - private int concurrency; - ServerStoreConfigBuilder concurrency(int concurrency) { - this.concurrency = concurrency; - return this; - } - ServerStoreConfigBuilder consistency(Consistency consistency) { this.consistency = consistency; return this; @@ -2650,7 +2644,7 @@ ServerStoreConfigBuilder setValueSerializerType(Class valueSerializerType) { ServerStoreConfiguration build() { return new ServerStoreConfiguration(poolAllocation, storedKeyType, storedValueType, - actualKeyType, actualValueType, keySerializerType, valueSerializerType, consistency, concurrency); + actualKeyType, actualValueType, keySerializerType, valueSerializerType, consistency); } } diff --git 
a/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java index 33476bbf03..2f7e972379 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/ServerStoreCompatibilityTest.java @@ -17,12 +17,12 @@ package org.ehcache.clustered.server; import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.PoolAllocation.Dedicated; import org.ehcache.clustered.common.PoolAllocation.Shared; -import org.ehcache.clustered.common.PoolAllocation.Unknown; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; +import org.ehcache.clustered.common.PoolAllocation.Unknown; import org.junit.Test; import static org.hamcrest.Matchers.is; @@ -55,8 +55,7 @@ public void testStoredKeyTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, String.class.getName(), @@ -65,8 +64,7 @@ public void testStoredKeyTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -87,8 +85,7 @@ public void testStoredValueTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -97,8 +94,7 @@ public void testStoredValueTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -119,8 +115,7 @@ public void testStoredActualKeyTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -129,8 +124,7 @@ public void testStoredActualKeyTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -151,8 +145,7 @@ public void testStoredActualValueTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -161,8 +154,7 @@ public void testStoredActualValueTypeMismatch() { Long.class.getName(), KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -183,8 +175,7 @@ public void testKeySerializerTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, 
- Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -193,8 +184,7 @@ public void testKeySerializerTypeMismatch() { ACTUAL_VALUE_TYPE, Double.class.getName(), VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -215,8 +205,7 @@ public void testValueSerializerTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, - 1); + Consistency.EVENTUAL); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -225,7 +214,7 @@ public void testValueSerializerTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, Double.class.getName(), - Consistency.EVENTUAL, 1); + Consistency.EVENTUAL); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -246,7 +235,7 @@ public void testConsitencyMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, 1); + Consistency.EVENTUAL); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -255,7 +244,7 @@ public void testConsitencyMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -276,7 +265,7 @@ public void testDedicatedPoolResourceTooBig() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Dedicated("primary",8), STORED_KEY_TYPE, @@ -285,7 +274,7 @@ public void testDedicatedPoolResourceTooBig() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -306,7 +295,7 @@ public void testDedicatedPoolResourceTooSmall() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Dedicated("primary",2), STORED_KEY_TYPE, @@ -315,7 +304,7 @@ public void testDedicatedPoolResourceTooSmall() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -336,7 +325,7 @@ public void testDedicatedPoolResourceNameMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new Dedicated("primaryBad",4), STORED_KEY_TYPE, @@ -345,7 +334,7 @@ public void testDedicatedPoolResourceNameMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -366,7 +355,7 @@ public void testSharedPoolResourceNameMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(new 
Shared("sharedPoolBad"), STORED_KEY_TYPE, @@ -375,7 +364,7 @@ public void testSharedPoolResourceNameMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -397,7 +386,7 @@ public void testAllResourceParametersMatch() throws Exception ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, 1); + Consistency.EVENTUAL); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(DEDICATED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -406,7 +395,7 @@ public void testAllResourceParametersMatch() throws Exception ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.EVENTUAL, 1); + Consistency.EVENTUAL); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -422,7 +411,7 @@ public void testPoolResourceTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(SHARED_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -431,7 +420,7 @@ public void testPoolResourceTypeMismatch() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -452,7 +441,7 @@ public void testClientStoreConfigurationUnknownPoolResource() throws InvalidServ ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(UNKNOWN_POOL_ALLOCATION, STORED_KEY_TYPE, @@ -461,7 +450,7 @@ public void testClientStoreConfigurationUnknownPoolResource() throws InvalidServ ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); @@ -478,7 +467,7 @@ public void testServerStoreConfigurationUnknownPoolResourceInvalidKeyType() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreConfiguration clientConfiguration = new ServerStoreConfiguration(UNKNOWN_POOL_ALLOCATION, String.class.getName(), @@ -487,7 +476,7 @@ public void testServerStoreConfigurationUnknownPoolResourceInvalidKeyType() { ACTUAL_VALUE_TYPE, KEY_SERIALIZER_TYPE, VALUE_SERIALIZER_TYPE, - Consistency.STRONG, 1); + Consistency.STRONG); ServerStoreCompatibility serverStoreCompatibility = new ServerStoreCompatibility(); diff --git a/core/src/main/java/org/ehcache/core/Ehcache.java b/core/src/main/java/org/ehcache/core/Ehcache.java index 92d8b30b13..b6a9a0b27f 100644 --- a/core/src/main/java/org/ehcache/core/Ehcache.java +++ b/core/src/main/java/org/ehcache/core/Ehcache.java @@ -51,7 +51,6 @@ import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveAllOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.RemoveOutcome; import org.ehcache.core.statistics.CacheOperationOutcomes.ReplaceOutcome; -import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.expiry.Expiry; import org.ehcache.spi.loaderwriter.BulkCacheLoadingException; import org.ehcache.spi.loaderwriter.BulkCacheWritingException; @@ -71,6 +70,8 @@ import static org.ehcache.core.exceptions.ExceptionFactory.newCacheLoadingException; import 
static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; +import org.ehcache.core.statistics.CacheOperationOutcomes.ClearOutcome; +import org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome; import static org.terracotta.statistics.StatisticBuilder.operation; /** @@ -92,7 +93,7 @@ public class Ehcache implements InternalCache { private final Jsr107CacheImpl jsr107Cache; protected final Logger logger; - private final OperationObserver getObserver = operation(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.class).named("get").of(this).tag("cache").build(); + private final OperationObserver getObserver = operation(GetOutcome.class).named("get").of(this).tag("cache").build(); private final OperationObserver getAllObserver = operation(GetAllOutcome.class).named("getAll").of(this).tag("cache").build(); private final OperationObserver putObserver = operation(PutOutcome.class).named("put").of(this).tag("cache").build(); private final OperationObserver putAllObserver = operation(PutAllOutcome.class).named("putAll").of(this).tag("cache").build(); @@ -102,6 +103,7 @@ public class Ehcache implements InternalCache { private final OperationObserver putIfAbsentObserver = operation(PutIfAbsentOutcome.class).named("putIfAbsent").of(this).tag("cache").build(); private final OperationObserver replaceObserver = operation(ReplaceOutcome.class).named("replace").of(this).tag("cache").build(); private final Map bulkMethodEntries = new EnumMap(BulkOps.class); + private final OperationObserver clearObserver = operation(ClearOutcome.class).named("clear").of(this).tag("cache").build(); /** * Creates a new {@code Ehcache} based on the provided parameters. @@ -169,17 +171,17 @@ public V get(final K key) { // Check for expiry first if (valueHolder == null) { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); + getObserver.end(GetOutcome.MISS_NO_LOADER); return null; } else { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT_NO_LOADER); return valueHolder.value(); } } catch (StoreAccessException e) { try { return resilienceStrategy.getFailure(key, e); } finally { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.FAILURE); + getObserver.end(GetOutcome.FAILURE); } } } @@ -294,10 +296,13 @@ private boolean removeInternal(final K key) { */ @Override public void clear() { + this.clearObserver.begin(); statusTransitioner.checkAvailable(); try { store.clear(); + this.clearObserver.end(ClearOutcome.SUCCESS); } catch (StoreAccessException e) { + this.clearObserver.end(ClearOutcome.FAILURE); resilienceStrategy.clearFailure(e); } } diff --git a/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java b/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java index e83cc30b7a..1edd170384 100755 --- a/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java +++ b/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java @@ -21,6 +21,20 @@ */ public interface CacheOperationOutcomes { + /** + * Outcomes for cache Clear operations. + */ + enum ClearOutcome implements CacheOperationOutcomes { + /** + * success + */ + SUCCESS, + /** + * failure + */ + FAILURE + } + /** * Outcomes for cache Get operations. 
*/ diff --git a/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java b/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java index 9adcb17160..6ea91e664b 100644 --- a/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java +++ b/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java @@ -18,7 +18,6 @@ import org.terracotta.context.ContextManager; import org.terracotta.context.TreeNode; import org.terracotta.context.annotations.ContextAttribute; -import org.terracotta.context.query.Matcher; import org.terracotta.context.query.Matchers; import org.terracotta.context.query.Query; import org.terracotta.statistics.OperationStatistic; @@ -43,7 +42,7 @@ import static org.terracotta.context.query.QueryBuilder.queryBuilder; /** - * @author Ludovic Orban + * */ @ContextAttribute("this") public class TierOperationStatistic, D extends Enum> implements OperationStatistic { @@ -53,59 +52,58 @@ public class TierOperationStatistic, D extends Enum> implem @ContextAttribute("properties") public final Map properties; @ContextAttribute("type") public final Class type; - private final Class aliasing; + private final Class tierOperatioOutcome; private final OperationStatistic operationStatistic; - private final HashMap> xlatMap; + private final HashMap> storeToTierOperationOutcomeMap; - public TierOperationStatistic(Class aliasing, Class aliased, Object tier, HashMap> xlatMap, String sourceOperationName, int priority, String targetOperationName) { - this.aliasing = aliasing; - this.operationStatistic = TierOperationStatistic.findOperationStat(tier, targetOperationName);; - this.xlatMap = xlatMap; + public TierOperationStatistic(Class tierOperatioOutcome, Class storeOperatioOutcome, Object tier, HashMap> storeToTierOperationOutcomeMap, String sourceOperationName, int tierHeight, String targetOperationName, String discriminator) { + this.tierOperatioOutcome = tierOperatioOutcome; + this.operationStatistic = TierOperationStatistic.findOperationStat(tier, targetOperationName); + this.storeToTierOperationOutcomeMap = storeToTierOperationOutcomeMap; this.name = sourceOperationName; this.tags = new HashSet(); this.tags.add("tier"); this.properties = new HashMap(); - this.properties.put("priority", priority); - String discriminator = TierOperationStatistic.findDiscriminator(tier); - if (discriminator != null) { - this.properties.put("discriminator", discriminator); - } - this.type = aliasing; - - EnumSet ds = EnumSet.allOf(aliasing); - for (D d : ds) { - if (!xlatMap.containsKey(d)) { - throw new IllegalArgumentException("xlatMap does not contain key " + d); + this.properties.put("tierHeight", tierHeight); + this.properties.put("discriminator", discriminator); + this.type = tierOperatioOutcome; + + EnumSet tierOperatioOutcomeSet = EnumSet.allOf(tierOperatioOutcome); + //make sure all tierOperatioOutcome enum values are keys in the storeToTierOperationOutcomeMap + for (D tierOperatioOutcomeKey : tierOperatioOutcomeSet) { + if (!storeToTierOperationOutcomeMap.containsKey(tierOperatioOutcomeKey)) { + throw new IllegalArgumentException("storeTierOperationOutcomeMap does not contain key " + tierOperatioOutcomeKey); } } + //verify that all storeOperatioOutcomes are tracked Set allAliasedValues = new HashSet(); - Collection> values = xlatMap.values(); + Collection> values = storeToTierOperationOutcomeMap.values(); for (Set value : values) { allAliasedValues.addAll(value); } - Set allMissingValues = new HashSet(EnumSet.allOf(aliased)); + Set 
allMissingValues = new HashSet(EnumSet.allOf(storeOperatioOutcome)); allMissingValues.removeAll(allAliasedValues); if (!allMissingValues.isEmpty()) { - throw new IllegalArgumentException("xlatMap does not contain values " + allMissingValues); + throw new IllegalArgumentException("storeTierOperationOutcomeMap does not contain values " + allMissingValues); } } @Override public Class type() { - return aliasing; + return tierOperatioOutcome; } @Override public ValueStatistic statistic(D result) { - return operationStatistic.statistic(xlatMap.get(result)); + return operationStatistic.statistic(storeToTierOperationOutcomeMap.get(result)); } @Override public ValueStatistic statistic(Set results) { Set xlated = new HashSet(); for (D result : results) { - xlated.addAll(xlatMap.get(result)); + xlated.addAll(storeToTierOperationOutcomeMap.get(result)); } return operationStatistic.statistic(xlated); } @@ -113,7 +111,7 @@ public ValueStatistic statistic(Set results) { @Override public long count(D type) { long value = 0L; - Set s = xlatMap.get(type); + Set s = storeToTierOperationOutcomeMap.get(type); for (S s1 : s) { value += operationStatistic.count(s1); } @@ -124,7 +122,7 @@ public long count(D type) { public long sum(Set types) { Set xlated = new HashSet(); for (D type : types) { - xlated.addAll(xlatMap.get(type)); + xlated.addAll(storeToTierOperationOutcomeMap.get(type)); } return operationStatistic.sum(xlated); } @@ -174,27 +172,6 @@ public void end(D result, long... parameters) { throw new UnsupportedOperationException(); } - private static String findDiscriminator(Object rootNode) { - Set results = queryBuilder().chain(self()) - .children().filter( - context(attributes(Matchers.allOf( - hasAttribute("discriminator", new Matcher() { - @Override - protected boolean matchesSafely(Object object) { - return object instanceof String; - } - }))))).build().execute(Collections.singleton(ContextManager.nodeFor(rootNode))); - - if (results.size() > 1) { - throw new IllegalStateException("More than one discriminator attribute found"); - } else if (results.isEmpty()) { - return null; - } else { - TreeNode node = results.iterator().next(); - return (String) node.getContext().attributes().get("discriminator"); - } - } - private static OperationStatistic findOperationStat(Object rootNode, final String statName) { Query q = queryBuilder().chain(self()) .descendants().filter(context(identifier(subclassOf(OperationStatistic.class)))).build(); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index bdfdf495a9..9f3968733e 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -306,13 +306,12 @@ private File getMetadataFile() { @ServiceDependencies({TimeSourceService.class, SerializationProvider.class, ExecutionService.class, DiskResourceService.class}) public static class Provider implements Store.Provider, AuthoritativeTier.Provider { + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); private final Map, PersistenceSpaceIdentifier> createdStores = new ConcurrentWeakIdentityHashMap, PersistenceSpaceIdentifier>(); private final String defaultThreadPool; private volatile ServiceProvider serviceProvider; private volatile DiskResourceService diskPersistenceService; - private final Map, Collection>> tierOperationStatistics 
= new ConcurrentWeakIdentityHashMap, Collection>>(); - public Provider() { this(null); } @@ -338,15 +337,15 @@ public OffHeapDiskStore createStore(Configuration storeConfig TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS)); - }}, "get", 1000, "get"); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); + }}, "get", ResourceType.Core.DISK.getTierHeight(), "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", 1000, "eviction"); + }}, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -400,7 +399,10 @@ public void releaseStore(Store resource) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } try { - close((OffHeapDiskStore)resource); + OffHeapDiskStore offHeapDiskStore = (OffHeapDiskStore)resource; + close(offHeapDiskStore); + StatisticsManager.nodeFor(offHeapDiskStore).clean(); + tierOperationStatistics.remove(offHeapDiskStore); } catch (IOException e) { throw new RuntimeException(e); } @@ -420,7 +422,6 @@ static void close(final OffHeapDiskStore resource) throws IOExcepti } localMap.close(); } - StatisticsManager.dissociate(resource.offHeapStoreStatsSettings).fromParent(resource); } @Override @@ -477,20 +478,20 @@ public void stop() { @Override public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { - OffHeapDiskStore authoritativeTier = createStore(storeConfig, serviceConfigs); + OffHeapDiskStore authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); Collection> tieredOps = new ArrayList>(); TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, authoritativeTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT)); - }}, "get", 1000, "getAndFault"); + }}, "get", ResourceType.Core.DISK.getTierHeight(), "getAndFault", STATISTICS_TAG); StatisticsManager.associate(get).withParent(authoritativeTier); tieredOps.add(get); TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, authoritativeTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", 1000, "eviction"); + }}, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(authoritativeTier); tieredOps.add(evict); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index a0885dc7c3..0a42d93870 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -210,8 +210,6 @@ public void cacheConfigurationChange(CacheConfigurationChangeEvent event) { private final OperationObserver silentInvalidateAllObserver; private final OperationObserver silentInvalidateAllWithHashObserver; - private final OnHeapStoreStatsSettings onHeapStoreStatsSettings; - private static final NullaryFunction REPLACE_EQUALS_TRUE = new NullaryFunction() { @Override public Boolean apply() { @@ -249,8 +247,7 @@ public OnHeapStore(final Configuration config, final TimeSource timeSource } else { this.map = new KeyCopyBackend(byteSized, keyCopier); } - onHeapStoreStatsSettings = new OnHeapStoreStatsSettings(); - StatisticsManager.associate(onHeapStoreStatsSettings).withParent(this); + getObserver = operation(StoreOperationOutcomes.GetOutcome.class).named("get").of(this).tag(STATISTICS_TAG).build(); putObserver = operation(StoreOperationOutcomes.PutOutcome.class).named("put").of(this).tag(STATISTICS_TAG).build(); removeObserver = operation(StoreOperationOutcomes.RemoveOutcome.class).named("remove").of(this).tag(STATISTICS_TAG).build(); @@ -262,10 +259,12 @@ public OnHeapStore(final Configuration config, final TimeSource timeSource computeIfAbsentObserver = operation(StoreOperationOutcomes.ComputeIfAbsentOutcome.class).named("computeIfAbsent").of(this).tag(STATISTICS_TAG).build(); evictionObserver = operation(StoreOperationOutcomes.EvictionOutcome.class).named("eviction").of(this).tag(STATISTICS_TAG).build(); expirationObserver = 
operation(StoreOperationOutcomes.ExpirationOutcome.class).named("expiration").of(this).tag(STATISTICS_TAG).build(); + getOrComputeIfAbsentObserver = operation(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class).named("getOrComputeIfAbsent").of(this).tag(STATISTICS_TAG).build(); invalidateObserver = operation(CachingTierOperationOutcomes.InvalidateOutcome.class).named("invalidate").of(this).tag(STATISTICS_TAG).build(); invalidateAllObserver = operation(CachingTierOperationOutcomes.InvalidateAllOutcome.class).named("invalidateAll").of(this).tag(STATISTICS_TAG).build(); invalidateAllWithHashObserver = operation(CachingTierOperationOutcomes.InvalidateAllWithHashOutcome.class).named("invalidateAllWithHash").of(this).tag(STATISTICS_TAG).build(); + silentInvalidateObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateOutcome.class).named("silentInvalidate").of(this).tag(STATISTICS_TAG).build(); silentInvalidateAllObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllOutcome.class).named("silentInvalidateAll").of(this).tag(STATISTICS_TAG).build(); silentInvalidateAllWithHashObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllWithHashOutcome.class).named("silentInvalidateAllWithHash").of(this).tag(STATISTICS_TAG).build(); @@ -279,26 +278,6 @@ public Number call() throws Exception { return map.mappingCount(); } }); - StatisticsManager.createPassThroughStatistic(this, "maxMappings", tags, properties, new Callable() { - @Override - public Number call() throws Exception { - if (byteSized) { - return -1L; - } else { - return capacity; - } - } - }); - StatisticsManager.createPassThroughStatistic(this, "allocatedMemory", tags, properties, new Callable() { - @Override - public Number call() throws Exception { - if (byteSized) { - return capacity; - } else { - return -1L; - } - } - }); StatisticsManager.createPassThroughStatistic(this, "occupiedMemory", tags, properties, new Callable() { @Override public Number call() throws Exception { @@ -1687,15 +1666,15 @@ public OnHeapStore createStore(final Configuration storeConfi TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS)); - }}, "get", 1000, "get"); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); + }}, "get", ResourceType.Core.HEAP.getTierHeight(), "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", 1000, "eviction"); + }}, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -1730,6 +1709,9 @@ public void 
releaseStore(Store resource) { } final OnHeapStore onHeapStore = (OnHeapStore)resource; close(onHeapStore); + StatisticsManager.nodeFor(onHeapStore).clean(); + tierOperationStatistics.remove(onHeapStore); + CopyProvider copyProvider = serviceProvider.getService(CopyProvider.class); for (Copier copier: copiers) { try { @@ -1784,18 +1766,18 @@ public CachingTier createCachingTier(Configuration storeConfi TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class, cachingTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT)); put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS)); - }}, "get", 100, "getOrComputeIfAbsent"); + }}, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); StatisticsManager.associate(get).withParent(cachingTier); tieredOps.add(get); TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, cachingTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", 100, "eviction"); + }}, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(cachingTier); tieredOps.add(evict); - tierOperationStatistics.put(cachingTier, tieredOps); + this.tierOperationStatistics.put(cachingTier, tieredOps); return cachingTier; } @@ -1817,20 +1799,20 @@ public void initCachingTier(CachingTier resource) { @Override public HigherCachingTier createHigherCachingTier(Configuration storeConfig, ServiceConfiguration...
serviceConfigs) { - OnHeapStore higherCachingTier = createStore(storeConfig, serviceConfigs); + OnHeapStore higherCachingTier = createStoreInternal(storeConfig, new ScopedStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); Collection> tieredOps = new ArrayList>(); TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class, higherCachingTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT)); put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS)); - }}, "get", 10, "getOrComputeIfAbsent"); + }}, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); StatisticsManager.associate(get).withParent(higherCachingTier); tieredOps.add(get); TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, higherCachingTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", 10, "eviction"); + }}, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(higherCachingTier); tieredOps.add(evict); @@ -1848,10 +1830,4 @@ public void initHigherCachingTier(HigherCachingTier resource) { checkResource(resource); } } - - private static final class OnHeapStoreStatsSettings { - @ContextAttribute("tags") private final Set tags = new HashSet(Arrays.asList("store")); - @ContextAttribute("discriminator") private final String discriminator = STATISTICS_TAG; - } - } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java index 353ece1e1e..a2492e5d40 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStore.java @@ -56,7 +56,6 @@ import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; -import org.terracotta.context.annotations.ContextAttribute; import static org.terracotta.statistics.StatisticBuilder.operation; public abstract class AbstractOffHeapStore implements AuthoritativeTier, LowerCachingTier { @@ -99,7 +98,6 @@ public void onInvalidation(Object key, ValueHolder valueHolder) { private final OperationObserver getAndRemoveObserver; private final OperationObserver installMappingObserver; - protected final OffHeapStoreStatsSettings offHeapStoreStatsSettings; private volatile InvalidationValve valve; protected BackingMapEvictionListener mapEvictionListener; @@ -114,8 +112,6 @@ public AbstractOffHeapStore(String statisticsTag, Configuration config, Ti this.timeSource = timeSource; 
this.eventDispatcher = eventDispatcher; - this.offHeapStoreStatsSettings = new OffHeapStoreStatsSettings(statisticsTag); - StatisticsManager.associate(offHeapStoreStatsSettings).withParent(this); this.getObserver = operation(StoreOperationOutcomes.GetOutcome.class).of(this).named("get").tag(statisticsTag).build(); this.putObserver = operation(StoreOperationOutcomes.PutOutcome.class).of(this).named("put").tag(statisticsTag).build(); this.putIfAbsentObserver = operation(StoreOperationOutcomes.PutIfAbsentOutcome.class).of(this).named("putIfAbsent").tag(statisticsTag).build(); @@ -1317,13 +1313,4 @@ public void onEviction(K key, OffHeapValueHolder value) { evictionObserver.end(StoreOperationOutcomes.EvictionOutcome.SUCCESS); } } - - private static final class OffHeapStoreStatsSettings { - @ContextAttribute("tags") private final Set tags = new HashSet(Arrays.asList("store")); - @ContextAttribute("discriminator") private final String discriminator; - - OffHeapStoreStatsSettings(String discriminator) { - this.discriminator = discriminator; - } - } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java index 6e9dbd66fb..9b2a747d14 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java @@ -67,6 +67,7 @@ import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.core.statistics.TierOperationStatistic.set; +import org.ehcache.impl.internal.store.heap.OnHeapStore; import static org.ehcache.impl.internal.store.offheap.OffHeapStoreUtils.getBufferSource; /** @@ -155,15 +156,15 @@ public OffHeapStore createStore(Configuration storeConfig, Se TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS)); - }}, "get", 1000, "get"); + put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); + }}, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", 1000, "eviction"); + }}, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -194,7 +195,10 @@ public void releaseStore(Store resource) { if (!createdStores.contains(resource)) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } - close((OffHeapStore)resource); + OffHeapStore offHeapStore = (OffHeapStore)resource; + close(offHeapStore); + 
StatisticsManager.nodeFor(offHeapStore).clean(); + tierOperationStatistics.remove(offHeapStore); } static void close(final OffHeapStore resource) { @@ -203,7 +207,6 @@ static void close(final OffHeapStore resource) { resource.map = null; localMap.destroy(); } - StatisticsManager.dissociate(resource.offHeapStoreStatsSettings).fromParent(resource); } @Override @@ -248,14 +251,14 @@ public AuthoritativeTier createAuthoritativeTier(Configuration get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, authoritativeTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT)); - }}, "get", 1000, "getAndFault"); + }}, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndFault", STATISTICS_TAG); StatisticsManager.associate(get).withParent(authoritativeTier); tieredOps.add(get); TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, authoritativeTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", 1000, "eviction"); + }}, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(authoritativeTier); tieredOps.add(evict); @@ -281,14 +284,14 @@ public LowerCachingTier createCachingTier(Configuration store TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.class, lowerCachingTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.HIT_REMOVED)); put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS)); - }}, "get", 100, "getAndRemove"); + }}, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndRemove", STATISTICS_TAG); StatisticsManager.associate(get).withParent(lowerCachingTier); tieredOps.add(get); TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, lowerCachingTier, new HashMap>() {{ put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", 100, "eviction"); + }}, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(lowerCachingTier); tieredOps.add(evict); diff --git a/impl/src/test/java/org/ehcache/docs/GettingStarted.java b/impl/src/test/java/org/ehcache/docs/GettingStarted.java index 8f6cdc26a0..4109e7d317 100644 --- a/impl/src/test/java/org/ehcache/docs/GettingStarted.java +++ 
b/impl/src/test/java/org/ehcache/docs/GettingStarted.java @@ -92,9 +92,6 @@ public void cachemanagerExample() { myCache.put(1L, "da one!"); // <7> String value = myCache.get(1L); // <8> - System.out.println(StatisticsManager.nodeFor(myCache).toTreeString()); - - cacheManager.removeCache("preConfigured"); // <9> cacheManager.close(); // <10> @@ -130,8 +127,6 @@ public void offheapCacheManager() { Cache tieredCache = cacheManager.getCache("tieredCache", Long.class, String.class); - System.out.println(StatisticsManager.nodeFor(tieredCache).toTreeString()); - cacheManager.close(); // end::offheapCacheManager[] } @@ -152,8 +147,6 @@ public void threeTiersCacheManager() throws Exception { Cache threeTieredCache = persistentCacheManager.getCache("threeTieredCache", Long.class, String.class); - System.out.println(StatisticsManager.nodeFor(threeTieredCache).toTreeString()); - persistentCacheManager.close(); // end::threeTiersCacheManager[] diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java index 954c187e11..03b5c15677 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java @@ -46,7 +46,8 @@ import java.util.Set; import static java.util.Collections.singleton; -import static org.hamcrest.Matchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; import static org.terracotta.context.query.Matchers.attributes; @@ -58,6 +59,7 @@ * OffHeapStoreProviderTest */ public class OffHeapDiskStoreProviderTest { + @Test public void testStatisticsAssociations() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); @@ -73,7 +75,7 @@ public void testStatisticsAssociations() throws Exception { hasAttribute("tags", new Matcher>() { @Override protected boolean matchesSafely(Set object) { - return object.containsAll(singleton("store")); + return object.containsAll(singleton("Disk")); } }))))) .build(); @@ -81,12 +83,12 @@ protected boolean matchesSafely(Set object) { Set nodes = singleton(ContextManager.nodeFor(store)); Set storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(false)); + assertThat(storeResult, not(empty())); provider.releaseStore(store); storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(true)); + assertThat(storeResult, empty()); } private Store.Configuration getStoreConfig() { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java index ca5e9457cc..0a6e8c5dca 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java @@ -419,7 +419,7 @@ public Void call() { Query invalidateAllQuery = QueryBuilder.queryBuilder().descendants().filter(context(attributes(hasAttribute("tags", new Matcher>() { @Override protected boolean matchesSafely(Set object) { - return object.contains("local-offheap"); + return object.contains("OffHeap"); } })))).filter(context(attributes(hasAttribute("name", "invalidateAll")))).ensureUnique().build(); diff --git 
a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderTest.java deleted file mode 100644 index 3e305e3841..0000000000 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreProviderTest.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.impl.internal.store.offheap; - -import org.ehcache.config.Eviction; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePool; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.ResourceType; -import org.ehcache.config.ResourceUnit; -import org.ehcache.config.SizedResourcePool; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.Store; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; -import org.ehcache.impl.internal.DefaultTimeSourceService; -import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.impl.serialization.StringSerializer; -import org.ehcache.spi.serialization.SerializationProvider; -import org.ehcache.spi.serialization.Serializer; -import org.junit.Test; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.context.query.Matcher; -import org.terracotta.context.query.Matchers; -import org.terracotta.context.query.Query; - -import java.util.Map; -import java.util.Set; - -import static java.util.Collections.singleton; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; -import static org.terracotta.context.query.Matchers.attributes; -import static org.terracotta.context.query.Matchers.context; -import static org.terracotta.context.query.Matchers.hasAttribute; -import static org.terracotta.context.query.QueryBuilder.queryBuilder; - -/** - * OffHeapStoreProviderTest - */ -public class OffHeapStoreProviderTest { - @Test - public void testStatisticsAssociations() throws Exception { - OffHeapStore.Provider provider = new OffHeapStore.Provider(); - - ServiceLocator serviceLocator = new ServiceLocator(mock(SerializationProvider.class), new DefaultTimeSourceService(null)); - - provider.start(serviceLocator); - - OffHeapStore store = provider.createStore(getStoreConfig()); - - Query storeQuery = queryBuilder() - .children() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.containsAll(singleton("store")); - } - }))))) - .build(); - - Set nodes = singleton(ContextManager.nodeFor(store)); - - Set storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(false)); - - provider.releaseStore(store); - - storeResult = storeQuery.execute(nodes); - assertThat(storeResult.isEmpty(), is(true)); - 
} - - private Store.Configuration getStoreConfig() { - return new Store.Configuration() { - @Override - public Class getKeyType() { - return Long.class; - } - - @Override - public Class getValueType() { - return String.class; - } - - @Override - public EvictionAdvisor getEvictionAdvisor() { - return Eviction.noAdvice(); - } - - @Override - public ClassLoader getClassLoader() { - return getClass().getClassLoader(); - } - - @Override - public Expiry getExpiry() { - return Expirations.noExpiration(); - } - - @Override - public ResourcePools getResourcePools() { - return new ResourcePools() { - @Override - public ResourcePool getPoolForResource(ResourceType resourceType) { - return new SizedResourcePool() { - @Override - public ResourceType getType() { - return ResourceType.Core.OFFHEAP; - } - - @Override - public long getSize() { - return 1; - } - - @Override - public ResourceUnit getUnit() { - return MemoryUnit.MB; - } - - @Override - public boolean isPersistent() { - return false; - } - - @Override - public void validateUpdate(ResourcePool newPool) { - throw new UnsupportedOperationException("TODO Implement me!"); - } - }; - } - - @Override - public Set> getResourceTypeSet() { - return (Set) singleton(ResourceType.Core.OFFHEAP); - } - - @Override - public ResourcePools validateAndMerge(ResourcePools toBeUpdated) throws IllegalArgumentException, UnsupportedOperationException { - throw new UnsupportedOperationException("TODO Implement me!"); - } - }; - } - - @Override - public Serializer getKeySerializer() { - return new LongSerializer(); - } - - @Override - public Serializer getValueSerializer() { - return new StringSerializer(); - } - - @Override - public int getDispatcherConcurrency() { - return 1; - } - - }; - }} diff --git a/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java b/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java index ae9b914609..acb04c37ce 100644 --- a/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java +++ b/integration-test/src/test/java/org/ehcache/integration/StoreStatisticsTest.java @@ -69,7 +69,7 @@ public void test1TierStoreStatsAvailableInContextManager() throws Exception { assertNull(cache.get(0L)); - long onHeapMisses = StoreStatisticsTest.findStat(cache, "get", "onheap-store").count(StoreOperationOutcomes.GetOutcome.MISS); + long onHeapMisses = StoreStatisticsTest.findStat(cache, "get", "OnHeap").count(StoreOperationOutcomes.GetOutcome.MISS); assertThat(onHeapMisses, equalTo(1L)); cacheManager.close(); @@ -90,9 +90,9 @@ public void test2TiersStoreStatsAvailableInContextManager() throws Exception { assertNull(cache.get(0L)); - long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "onheap-store").count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); + long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "OnHeap").count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); assertThat(onHeapMisses, equalTo(1L)); - long offheapMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "local-offheap").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); + long offheapMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "OffHeap").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); assertThat(offheapMisses, equalTo(1L)); cacheManager.close(); @@ -115,11 +115,11 @@ public void test3TiersStoreStatsAvailableInContextManager() throws Exception { assertNull(cache.get(0L)); 
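[Editor's note] The assertions below switch the tier discriminator tags from "onheap-store", "local-offheap" and "local-disk" to "OnHeap", "OffHeap" and "Disk". The findStat helper used by StoreStatisticsTest is not visible in this excerpt; as a rough sketch of what such a lookup can look like, the following reuses the org.terracotta.context query API already exercised in OffHeapDiskStoreTest above. The class and method names, the generics and the "this" context attribute are assumptions, not the project's actual code.

    import java.util.Collections;
    import java.util.Set;

    import org.terracotta.context.ContextManager;
    import org.terracotta.context.TreeNode;
    import org.terracotta.context.query.Matcher;
    import org.terracotta.context.query.Query;
    import org.terracotta.statistics.OperationStatistic;

    import static org.terracotta.context.query.Matchers.attributes;
    import static org.terracotta.context.query.Matchers.context;
    import static org.terracotta.context.query.Matchers.hasAttribute;
    import static org.terracotta.context.query.QueryBuilder.queryBuilder;

    final class TierStatLookup {
      // Finds the single OperationStatistic named statName (e.g. "get") carrying the given
      // tier tag (e.g. "OnHeap") somewhere below the cache in the statistics context tree.
      @SuppressWarnings("unchecked")
      static <T extends Enum<T>> OperationStatistic<T> findStat(Object cache, String statName, final String tierTag) {
        Query query = queryBuilder()
            .descendants()
            .filter(context(attributes(hasAttribute("name", statName))))
            .filter(context(attributes(hasAttribute("tags", new Matcher<Set<String>>() {
              @Override
              protected boolean matchesSafely(Set<String> tags) {
                return tags.contains(tierTag);
              }
            }))))
            .ensureUnique()
            .build();
        TreeNode node = query.execute(Collections.singleton(ContextManager.nodeFor(cache))).iterator().next();
        // "this" is assumed here to hold the object the context node was registered for.
        return (OperationStatistic<T>) node.getContext().attributes().get("this");
      }
    }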
- long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "onheap-store").count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); + long onHeapMisses = StoreStatisticsTest.findStat(cache, "getOrComputeIfAbsent", "OnHeap").count(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); assertThat(onHeapMisses, equalTo(1L)); - long offHeapMisses = StoreStatisticsTest.findStat(cache, "getAndRemove", "local-offheap").count(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS); + long offHeapMisses = StoreStatisticsTest.findStat(cache, "getAndRemove", "OffHeap").count(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS); assertThat(offHeapMisses, equalTo(1L)); - long diskMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "local-disk").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); + long diskMisses = StoreStatisticsTest.findStat(cache, "getAndFault", "Disk").count(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS); assertThat(diskMisses, equalTo(1L)); cacheManager.close(); diff --git a/management/src/main/java/org/ehcache/management/config/EhcacheStatisticsProviderConfiguration.java b/management/src/main/java/org/ehcache/management/config/EhcacheStatisticsProviderConfiguration.java index 3f98330cec..f9e07d8071 100644 --- a/management/src/main/java/org/ehcache/management/config/EhcacheStatisticsProviderConfiguration.java +++ b/management/src/main/java/org/ehcache/management/config/EhcacheStatisticsProviderConfiguration.java @@ -21,6 +21,16 @@ public class EhcacheStatisticsProviderConfiguration extends DefaultStatisticsProviderConfiguration { + /** + * + * @param averageWindowDuration Time window over which averages are calculated. + * @param averageWindowUnit TimeUnit of averageWindowDuration, e.g. TimeUnit.MILLISECONDS or TimeUnit.MINUTES. + * @param historySize Number of statistic samples to keep in memory, e.g. if set to 20 the last 20 values are retained. + * @param historyInterval Interval at which a sample is taken, expressed in historyIntervalUnit. + * @param historyIntervalUnit TimeUnit of historyInterval. + * @param timeToDisable Period for which a statistic remains valid; after this time it is removed.
+ * @param timeToDisableUnit the TimeUnit associated with the timeToDisable parameter + */ public EhcacheStatisticsProviderConfiguration(long averageWindowDuration, TimeUnit averageWindowUnit, int historySize, long historyInterval, TimeUnit historyIntervalUnit, long timeToDisable, TimeUnit timeToDisableUnit) { super(EhcacheStatisticsProvider.class, averageWindowDuration, averageWindowUnit, historySize, historyInterval, historyIntervalUnit, timeToDisable, timeToDisableUnit); } diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java index fcbaac10e5..83d0bcc0c2 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java @@ -68,7 +68,11 @@ public Capability getCapability() { StandardEhcacheStatistics ehcacheStatistics = (StandardEhcacheStatistics) findExposedObject(context); if (ehcacheStatistics != null) { for (String statisticName : statisticNames) { - statistics.put(statisticName, ehcacheStatistics.queryStatistic(statisticName, since)); + try { + statistics.put(statisticName, ehcacheStatistics.queryStatistic(statisticName, since)); + } catch (IllegalArgumentException ignored) { + // ignore when statisticName does not exist and throws an exception + } } } return statistics; diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java index ec3d512e15..7e18a8523a 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -48,19 +48,15 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.stats.StatisticType; class StandardEhcacheStatistics extends ExposedCacheBinding { - private static final Set ALL_CACHE_PUT_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.PutOutcome.class); - private static final Set ALL_CACHE_GET_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.GetOutcome.class); - private static final Set ALL_CACHE_MISS_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.FAILURE, CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER); - private static final Set ALL_CACHE_REMOVE_OUTCOMES = EnumSet.allOf(CacheOperationOutcomes.RemoveOutcome.class); - private static final Set GET_WITH_LOADER_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER); - private static final Set GET_NO_LOADER_OUTCOMES = EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); - private final StatisticsRegistry statisticsRegistry; StandardEhcacheStatistics(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding, StatisticsProviderConfiguration statisticsProviderConfiguration, ScheduledExecutorService 
executor) { @@ -71,6 +67,10 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { statisticsRegistry.registerCompoundOperations("Cache:Hit", OperationStatisticDescriptor.descriptor("get", Collections.singleton("cache"), CacheOperationOutcomes.GetOutcome.class), EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); statisticsRegistry.registerCompoundOperations("Cache:Miss", OperationStatisticDescriptor.descriptor("get", Collections.singleton("cache"), CacheOperationOutcomes.GetOutcome.class), EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)); + statisticsRegistry.registerCompoundOperations("Cache:Clear", OperationStatisticDescriptor.descriptor("clear",Collections.singleton("cache"),CacheOperationOutcomes.ClearOutcome.class), EnumSet.allOf(CacheOperationOutcomes.ClearOutcome.class)); + statisticsRegistry.registerRatios("Cache:HitRatio", OperationStatisticDescriptor.descriptor("get", Collections.singleton("cache"), CacheOperationOutcomes.GetOutcome.class), EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER, CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER), EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)); + statisticsRegistry.registerRatios("Cache:MissRatio", OperationStatisticDescriptor.descriptor("get", Collections.singleton("cache"), CacheOperationOutcomes.GetOutcome.class), EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER), EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER, CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER)); + statisticsRegistry.registerCompoundOperations("Hit", OperationStatisticDescriptor.descriptor("get", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.GetOutcome.class), EnumSet.of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT)); statisticsRegistry.registerCompoundOperations("Miss", OperationStatisticDescriptor.descriptor("get", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.GetOutcome.class), EnumSet.of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS)); statisticsRegistry.registerCompoundOperations("Eviction", OperationStatisticDescriptor.descriptor("eviction", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class), EnumSet.allOf(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class)); @@ -155,21 +155,33 @@ private List> buildHistory(SampledStatistic samp @Override public Collection getDescriptors() { Set capabilities = new HashSet(); - capabilities.addAll(queryStatisticsRegistry()); - capabilities.addAll(operationStatistics()); - return capabilities; } - private Set operationStatistics() { + private Set queryStatisticsRegistry() { Set capabilities = new HashSet(); - return capabilities; - } + Map registrations = statisticsRegistry.getRegistrations(); - private Set queryStatisticsRegistry() { - Set capabilities = new HashSet(); + for(Entry entry : registrations.entrySet()) { + RegisteredStatistic registeredStatistic = registrations.get(entry.getKey().toString()); + + if(registeredStatistic instanceof RegisteredCompoundStatistic) { + List statistics = new ArrayList(); + statistics.add(new StatisticDescriptor(entry.getKey() + "Count", StatisticType.COUNTER_HISTORY)); + statistics.add(new StatisticDescriptor(entry.getKey() + "Rate", 
StatisticType.RATE_HISTORY)); + statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyMinimum", StatisticType.DURATION_HISTORY)); + statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyMaximum", StatisticType.DURATION_HISTORY)); + statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyAverage", StatisticType.AVERAGE_HISTORY)); + + capabilities.addAll(statistics); + } else if(registeredStatistic instanceof RegisteredRatioStatistic) { + capabilities.add(new StatisticDescriptor(entry.getKey() + "Ratio", StatisticType.RATIO_HISTORY)); + } else if(registeredStatistic instanceof RegisteredValueStatistic) { + capabilities.add(new StatisticDescriptor(entry.getKey().toString(), StatisticType.COUNTER_HISTORY)); + } + } return capabilities; } diff --git a/management/src/test/java/org/ehcache/docs/ManagementTest.java b/management/src/test/java/org/ehcache/docs/ManagementTest.java index 13baad2327..fd7243b967 100644 --- a/management/src/test/java/org/ehcache/docs/ManagementTest.java +++ b/management/src/test/java/org/ehcache/docs/ManagementTest.java @@ -21,7 +21,6 @@ import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.SharedManagementService; @@ -38,65 +37,71 @@ import org.terracotta.management.model.context.ContextContainer; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.history.CounterHistory; -import org.terracotta.management.model.stats.primitive.Counter; import org.terracotta.management.registry.ResultSet; -import java.util.Arrays; import java.util.Collection; import java.util.Iterator; +import java.util.concurrent.TimeUnit; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.providers.statistics.StatsUtil; public class ManagementTest { - @Test + private final EhcacheStatisticsProviderConfiguration EHCACHE_STATS_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); + + @Test (timeout=5000) public void usingManagementRegistry() throws Exception { // tag::usingManagementRegistry[] - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB)) + + CacheManager cacheManager = null; + try { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager1"); // <1> + registryConfiguration.addConfiguration(EHCACHE_STATS_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); // <2> + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, MemoryUnit.MB)) .build(); - DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); // <1> - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); // 
<2> - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) // <3> - .build(true); - - - Cache aCache = cacheManager.getCache("aCache", Long.class, String.class); - aCache.put(-1L, "-one"); - aCache.put(0L, "zero"); - aCache.get(-1L); // <4> - aCache.get(0L); // <4> - aCache.get(0L); - aCache.get(0L); - - Thread.sleep(1100); - - Context context = createContext(managementRegistry); // <5> - - ContextualStatistics counters = managementRegistry.withCapability("StatisticsCapability") // <6> - .queryStatistics(Arrays.asList("OnHeap:HitCount", "OnHeap:EvictionCount", "OffHeap:HitCount", "Cache:HitCount", "OffHeap:OccupiedBytesCount", "OffHeap:MappingCount")) - .on(context) - .build() - .execute() - .getSingleResult(); - - Assert.assertThat(counters.size(), Matchers.is(6)); - CounterHistory onHeapStore_Hit_Count = counters.getStatistic(CounterHistory.class, "OnHeap:HitCount"); - CounterHistory onHeapStore_Eviction_Count = counters.getStatistic(CounterHistory.class, "OnHeap:EvictionCount"); - CounterHistory offHeapStore_Hit_Count = counters.getStatistic(CounterHistory.class, "OffHeap:HitCount"); - CounterHistory cache_Hit_Count = counters.getStatistic(CounterHistory.class, "Cache:HitCount"); - CounterHistory offHeapStore_Mapping_Count = counters.getStatistic(CounterHistory.class, "OffHeap:MappingCount"); - CounterHistory offHeapStore_OccupiedBytes_Count = counters.getStatistic(CounterHistory.class, "OffHeap:OccupiedBytesCount"); - - Assert.assertThat(onHeapStore_Hit_Count.getValue()[0].getValue() + offHeapStore_Hit_Count.getValue()[0].getValue(), Matchers.equalTo(4L)); // <7> - Assert.assertThat(cache_Hit_Count.getValue()[0].getValue(), Matchers.equalTo(4L)); // <7> - - System.out.println("onheap evictions: " + onHeapStore_Eviction_Count.getValue()[0].getValue()); - System.out.println("offheap mappings: " + offHeapStore_Mapping_Count.getValue()[0].getValue()); - System.out.println("offheap used bytes: " + offHeapStore_OccupiedBytes_Count.getValue()[0].getValue()); - - cacheManager.close(); + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) // <3> + .build(true); + + + Cache aCache = cacheManager.getCache("myCache", Long.class, String.class); + aCache.put(1L, "one"); + aCache.put(0L, "zero"); + aCache.get(1L); // <4> + aCache.get(0L); // <4> + aCache.get(0L); + aCache.get(0L); + + Thread.sleep(1000); + + Context context = StatsUtil.createContext(managementRegistry); // <5> + + ResultSet counters = managementRegistry.withCapability("StatisticsCapability") // <6> + .queryStatistic("Cache:HitCount") + .on(context) + .build() + .execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + Assert.assertThat(counters.size(), Matchers.is(1)); + + CounterHistory onHeapStore_Hit_Count = statisticsContext.getStatistic(CounterHistory.class, "Cache:HitCount"); + while(!StatsUtil.isHistoryReady(onHeapStore_Hit_Count, 0L)) {} + int mostRecentIndex = onHeapStore_Hit_Count.getValue().length - 1; + long onHeapHitCount = onHeapStore_Hit_Count.getValue()[mostRecentIndex].getValue(); + + Assert.assertThat(onHeapHitCount, Matchers.equalTo(4L)); // <7> + } + finally { + if(cacheManager != null) cacheManager.close(); + } // end::usingManagementRegistry[] } @@ -106,41 +111,45 @@ public void capabilitiesAndContexts() throws Exception { CacheConfiguration cacheConfiguration = 
CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) .build(); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(); - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - - Collection capabilities = managementRegistry.getCapabilities(); // <1> - Assert.assertThat(capabilities.isEmpty(), Matchers.is(false)); - Capability capability = capabilities.iterator().next(); - String capabilityName = capability.getName(); // <2> - Collection capabilityDescriptions = capability.getDescriptors(); // <3> - Assert.assertThat(capabilityDescriptions.isEmpty(), Matchers.is(false)); - CapabilityContext capabilityContext = capability.getCapabilityContext(); - Collection attributes = capabilityContext.getAttributes(); // <4> - Assert.assertThat(attributes.size(), Matchers.is(2)); - Iterator iterator = attributes.iterator(); - CapabilityContext.Attribute attribute1 = iterator.next(); - Assert.assertThat(attribute1.getName(), Matchers.equalTo("cacheManagerName")); // <5> - Assert.assertThat(attribute1.isRequired(), Matchers.is(true)); - CapabilityContext.Attribute attribute2 = iterator.next(); - Assert.assertThat(attribute2.getName(), Matchers.equalTo("cacheName")); // <6> - Assert.assertThat(attribute2.isRequired(), Matchers.is(true)); - - ContextContainer contextContainer = managementRegistry.getContextContainer(); // <7> - Assert.assertThat(contextContainer.getName(), Matchers.equalTo("cacheManagerName")); // <8> - Assert.assertThat(contextContainer.getValue(), Matchers.startsWith("cache-manager-")); - Collection subContexts = contextContainer.getSubContexts(); - Assert.assertThat(subContexts.size(), Matchers.is(1)); - ContextContainer subContextContainer = subContexts.iterator().next(); - Assert.assertThat(subContextContainer.getName(), Matchers.equalTo("cacheName")); // <9> - Assert.assertThat(subContextContainer.getValue(), Matchers.equalTo("aCache")); - - - cacheManager.close(); + CacheManager cacheManager = null; + try { + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(); + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + + Collection capabilities = managementRegistry.getCapabilities(); // <1> + Assert.assertThat(capabilities.isEmpty(), Matchers.is(false)); + Capability capability = capabilities.iterator().next(); + String capabilityName = capability.getName(); // <2> + Collection capabilityDescriptions = capability.getDescriptors(); // <3> + Assert.assertThat(capabilityDescriptions.isEmpty(), Matchers.is(false)); + CapabilityContext capabilityContext = capability.getCapabilityContext(); + Collection attributes = capabilityContext.getAttributes(); // <4> + Assert.assertThat(attributes.size(), Matchers.is(2)); + Iterator iterator = attributes.iterator(); + CapabilityContext.Attribute attribute1 = iterator.next(); + Assert.assertThat(attribute1.getName(), Matchers.equalTo("cacheManagerName")); // <5> + Assert.assertThat(attribute1.isRequired(), Matchers.is(true)); + CapabilityContext.Attribute attribute2 = iterator.next(); + Assert.assertThat(attribute2.getName(), Matchers.equalTo("cacheName")); // <6> + Assert.assertThat(attribute2.isRequired(), Matchers.is(true)); + + ContextContainer contextContainer = managementRegistry.getContextContainer(); // <7> + 
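[Editor's note] The rewritten tests call StatsUtil.createContext(managementRegistry), which is not part of this excerpt. It presumably does what the private createContext helper removed from this class did, namely derive a Context from the registry's ContextContainer hierarchy. A minimal sketch under that assumption (the class name is illustrative):

    import org.ehcache.management.ManagementRegistryService;
    import org.terracotta.management.model.context.Context;
    import org.terracotta.management.model.context.ContextContainer;

    final class ContextSketch {
      // Builds a Context addressing the cache manager and its first registered cache.
      static Context createContext(ManagementRegistryService managementRegistry) {
        ContextContainer cacheManagerCtx = managementRegistry.getContextContainer();
        ContextContainer firstCacheCtx = cacheManagerCtx.getSubContexts().iterator().next();
        return Context.empty()
            .with(cacheManagerCtx.getName(), cacheManagerCtx.getValue())
            .with(firstCacheCtx.getName(), firstCacheCtx.getValue());
      }
    }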
Assert.assertThat(contextContainer.getName(), Matchers.equalTo("cacheManagerName")); // <8> + Assert.assertThat(contextContainer.getValue(), Matchers.startsWith("cache-manager-")); + Collection subContexts = contextContainer.getSubContexts(); + Assert.assertThat(subContexts.size(), Matchers.is(1)); + ContextContainer subContextContainer = subContexts.iterator().next(); + Assert.assertThat(subContextContainer.getName(), Matchers.equalTo("cacheName")); // <9> + Assert.assertThat(subContextContainer.getValue(), Matchers.equalTo("aCache")); + } + finally { + if(cacheManager != null) cacheManager.close(); + } + // end::capabilitiesAndContexts[] } @@ -150,77 +159,91 @@ public void actionCall() throws Exception { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) .build(); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(); - CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - Cache aCache = cacheManager.getCache("aCache", Long.class, String.class); - aCache.put(0L, "zero"); // <1> - - Context context = createContext(managementRegistry); // <2> - - managementRegistry.withCapability("ActionsCapability") // <3> - .call("clear") - .on(context) - .build() - .execute(); - - Assert.assertThat(aCache.get(0L), Matchers.is(Matchers.nullValue())); // <4> - - cacheManager.close(); + CacheManager cacheManager = null; + try { + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(); + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + Cache aCache = cacheManager.getCache("aCache", Long.class, String.class); + aCache.put(0L, "zero"); // <1> + + Context context = StatsUtil.createContext(managementRegistry); // <2> + + managementRegistry.withCapability("ActionsCapability") // <3> + .call("clear") + .on(context) + .build() + .execute(); + + Assert.assertThat(aCache.get(0L), Matchers.is(Matchers.nullValue())); // <4> + } + finally { + if(cacheManager != null) cacheManager.close(); + } // end::actionCall[] } - @Test + //TODO update managingMultipleCacheManagers() documentation/asciidoc + @Test (timeout = 5000) public void managingMultipleCacheManagers() throws Exception { // tag::managingMultipleCacheManagers[] CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) .build(); - SharedManagementService sharedManagementService = new DefaultSharedManagementService(); // <1> - CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager-1")) - .using(sharedManagementService) // <2> - .build(true); - - CacheManager cacheManager2 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager-2")) - .using(sharedManagementService) // <3> - .build(true); - - Context context1 = Context.empty() - .with("cacheManagerName", "myCacheManager-1") - .with("cacheName", "aCache"); - - Context context2 = Context.empty() - .with("cacheManagerName", "myCacheManager-2") - .with("cacheName", "aCache"); - - 
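[Editor's note] The updated statistics tests wait for the first history sample with an empty loop, e.g. while(!StatsUtil.isHistoryReady(counterContext1, 0L)) {}. That is bounded by the @Test timeouts, but it busy-spins while the 1 ms history sampler fills in. StatsUtil is not shown here, so as a hedged alternative the sketch below polls the CounterHistory directly with a sleeping, bounded wait; the helper name and timeout are illustrative only.

    import org.terracotta.management.model.stats.history.CounterHistory;

    final class HistoryPolling {
      // Waits until the history contains a sample greater than the given floor, then returns it.
      static long awaitValueAbove(CounterHistory history, long floor, long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
          int length = history.getValue().length;
          if (length > 0) {
            long latest = history.getValue()[length - 1].getValue();
            if (latest > floor) {
              return latest;
            }
          }
          Thread.sleep(50); // back off instead of busy-spinning
        }
        throw new AssertionError("history did not exceed " + floor + " within " + timeoutMillis + " ms");
      }
    }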
ResultSet counters = sharedManagementService.withCapability("StatisticsCapability") - .queryStatistic("GetCounter") - .on(context1) - .on(context2) - .build() - .execute(); - - ContextualStatistics statistics = counters.getResult(context1); - Counter counter = statistics.getStatistic(Counter.class, "GetCounter"); - - cacheManager2.close(); - cacheManager1.close(); - // end::managingMultipleCacheManagers[] - } + CacheManager cacheManager1 = null; + CacheManager cacheManager2 = null; + try { + SharedManagementService sharedManagementService = new DefaultSharedManagementService(); // <1> + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager-1").addConfiguration(EHCACHE_STATS_CONFIG)) + .using(sharedManagementService) // <2> + .build(true); + + cacheManager2 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager-2").addConfiguration(EHCACHE_STATS_CONFIG)) + .using(sharedManagementService) // <3> + .build(true); + + Context context1 = Context.empty() + .with("cacheManagerName", "myCacheManager-1") + .with("cacheName", "aCache"); + + Context context2 = Context.empty() + .with("cacheManagerName", "myCacheManager-2") + .with("cacheName", "aCache"); + + Cache cache = cacheManager1.getCache("aCache", Long.class, String.class); + cache.get(1L);//cache miss + cache.get(2L);//cache miss + + Thread.sleep(1000); + + ResultSet counters = sharedManagementService.withCapability("StatisticsCapability") + .queryStatistic("Cache:MissCount") + .on(context1) + .on(context2) + .build() + .execute(); + + ContextualStatistics statisticsContext1 = counters.getResult(context1); + + CounterHistory counterContext1 = statisticsContext1.getStatistic(CounterHistory.class, "Cache:MissCount");; + + while(!StatsUtil.isHistoryReady(counterContext1, 0L)) {} + int mostRecentSampleIndex = counterContext1.getValue().length - 1; + Assert.assertEquals(2L, counterContext1.getValue()[mostRecentSampleIndex].getValue().longValue()); + } + finally { + if(cacheManager2 != null) cacheManager2.close(); + if(cacheManager1 != null) cacheManager1.close(); + } - private static Context createContext(ManagementRegistryService managementRegistry) { - ContextContainer cacheManagerCtx = managementRegistry.getContextContainer(); - ContextContainer firstCacheCtx = cacheManagerCtx.getSubContexts().iterator().next(); - return Context.empty() - .with(cacheManagerCtx.getName(), cacheManagerCtx.getValue()) - .with(firstCacheCtx.getName(), firstCacheCtx.getValue()); + // end::managingMultipleCacheManagers[] } } diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java new file mode 100755 index 0000000000..453af75f31 --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java @@ -0,0 +1,179 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.ehcache.config.units.MemoryUnit.MB; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.ResourcePools; +import org.ehcache.core.EhcacheManager; +import org.ehcache.core.config.DefaultConfiguration; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.ehcache.spi.service.Service; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.history.CounterHistory; + +/** + * + * + */ +@RunWith(Parameterized.class) +public class EvictionTest { + + @Parameterized.Parameters + public static Collection data() { + + char[] value = new char[1000000]; + Arrays.fill(value, 'x'); + + return asList(new Object[][] { + { newResourcePoolsBuilder().heap(1, ENTRIES), 2, Arrays.asList(1l), new String(value).getBytes(), Arrays.asList("OnHeap:EvictionCount")}, + { newResourcePoolsBuilder().offheap(1, MB), 2, Arrays.asList(1l), new String(value).getBytes(), Arrays.asList("OffHeap:EvictionCount")}, + { newResourcePoolsBuilder().heap(2, ENTRIES).offheap(1, MB), 3, Arrays.asList(0l,2l), new String(value).getBytes(), Arrays.asList("OnHeap:EvictionCount", "OffHeap:EvictionCount")}, + + //FAILS: org.ehcache.core.spi.store.StoreAccessException: The element with key '0' is too large to be stored in this offheap store. + //{ newResourcePoolsBuilder().disk(1, MB), 2, Arrays.asList(1l), new String(value).getBytes(), Arrays.asList("Disk:EvictionCount")}, + + //FAILS: org.ehcache.core.spi.store.StoreAccessException: The element with key '0' is too large to be stored in this offheap store. 
+ //java.lang.IllegalStateException: No Store.Provider found to handle configured resource types [offheap, disk] from {org.ehcache.impl.internal.store.heap.OnHeapStore$Provider, org.ehcache.impl.internal.store.offheap.OffHeapStore$Provider, org.ehcache.impl.internal.store.disk.OffHeapDiskStore$Provider, org.ehcache.impl.internal.store.tiering.TieredStore$Provider, org.ehcache.clustered.client.internal.store.ClusteredStore$Provider} + //{ newResourcePoolsBuilder().offheap(1, MB).disk(2, MB), 3, Arrays.asList(0l,1l), new String(value).getBytes(), Arrays.asList("OffHeap:EvictionCount", "Disk:EvictionCount")}, + + //FAILS: Expects 1 eviction but it evicts twice. Value stored on disk must be > 1MB + //{ newResourcePoolsBuilder().heap(1, ENTRIES).offheap(1, MB).disk(2, MB), 3, Arrays.asList(0l,0l,1l), new String(value).getBytes(), Arrays.asList("OnHeap:EvictionCount","OffHeap:EvictionCount", "Disk:EvictionCount")}, + + //TODO need clustered tests: + //1. clustered + //2. offheap, clustered + //3. disk, clustered + //4. onheap, offheap, clustered (This is an invalid configuration) + }); + } + + private final ResourcePools resources; + private final int iterations; + private final List expected; //expectetd outcomes must be ordered from highest tier to lowest tier. e.g. OnHeap, OffHeap, Disk + private final byte[] value; + private final List stats; //must be ordered from highest tier to lowest tier. e.g. OnHeap, OffHeap, Disk + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + public EvictionTest(Builder resources, int iterations, List expected, byte[] value, List stats) { + this.resources = resources.build(); + this.iterations = iterations; + this.expected = expected; + this.value = value; + this.stats = stats; + } + + @Test + public void test() throws IOException, InterruptedException { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + Configuration config = new DefaultConfiguration(EvictionTest.class.getClassLoader(), + new DefaultPersistenceConfiguration(diskPath.newFolder())); + + Collection services = new ArrayList(); + services.add(managementRegistry); + + CacheManager cacheManager = null; + + try { + cacheManager = new EhcacheManager(config, services); + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Long.class, byte[].class, resources).build(); + + cacheManager.init(); + Cache cache = cacheManager.createCache("myCache", cacheConfig); + + for(long i=0; i cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder( + Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, MemoryUnit.MB).offheap(10, MemoryUnit.MB)) + .build(); + + CacheManager cacheManager = null; + + try { + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + Cache cache = cacheManager.getCache("aCache", Long.class, String.class); + cache.put(1L, "one"); + + cache.get(1L);//HIT + cache.get(1L);//HIT + cache.get(2L);//MISS + cache.get(3L);//MISS + + Thread.sleep(1000); + + Context context = StatsUtil.createContext(managementRegistry); + + ContextualStatistics missCounter = 
managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(/*"Cache:MissRate",*/ "Cache:MissCount", "Cache:MissRatio"))//TODO add Cache:MissRate once understand how to verify correct + .on(context) + .build() + .execute() + .getSingleResult(); + + Assert.assertThat(missCounter.size(), Matchers.is(2)); + + /*RateHistory missRateHistory = missCounter.getStatistic(RateHistory.class, "Cache:MissRate"); + while(!isHistoryReady(missRateHistory, 0d)) {} + //TODO how can i calculate rate? miss/second + Assert.assertThat(missRateHistory.getValue()[mostRecentIndex].getValue(), Matchers.greaterThan(0d));*/ + + CounterHistory missCountCounterHistory = missCounter.getStatistic(CounterHistory.class, "Cache:MissCount"); + while(!StatsUtil.isHistoryReady(missCountCounterHistory, 0L)) {} + int mostRecentIndex = missCountCounterHistory.getValue().length - 1; + Assert.assertThat(missCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(2L)); + + RatioHistory ratioHistory = missCounter.getStatistic(RatioHistory.class, "Cache:MissRatio"); + while(!StatsUtil.isHistoryReady(ratioHistory, Double.POSITIVE_INFINITY)) {} + mostRecentIndex = ratioHistory.getValue().length - 1; + Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(1d)); + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } + + @Test + public void statsCacheHitTest() throws InterruptedException { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager2"); + registryConfiguration.addConfiguration(EHCACHE_STATS_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, MemoryUnit.MB).offheap(10, MemoryUnit.MB)) + .build(); + + CacheManager cacheManager = null; + + try { + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("bCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + Cache cache = cacheManager.getCache("bCache", Long.class, String.class); + cache.put(1L, "1"); + cache.put(2L, "2"); + cache.put(3L, "3"); + + cache.get(1L);//HIT + cache.get(2L);//HIT + cache.get(2L);//HIT + cache.get(4L);//need a MISS for ratio, otherwise you get infinity as a value + + Thread.sleep(1000); + + Context context = StatsUtil.createContext(managementRegistry); + + ContextualStatistics contextualStatistics = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(/*"Cache:HitRate",*/ "Cache:HitCount", "Cache:HitRatio"))//TODO add Cache:HitRate once understand how to verify correct + .on(context) + .build() + .execute() + .getSingleResult(); + + Assert.assertThat(contextualStatistics.size(), Matchers.is(2)); + + /*RateHistory hitRateHistory = hitCounter.getStatistic(RateHistory.class, "Cache:HitRate"); + while(!isHistoryReady(hitRateHistory, 0d)) {} + //TODO how can i calculate rate? 
hits/second + Assert.assertThat(hitRateHistory.getValue()[mostRecentIndex].getValue(), Matchers.greaterThan(0d));*/ + + CounterHistory hitCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:HitCount"); + while(!StatsUtil.isHistoryReady(hitCountCounterHistory, 0L)) {} + int mostRecentIndex = hitCountCounterHistory.getValue().length - 1; + Assert.assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3L)); + + RatioHistory ratioHistory = contextualStatistics.getStatistic(RatioHistory.class, "Cache:HitRatio"); + while(!StatsUtil.isHistoryReady(ratioHistory, Double.POSITIVE_INFINITY)) {} + mostRecentIndex = ratioHistory.getValue().length - 1; + Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3d)); + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } + + @Test + public void statsClearCacheTest() throws InterruptedException { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB)) + .build(); + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager3"); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + registryConfiguration.addConfiguration(EHCACHE_STATS_CONFIG); + + CacheManager cacheManager = null; + + try { + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("cCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + Cache aCache = cacheManager.getCache("cCache", Long.class, String.class); + aCache.put(1L, "one"); + Assert.assertTrue(aCache.containsKey(1L)); + aCache.clear(); + Assert.assertFalse(aCache.iterator().hasNext()); + + aCache.put(1L, "one"); + Assert.assertTrue(aCache.containsKey(1L)); + aCache.clear(); + Assert.assertFalse(aCache.iterator().hasNext()); + + Thread.sleep(1000); + + Context context = StatsUtil.createContext(managementRegistry); + + ContextualStatistics clearCounter = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList("Cache:ClearCount")) + .on(context) + .build() + .execute() + .getSingleResult(); + + Assert.assertThat(clearCounter.size(), Matchers.is(1)); + CounterHistory cache_Clear_Count = clearCounter.getStatistic(CounterHistory.class, "Cache:ClearCount"); + + while(!StatsUtil.isHistoryReady(cache_Clear_Count, 0L)) {} + int mostRecentIndex = cache_Clear_Count.getValue().length - 1; + Assert.assertThat(cache_Clear_Count.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(2L)); + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java new file mode 100755 index 0000000000..c91de2737d --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java @@ -0,0 +1,71 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import org.ehcache.management.ManagementRegistryService; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.context.ContextContainer; +import org.terracotta.management.model.stats.AbstractStatisticHistory; + +/** + * + * + */ +public class StatsUtil { + + public static boolean isHistoryReady(AbstractStatisticHistory counterHistory, Double defaultValue) { + + if(counterHistory.getValue().length > 0) { + int mostRecentIndex = counterHistory.getValue().length - 1; + if(defaultValue.equals(Double.POSITIVE_INFINITY)) { + if((Double)counterHistory.getValue()[mostRecentIndex].getValue() < defaultValue ) { + return true; + } + } else { + if((Double)counterHistory.getValue()[mostRecentIndex].getValue() > defaultValue ) { + return true; + } + } + + } + return false; + } + + public static boolean isHistoryReady(AbstractStatisticHistory counterHistory) { + if(counterHistory.getValue().length > 0) { + return true; + } + return false; + } + + public static boolean isHistoryReady(AbstractStatisticHistory counterHistory, Long defaultValue) { + if(counterHistory.getValue().length > 0) { + int mostRecentIndex = counterHistory.getValue().length - 1; + if((Long)counterHistory.getValue()[mostRecentIndex].getValue() > defaultValue) { + return true; + } + } + return false; + } + + public static Context createContext(ManagementRegistryService managementRegistry) { + ContextContainer cacheManagerCtx = managementRegistry.getContextContainer(); + ContextContainer firstCacheCtx = cacheManagerCtx.getSubContexts().iterator().next(); + return Context.empty() + .with(cacheManagerCtx.getName(), cacheManagerCtx.getValue()) + .with(firstCacheCtx.getName(), firstCacheCtx.getValue()); + } +} diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java index 5ce43ef388..8b680f03c3 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java @@ -134,7 +134,7 @@ void onEvent(Object event) { managementRegistry.withCapability("StatisticCollectorCapability") .call("updateCollectedStatistics", new Parameter("StatisticsCapability"), - new Parameter(asList("PutCounter", "InexistingRate"), Collection.class.getName())) + new Parameter(asList("Cache:HitCount", "Cache:MissCount"), Collection.class.getName())) .on(Context.create("cacheManagerName", "my-cm-1")) .build() .execute() @@ -144,7 +144,6 @@ void onEvent(Object event) { cache.put("key", "val"); num.await(); - cacheManager.removeCache("my-cache"); cacheManager.close(); diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index 43eb16bfc7..c6c5049cb0 100644 --- 
a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -15,6 +15,20 @@ */ package org.ehcache.management.registry; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.everyItem; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +import java.io.File; import org.ehcache.CacheManager; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; @@ -31,48 +45,145 @@ import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.Sample; import org.terracotta.management.model.stats.history.CounterHistory; -import org.terracotta.management.model.stats.primitive.Counter; - +import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.NoSuchElementException; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; - -import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.everyItem; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.collection.IsCollectionWithSize.hasSize; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.Rule; +import org.junit.rules.TemporaryFolder; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.registry.StatisticQuery.Builder; public class DefaultManagementRegistryServiceTest { + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + @Test public void testCanGetContext() { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + CacheManager cacheManager1 = null; + try { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) .build(); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); + cacheManager1 = 
CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); - assertThat(managementRegistry.getContextContainer().getName(), equalTo("cacheManagerName")); - assertThat(managementRegistry.getContextContainer().getValue(), equalTo("myCM")); - assertThat(managementRegistry.getContextContainer().getSubContexts(), hasSize(1)); - assertThat(managementRegistry.getContextContainer().getSubContexts().iterator().next().getName(), equalTo("cacheName")); - assertThat(managementRegistry.getContextContainer().getSubContexts().iterator().next().getValue(), equalTo("aCache")); + assertThat(managementRegistry.getContextContainer().getName(), equalTo("cacheManagerName")); + assertThat(managementRegistry.getContextContainer().getValue(), equalTo("myCM")); + assertThat(managementRegistry.getContextContainer().getSubContexts(), hasSize(1)); + assertThat(managementRegistry.getContextContainer().getSubContexts().iterator().next().getName(), equalTo("cacheName")); + assertThat(managementRegistry.getContextContainer().getSubContexts().iterator().next().getValue(), equalTo("aCache")); + } + finally { + if(cacheManager1 != null) cacheManager1.close(); + } + } + + @Test + public void descriptorOnHeapTest() { + CacheManager cacheManager1 = null; + try { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + assertThat(managementRegistry.getCapabilities(), hasSize(4)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); + + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(35)); + } + finally { + if(cacheManager1 != null) cacheManager1.close(); + } - cacheManager1.close(); } + @Test + public void descriptorOffHeapTest() { + CacheManager cacheManager1 = null; + try { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder().heap(5, MB).offheap(10, MB)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache", cacheConfiguration) + .using(managementRegistry) + .build(true); + + assertThat(managementRegistry.getCapabilities(), hasSize(4)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); + assertThat(new 
ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); + + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(55)); + } + finally { + if(cacheManager1 != null) cacheManager1.close(); + } + + } + + @Test + public void descriptorDiskStoreTest() throws URISyntaxException { + PersistentCacheManager persistentCacheManager = null; + try { + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(CacheManagerBuilder.persistence(getStoragePath() + File.separator + "myData")) + .withCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .disk(10, MemoryUnit.MB, true)) + ) + .using(managementRegistry) + .build(true); + + assertThat(managementRegistry.getCapabilities(), hasSize(4)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); + + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(55)); + } + finally { + if(persistentCacheManager != null) persistentCacheManager.close(); + } + } + + private String getStoragePath() throws URISyntaxException { + return getClass().getClassLoader().getResource(".").toURI().getPath(); + } + + @Test public void testCanGetCapabilities() { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) @@ -92,7 +203,7 @@ public void testCanGetCapabilities() { assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(13)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(35)); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getCapabilityContext().getAttributes(), hasSize(2)); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getCapabilityContext().getAttributes(), hasSize(2)); @@ -100,12 +211,23 @@ public void testCanGetCapabilities() { cacheManager1.close(); } - @Test + @Test (timeout = 5000) public void testCanGetStats() { + String queryStatisticName = "Cache:HitCount"; + + long averageWindowDuration = 1; + TimeUnit averageWindowUnit = TimeUnit.MINUTES; + int historySize = 100; + long historyInterval = 1; + TimeUnit historyIntervalUnit = TimeUnit.MILLISECONDS; + long timeToDisable = 
10; + TimeUnit timeToDisableUnit = TimeUnit.MINUTES; + EhcacheStatisticsProviderConfiguration config = new EhcacheStatisticsProviderConfiguration(averageWindowDuration,averageWindowUnit,historySize,historyInterval,historyIntervalUnit,timeToDisable,timeToDisableUnit); + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) .build(); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM").addConfiguration(config)); CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() .withCache("aCache1", cacheConfiguration) @@ -121,40 +243,86 @@ public void testCanGetStats() { .with("cacheManagerName", "myCM") .with("cacheName", "aCache2"); - cacheManager1.getCache("aCache1", Long.class, String.class).put(1L, "1"); - cacheManager1.getCache("aCache1", Long.class, String.class).put(2L, "2"); - cacheManager1.getCache("aCache2", Long.class, String.class).put(3L, "3"); - cacheManager1.getCache("aCache2", Long.class, String.class).put(4L, "4"); - cacheManager1.getCache("aCache2", Long.class, String.class).put(5L, "5"); + Cache cache1 = cacheManager1.getCache("aCache1", Long.class, String.class); + Cache cache2 = cacheManager1.getCache("aCache2", Long.class, String.class); - ContextualStatistics counters = managementRegistry.withCapability("StatisticsCapability") - .queryStatistic("PutCounter") - .on(context1) - .build() - .execute() - .getResult(context1); + cache1.put(1L, "one"); + cache2.put(3L, "three"); + + cache1.get(1L); + cache1.get(2L); + cache2.get(3L); + cache2.get(4L); + + Builder builder1 = managementRegistry.withCapability("StatisticsCapability") + .queryStatistic(queryStatisticName) + .on(context1); + + ContextualStatistics counters = getResultSet(builder1, context1, null, CounterHistory.class, queryStatisticName).getResult(context1); + CounterHistory counterHistory1 = counters.getStatistic(CounterHistory.class, queryStatisticName); assertThat(counters.size(), equalTo(1)); - assertThat(counters.getStatistic(Counter.class).getValue(), equalTo(2L)); + int mostRecentSampleIndex = counterHistory1.getValue().length - 1; + assertThat(counterHistory1.getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); - ResultSet allCounters = managementRegistry.withCapability("StatisticsCapability") - .queryStatistic("PutCounter") + Builder builder2 = managementRegistry.withCapability("StatisticsCapability") + .queryStatistic(queryStatisticName) .on(context1) - .on(context2) - .build() - .execute(); + .on(context2); + ResultSet allCounters = getResultSet(builder2, context1, context2, CounterHistory.class, queryStatisticName); assertThat(allCounters.size(), equalTo(2)); assertThat(allCounters.getResult(context1).size(), equalTo(1)); - assertThat(allCounters.getResult(context2).size(), Matchers.equalTo(1)); - assertThat(allCounters.getResult(context1).getStatistic(Counter.class).getValue(), equalTo(2L)); - assertThat(allCounters.getResult(context2).getStatistic(Counter.class).getValue(), equalTo(3L)); + assertThat(allCounters.getResult(context2).size(), equalTo(1)); + + mostRecentSampleIndex = allCounters.getResult(context1).getStatistic(CounterHistory.class, queryStatisticName).getValue().length - 1; + 
assertThat(allCounters.getResult(context1).getStatistic(CounterHistory.class, queryStatisticName).getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); + + mostRecentSampleIndex = allCounters.getResult(context2).getStatistic(CounterHistory.class, queryStatisticName).getValue().length - 1; + assertThat(allCounters.getResult(context2).getStatistic(CounterHistory.class, queryStatisticName).getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); cacheManager1.close(); } - @Test + private static ResultSet getResultSet(Builder builder, Context context1, Context context2, Class type, String statisticsName) { + ResultSet counters; + + while(true) //wait till Counter history(s) is initialized and contains values. + { + counters = builder.build().execute(); + + ContextualStatistics statisticsContext1 = counters.getResult(context1); + CounterHistory counterHistoryContext1 = statisticsContext1.getStatistic(type, statisticsName); + + if(context2 != null) + { + ContextualStatistics statisticsContext2 = counters.getResult(context2); + CounterHistory counterHistoryContext2 = statisticsContext2.getStatistic(type, statisticsName); + + if(counterHistoryContext2.getValue().length > 0 && + counterHistoryContext2.getValue()[counterHistoryContext2.getValue().length - 1].getValue() > 0 && + counterHistoryContext1.getValue().length > 0 && + counterHistoryContext1.getValue()[counterHistoryContext1.getValue().length - 1].getValue() > 0) + { + break; + } + } + else + { + if(counterHistoryContext1.getValue().length > 0 && + counterHistoryContext1.getValue()[counterHistoryContext1.getValue().length - 1].getValue() > 0) + { + break; + } + } + } + + return counters; + } + + @Test (timeout=5000) public void testCanGetStatsSinceTime() throws InterruptedException { + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) .build(); @@ -172,11 +340,11 @@ public void testCanGetStatsSinceTime() throws InterruptedException { .with("cacheName", "aCache1"); StatisticQuery.Builder builder = managementRegistry.withCapability("StatisticsCapability") - .queryStatistic("AllCachePutCount") + .queryStatistic("Cache:MissCount") .on(context); ContextualStatistics statistics; - CounterHistory putCount; + CounterHistory getCount; long timestamp; // ------ @@ -186,32 +354,33 @@ public void testCanGetStatsSinceTime() throws InterruptedException { builder.build().execute(); // ------ - // 3 puts and we wait more than 1 second (history frequency) to be sure the scheduler thread has computed a new stat in the history + // 3 gets and we wait more than 1 second (history frequency) to be sure the scheduler thread has computed a new stat in the history // ------ - cacheManager1.getCache("aCache1", Long.class, String.class).put(1L, "1"); - cacheManager1.getCache("aCache1", Long.class, String.class).put(2L, "2"); - cacheManager1.getCache("aCache1", Long.class, String.class).put(2L, "2"); + cacheManager1.getCache("aCache1", Long.class, String.class).get(1L); + cacheManager1.getCache("aCache1", Long.class, String.class).get(2L); + cacheManager1.getCache("aCache1", Long.class, String.class).get(2L); do { Thread.sleep(100); statistics = builder.build().execute().getResult(context); - putCount = statistics.getStatistic(CounterHistory.class); - } while (putCount.getValue().length < 1); + getCount = statistics.getStatistic(CounterHistory.class); + } while (getCount.getValue().length < 1); - // within 1 second of history there has been 3 puts - 
assertThat(putCount.getValue()[0].getValue(), equalTo(3L)); + // within 1 second of history there has been 3 gets + int mostRecentIndex = getCount.getValue().length - 1; + assertThat(getCount.getValue()[mostRecentIndex].getValue(), equalTo(3L)); // keep time for next call (since) - timestamp = putCount.getValue()[0].getTimestamp(); + timestamp = getCount.getValue()[mostRecentIndex].getTimestamp(); // ------ - // 2 puts and we wait more than 1 second (history frequency) to be sure the scheduler thread has computed a new stat in the history + // 2 gets and we wait more than 1 second (history frequency) to be sure the scheduler thread has computed a new stat in the history // We will get only the stats SINCE last time // ------ - cacheManager1.getCache("aCache1", Long.class, String.class).put(1L, "1"); - cacheManager1.getCache("aCache1", Long.class, String.class).put(2L, "2"); + cacheManager1.getCache("aCache1", Long.class, String.class).get(1L); + cacheManager1.getCache("aCache1", Long.class, String.class).get(2L); // ------ // WITHOUT using since: the history will have 2 values @@ -220,30 +389,32 @@ public void testCanGetStatsSinceTime() throws InterruptedException { do { Thread.sleep(100); statistics = builder.build().execute().getResult(context); - putCount = statistics.getStatistic(CounterHistory.class); - } while (putCount.getValue().length < 2); + getCount = statistics.getStatistic(CounterHistory.class); + } while (getCount.getValue().length < 2); // ------ // WITH since: the history will have 1 value // ------ statistics = builder.since(timestamp + 1).build().execute().getResult(context); - putCount = statistics.getStatistic(CounterHistory.class); + getCount = statistics.getStatistic(CounterHistory.class); // get the counter for each computation at each 1 second - assertThat(Arrays.asList(putCount.getValue()), everyItem(Matchers.>hasProperty("timestamp", greaterThan(timestamp)))); + assertThat(Arrays.asList(getCount.getValue()), everyItem(Matchers.>hasProperty("timestamp", greaterThan(timestamp)))); cacheManager1.close(); } @Test public void testCall() throws ExecutionException { + CacheManager cacheManager1 = null; + try { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) .build(); ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() .withCache("aCache1", cacheConfiguration) .withCache("aCache2", cacheConfiguration) .using(managementRegistry) @@ -268,44 +439,52 @@ public void testCall() throws ExecutionException { assertThat(result.getValue(), is(nullValue())); assertThat(cacheManager1.getCache("aCache1", Long.class, String.class).get(1L), is(Matchers.nullValue())); + } + finally { + if(cacheManager1 != null) cacheManager1.close(); + } - cacheManager1.close(); } @Test public void testCallOnInexistignContext() { - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) - .build(); - - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); - - CacheManager cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache1", cacheConfiguration) - .withCache("aCache2", 
cacheConfiguration) - .using(managementRegistry) - .build(true); - - Context inexisting = Context.empty() - .with("cacheManagerName", "myCM2") - .with("cacheName", "aCache2"); - - ResultSet> results = managementRegistry.withCapability("ActionsCapability") - .call("clear") - .on(inexisting) - .build() - .execute(); - - assertThat(results.size(), equalTo(1)); - assertThat(results.getSingleResult().hasExecuted(), is(false)); - + CacheManager cacheManager1 = null; try { - results.getSingleResult().getValue(); - fail(); - } catch (Exception e) { - assertThat(e, instanceOf(NoSuchElementException.class)); + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) + .build(); + + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM")); + + cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("aCache1", cacheConfiguration) + .withCache("aCache2", cacheConfiguration) + .using(managementRegistry) + .build(true); + + Context inexisting = Context.empty() + .with("cacheManagerName", "myCM2") + .with("cacheName", "aCache2"); + + ResultSet> results = managementRegistry.withCapability("ActionsCapability") + .call("clear") + .on(inexisting) + .build() + .execute(); + + assertThat(results.size(), equalTo(1)); + assertThat(results.getSingleResult().hasExecuted(), is(false)); + + try { + results.getSingleResult().getValue(); + fail(); + } catch (Exception e) { + assertThat(e, instanceOf(NoSuchElementException.class)); + } + } + finally { + if(cacheManager1 != null) cacheManager1.close(); } - cacheManager1.close(); } } diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java index e48f878463..3819f8858b 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java @@ -19,6 +19,7 @@ import org.ehcache.config.CacheConfiguration; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.ManagementRegistryServiceConfiguration; import org.ehcache.management.SharedManagementService; import org.hamcrest.Matchers; @@ -31,9 +32,11 @@ import org.terracotta.management.model.capabilities.Capability; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.context.ContextContainer; -import org.terracotta.management.registry.ResultSet; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.model.stats.primitive.Counter; +import org.terracotta.management.model.stats.history.CounterHistory; +import org.terracotta.management.registry.ResultSet; +import org.terracotta.management.registry.StatisticQuery; +import org.terracotta.management.registry.StatisticQuery.Builder; import java.util.ArrayList; import java.util.Arrays; @@ -41,6 +44,7 @@ import java.util.List; import java.util.NoSuchElementException; import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static 
org.hamcrest.CoreMatchers.equalTo; @@ -66,6 +70,8 @@ public class DefaultSharedManagementServiceTest { @Before public void init() { + EhcacheStatisticsProviderConfiguration config = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) .build(); @@ -74,14 +80,14 @@ public void init() { cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder() .withCache("aCache1", cacheConfiguration) .using(service) - .using(config1 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM1")) + .using(config1 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM1").addConfiguration(config)) .build(true); cacheManager2 = CacheManagerBuilder.newCacheManagerBuilder() .withCache("aCache2", cacheConfiguration) .withCache("aCache3", cacheConfiguration) .using(service) - .using(config2 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM2")) + .using(config2 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM2").addConfiguration(config)) .build(true); // this serie of calls make sure the registry still works after a full init / close / init loop @@ -145,8 +151,10 @@ public void testSharedCapabilities() { assertThat(new ArrayList(capabilities2).get(3).getName(), equalTo("SettingsCapability")); } - @Test + @Test (timeout=10000) public void testStats() { + String statisticName = "Cache:MissCount"; + List contextList = Arrays.asList( Context.empty() .with("cacheManagerName", "myCM1") @@ -158,15 +166,14 @@ public void testStats() { .with("cacheManagerName", "myCM2") .with("cacheName", "aCache3")); - cacheManager1.getCache("aCache1", Long.class, String.class).put(1L, "1"); - cacheManager2.getCache("aCache2", Long.class, String.class).put(2L, "2"); - cacheManager2.getCache("aCache3", Long.class, String.class).put(3L, "3"); + cacheManager1.getCache("aCache1", Long.class, String.class).get(1L); + cacheManager2.getCache("aCache2", Long.class, String.class).get(2L); + cacheManager2.getCache("aCache3", Long.class, String.class).get(3L); - ResultSet allCounters = service.withCapability("StatisticsCapability") - .queryStatistic("PutCounter") - .on(contextList) - .build() - .execute(); + Builder builder = service.withCapability("StatisticsCapability") + .queryStatistic(statisticName) + .on(contextList); + ResultSet allCounters = getResultSet(builder, contextList, CounterHistory.class, statisticName); assertThat(allCounters.size(), equalTo(3)); @@ -174,9 +181,36 @@ public void testStats() { assertThat(allCounters.getResult(contextList.get(1)).size(), equalTo(1)); assertThat(allCounters.getResult(contextList.get(2)).size(), equalTo(1)); - assertThat(allCounters.getResult(contextList.get(0)).getStatistic(Counter.class).getValue(), equalTo(1L)); - assertThat(allCounters.getResult(contextList.get(1)).getStatistic(Counter.class).getValue(), equalTo(1L)); - assertThat(allCounters.getResult(contextList.get(2)).getStatistic(Counter.class).getValue(), equalTo(1L)); + + int mostRecentSampleIndex = allCounters.getResult(contextList.get(0)).getStatistic(CounterHistory.class, statisticName).getValue().length - 1; + assertThat(allCounters.getResult(contextList.get(0)).getStatistic(CounterHistory.class, statisticName).getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); + + mostRecentSampleIndex = 
allCounters.getResult(contextList.get(1)).getStatistic(CounterHistory.class, statisticName).getValue().length - 1; + assertThat(allCounters.getResult(contextList.get(1)).getStatistic(CounterHistory.class, statisticName).getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); + + mostRecentSampleIndex = allCounters.getResult(contextList.get(2)).getStatistic(CounterHistory.class, statisticName).getValue().length - 1; + assertThat(allCounters.getResult(contextList.get(2)).getStatistic(CounterHistory.class, statisticName).getValue()[mostRecentSampleIndex].getValue(), equalTo(1L)); + + } + + private static ResultSet getResultSet(StatisticQuery.Builder builder, List contextList, Class type, String statisticsName) { + ResultSet counters; + + //wait till Counter history is initialized and contains values > 0. + while(true) { + counters = builder.build().execute(); + + if(counters.getResult(contextList.get(0)).getStatistic(type, statisticsName).getValue().length > 0 && + counters.getResult(contextList.get(0)).getStatistic(type, statisticsName).getValue()[counters.getResult(contextList.get(0)).getStatistic(type, statisticsName).getValue().length - 1].getValue() > 0 && + counters.getResult(contextList.get(1)).getStatistic(type, statisticsName).getValue().length > 0 && + counters.getResult(contextList.get(1)).getStatistic(type, statisticsName).getValue()[counters.getResult(contextList.get(1)).getStatistic(type, statisticsName).getValue().length - 1].getValue() > 0 && + counters.getResult(contextList.get(2)).getStatistic(type, statisticsName).getValue().length > 0 && + counters.getResult(contextList.get(2)).getStatistic(type, statisticsName).getValue()[counters.getResult(contextList.get(2)).getStatistic(type, statisticsName).getValue().length - 1].getValue() > 0) { + break; + } + } + + return counters; } @Test From ccc1fedb2b6f54c806bc2d1101c07e05743fe320 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Tue, 6 Sep 2016 23:34:13 +0530 Subject: [PATCH 029/218] Client id & message id generation #1208 --- .../client/internal/EhcacheClientEntity.java | 50 +++++++------- .../internal/EhcacheClientEntityFactory.java | 13 ++-- .../service/DefaultClusteringService.java | 3 +- .../store/StrongServerStoreProxy.java | 4 +- .../EhcacheClientEntityFactoryTest.java | 18 ++--- ...usteredStateRepositoryReplicationTest.java | 1 + .../internal/store/ClusteredStoreTest.java | 4 +- .../store/EventualServerStoreProxyTest.java | 4 +- .../NoInvalidationServerStoreProxyTest.java | 4 +- .../store/StrongServerStoreProxyTest.java | 16 +++-- .../messages/EhcacheEntityMessage.java | 15 +++++ .../messages/LifeCycleMessageFactory.java | 19 ++++-- .../internal/messages/LifecycleMessage.java | 33 +++++++--- .../internal/messages/ReconnectData.java | 65 +++++++++++++++++++ .../internal/messages/ReconnectDataCodec.java | 32 +++++---- .../internal/messages/ServerStoreOpCodec.java | 47 +++++++++----- .../internal/messages/EhcacheCodecTest.java | 6 +- .../messages/ReconnectDataCodecTest.java | 24 +++---- .../messages/ServerStoreOpCodecTest.java | 8 ++- .../messages/ServerStoreOpMessageTest.java | 3 - ...cheClientEntityFactoryIntegrationTest.java | 30 +++++---- .../clustered/server/EhcacheActiveEntity.java | 4 +- .../server/EhcacheActiveEntityTest.java | 1 + .../repo/StateRepositoryManagerTest.java | 1 - 24 files changed, 278 insertions(+), 127 deletions(-) create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java diff --git 
a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index 7fd43c4d1a..3f6c564586 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -33,7 +33,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Failure; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Type; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.common.internal.messages.ReconnectData; import org.ehcache.clustered.common.internal.messages.ReconnectDataCodec; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp; import org.slf4j.Logger; @@ -47,7 +47,6 @@ import org.terracotta.exception.EntityException; import java.util.EnumSet; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -56,6 +55,7 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicLong; import static org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp.GET; import static org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp.getServerStoreOp; @@ -69,8 +69,7 @@ public class EhcacheClientEntity implements Entity { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheClientEntity.class); - private Set reconnectData = new HashSet(); - private int reconnectDatalen = 0; + private ReconnectData reconnectData = new ReconnectData(); public interface ResponseListener { void onResponse(T response); @@ -80,12 +79,15 @@ public interface DisconnectionListener { void onDisconnection(); } + private final AtomicLong sequenceGenerator = new AtomicLong(0L); + private final EntityClientEndpoint endpoint; private final LifeCycleMessageFactory messageFactory; private final Map, List>> responseListeners = new ConcurrentHashMap, List>>(); private final List disconnectionListeners = new CopyOnWriteArrayList(); private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); private volatile boolean connected = true; + private volatile UUID clientId; private Timeouts timeouts = Timeouts.builder().build(); @@ -102,7 +104,7 @@ public void handleMessage(EntityResponse messageFromServer) { @Override public byte[] createExtendedReconnectData() { - return reconnectDataCodec.encode(reconnectData, reconnectDatalen); + return reconnectDataCodec.encode(reconnectData); } @Override @@ -138,6 +140,12 @@ private void fireResponseEvent(EhcacheEntityResponse response) { } } + public void setClientId(UUID clientId) { + this.clientId = clientId; + this.messageFactory.setClientId(clientId); + this.reconnectData.setClientId(clientId); + } + public boolean isConnected() { return connected; } @@ -184,7 +192,7 @@ public void createCache(String name, ServerStoreConfiguration serverStoreConfigu throws ClusteredTierCreationException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.createServerStore(name, serverStoreConfiguration), true); - addReconnectData(name); + reconnectData.add(name); } catch 
(ClusterException e) { throw new ClusteredTierCreationException("Error creating clustered tier '" + name + "'", e); } @@ -194,7 +202,7 @@ public void validateCache(String name, ServerStoreConfiguration serverStoreConfi throws ClusteredTierValidationException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.validateServerStore(name , serverStoreConfiguration), false); - addReconnectData(name); + reconnectData.add(name); } catch (ClusterException e) { throw new ClusteredTierValidationException("Error validating clustered tier '" + name + "'", e); } @@ -203,7 +211,7 @@ public void validateCache(String name, ServerStoreConfiguration serverStoreConfi public void releaseCache(String name) throws ClusteredTierReleaseException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.releaseServerStore(name), false); - removeReconnectData(name); + reconnectData.remove(name); } catch (ClusterException e) { throw new ClusteredTierReleaseException("Error releasing clustered tier '" + name + "'", e); } @@ -212,7 +220,7 @@ public void releaseCache(String name) throws ClusteredTierReleaseException, Time public void destroyCache(String name) throws ClusteredTierDestructionException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.destroyServerStore(name), true); - removeReconnectData(name); + reconnectData.remove(name); } catch (ResourceBusyException e) { throw new ClusteredTierDestructionException(e.getMessage(), e); } catch (ClusterException e) { @@ -220,18 +228,6 @@ public void destroyCache(String name) throws ClusteredTierDestructionException, } } - private void addReconnectData(String name) { - reconnectData.add(name); - reconnectDatalen += name.length(); - } - - private void removeReconnectData(String name) { - if (!reconnectData.contains(name)) { - reconnectData.remove(name); - reconnectDatalen -= name.length(); - } - } - /** * Sends a message to the {@code EhcacheActiveEntity} associated with this {@code EhcacheClientEntity} and * awaits a response. 
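A rough sketch of the client-id and message-id pattern this commit (PATCH 029, "Client id & message id generation #1208") threads through EhcacheClientEntity: the client UUID is set once (and pushed into the message factory and reconnect data), and outgoing messages are stamped from an AtomicLong sequence. This is illustrative only; DemoMessage and DemoClientEntity are invented stand-ins, not Ehcache classes, and in the actual diff the sequence id is attached inside invokeAsync and only for replicated invokes.

    import java.util.UUID;
    import java.util.concurrent.atomic.AtomicLong;

    public class ClientMessageIdSketch {

      // Simplified stand-in for an entity message: carries the client UUID and a message id.
      static final class DemoMessage {
        private final String operation;
        private UUID clientId;
        private long id = -1L;

        DemoMessage(String operation) {
          this.operation = operation;
        }

        void setClientId(UUID clientId) {
          this.clientId = clientId;
        }

        void setId(long id) {
          this.id = id;
        }

        @Override
        public String toString() {
          return operation + " [clientId=" + clientId + ", msgId=" + id + "]";
        }
      }

      // Simplified stand-in for the client entity: the client id is set once, and each
      // outgoing message gets the next value of an AtomicLong, mirroring the
      // sequenceGenerator.incrementAndGet() call added by this patch.
      static final class DemoClientEntity {
        private final AtomicLong sequenceGenerator = new AtomicLong(0L);
        private volatile UUID clientId;

        void setClientId(UUID clientId) {
          this.clientId = clientId;
        }

        DemoMessage prepareInvoke(String operation) {
          if (clientId == null) {
            // Same guard the patch adds before invoking: no message leaves without a client identity.
            throw new IllegalStateException("Client ID cannot be null");
          }
          DemoMessage message = new DemoMessage(operation);
          message.setClientId(clientId);
          message.setId(sequenceGenerator.incrementAndGet());
          return message;
        }
      }

      public static void main(String[] args) {
        DemoClientEntity entity = new DemoClientEntity();
        entity.setClientId(UUID.randomUUID());
        System.out.println(entity.prepareInvoke("createServerStore"));   // msgId=1
        System.out.println(entity.prepareInvoke("validateServerStore")); // msgId=2
      }
    }

The hunk that follows shows where the real code applies this: invokeAsync refuses to send when clientId is null and increments the sequence only when the message is replicated.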
@@ -283,12 +279,18 @@ private EhcacheEntityResponse invokeInternal(TimeoutDuration timeLimit, EhcacheE public InvokeFuture invokeAsync(EhcacheEntityMessage message, boolean replicate) throws MessageCodecException { + InvokeFuture invoke; + if (clientId == null) { + throw new IllegalStateException("Client ID cannot be null"); + } if (replicate) { - return endpoint.beginInvoke().message(message).replicate(true).invoke(); //TODO: remove replicate call once - //https://github.com/Terracotta-OSS/terracotta-apis/issues/139 is fixed + message.setId(sequenceGenerator.incrementAndGet()); + //TODO: remove the replicate call with latest passthrough upgrade + invoke = endpoint.beginInvoke().message(message).replicate(true).invoke(); } else { - return endpoint.beginInvoke().message(message).replicate(false).invoke(); + invoke = endpoint.beginInvoke().message(message).replicate(false).invoke(); } + return invoke; } private static T waitFor(TimeoutDuration timeLimit, InvokeFuture future) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java index 3391af2f1f..966735aa9b 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.client.internal; +import org.ehcache.clustered.client.internal.EhcacheClientEntity.Timeouts; import org.ehcache.clustered.client.internal.service.ClusteredTierManagerConfigurationException; import org.ehcache.clustered.client.internal.service.ClusteredTierManagerValidationException; import org.ehcache.clustered.client.service.EntityBusyException; @@ -45,15 +46,17 @@ public class EhcacheClientEntityFactory { private final Connection connection; private final Map maintenanceHolds = new ConcurrentHashMap(); - private final EhcacheClientEntity.Timeouts entityTimeouts; + private final Timeouts entityTimeouts; + private final UUID clientId; - public EhcacheClientEntityFactory(Connection connection) { - this(connection, EhcacheClientEntity.Timeouts.builder().build()); + public EhcacheClientEntityFactory(Connection connection, UUID clientId) { + this(connection, clientId, Timeouts.builder().build()); } - public EhcacheClientEntityFactory(Connection connection, EhcacheClientEntity.Timeouts entityTimeouts) { + public EhcacheClientEntityFactory(Connection connection, UUID clientId, Timeouts entityTimeouts) { this.connection = connection; this.entityTimeouts = entityTimeouts; + this.clientId = clientId; } public boolean acquireLeadership(String entityIdentifier) { @@ -110,6 +113,7 @@ public void create(final String identifier, final ServerSideConfiguration config EhcacheClientEntity entity = ref.fetchEntity(); try { entity.setTimeouts(entityTimeouts); + entity.setClientId(clientId); entity.configure(config); return; } finally { @@ -168,6 +172,7 @@ public EhcacheClientEntity retrieve(String identifier, ServerSideConfiguration c boolean validated = false; try { entity.setTimeouts(entityTimeouts); + entity.setClientId(clientId); entity.validate(config); validated = true; return entity; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 
694e3500bc..1f81352a15 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -62,6 +62,7 @@ import java.net.URISyntaxException; import java.util.Arrays; import java.util.Properties; +import java.util.UUID; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeoutException; @@ -80,6 +81,7 @@ class DefaultClusteringService implements ClusteringService, EntityService { private final String entityIdentifier; private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap(); private final EhcacheClientEntity.Timeouts operationTimeouts; + private final UUID clientId = UUID.randomUUID(); private volatile Connection clusterConnection; private EhcacheClientEntityFactory entityFactory; @@ -208,7 +210,6 @@ private EhcacheClientEntity autoCreateEntity() throws EhcacheEntityValidationExc public void startForMaintenance(ServiceProvider serviceProvider) { initClusterConnection(); createEntityFactory(); - if (!entityFactory.acquireLeadership(entityIdentifier)) { entityFactory = null; closeConnection(); diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java index 2f9f34182a..cf06ad66b6 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java @@ -119,7 +119,7 @@ public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { try { LOGGER.debug("CLIENT: ack'ing invalidation of hash {} from cache {} (ID {})", key, cacheId, invalidationId); - entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), true); + entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), false); } catch (Exception e) { //TODO: what should be done here? LOGGER.error("error acking client invalidation of hash {} on cache {}", key, cacheId, e); @@ -143,7 +143,7 @@ public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { try { LOGGER.debug("CLIENT: ack'ing invalidation of all from cache {} (ID {})", cacheId, invalidationId); - entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), true); + entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), false); } catch (Exception e) { //TODO: what should be done here? 
LOGGER.error("error acking client invalidation of all on cache {}", cacheId, e); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java index 765816809d..d5621744a4 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java @@ -44,6 +44,8 @@ public class EhcacheClientEntityFactoryTest { + private static final UUID CLIENT_ID = UUID.randomUUID(); + @Test public void testCreate() throws Exception { EhcacheClientEntity entity = mock(EhcacheClientEntity.class); @@ -54,7 +56,7 @@ public void testCreate() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); factory.create("test", null); verify(entityRef).create(any(UUID.class)); verify(entity).configure(any(ServerSideConfiguration.class)); @@ -72,7 +74,7 @@ public void testCreateBadConfig() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); try { factory.create("test", null); fail("Expecting EhcacheEntityCreationException"); @@ -94,7 +96,7 @@ public void testCreateWhenExisting() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); try { factory.create("test", null); fail("Expected EntityAlreadyExistsException"); @@ -113,7 +115,7 @@ public void testRetrieve() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); assertThat(factory.retrieve("test", null), is(entity)); verify(entity).validate(any(ServerSideConfiguration.class)); verify(entity, never()).close(); @@ -130,7 +132,7 @@ public void testRetrieveFailedValidate() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); try { factory.retrieve("test", null); fail("Expecting IllegalArgumentException"); @@ -151,7 +153,7 @@ public void testRetrieveWhenNotExisting() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); try { factory.retrieve("test", null); fail("Expected EntityNotFoundException"); @@ -169,7 +171,7 @@ public void 
testDestroy() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); factory.destroy("test"); verify(entityRef).destroy(); } @@ -183,7 +185,7 @@ public void testDestroyWhenNotExisting() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); try { factory.destroy("test"); fail("Expected EhcacheEntityNotFoundException"); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java index 01327283b7..346ad67705 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java @@ -37,6 +37,7 @@ import java.lang.reflect.Field; import java.net.URI; +import java.util.UUID; import java.util.concurrent.ConcurrentMap; import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java index 824ac1e872..eca7bd0ee8 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java @@ -54,6 +54,7 @@ import java.util.HashSet; import java.util.Map; import java.util.Properties; +import java.util.UUID; import java.util.concurrent.TimeoutException; import static org.ehcache.clustered.util.StatisticsTestUtils.validateStat; @@ -75,6 +76,7 @@ public class ClusteredStoreTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); + private static final UUID CLIENT_ID = UUID.randomUUID(); ClusteredStore store; @@ -86,7 +88,7 @@ public void setup() throws Exception { ); Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); - EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection, CLIENT_ID); ServerSideConfiguration serverConfig = new ServerSideConfiguration("defaultResource", Collections.emptyMap()); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java index 347621cf9f..f15ee15a73 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java @@ -37,6 +37,7 @@ import 
java.util.Collections; import java.util.List; import java.util.Properties; +import java.util.UUID; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -54,6 +55,7 @@ public class EventualServerStoreProxyTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); + private static final UUID CLIENT_ID = UUID.randomUUID(); private static EhcacheClientEntity clientEntity1; private static EhcacheClientEntity clientEntity2; @@ -68,7 +70,7 @@ public static void setUp() throws Exception { .build()); Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); - EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection, CLIENT_ID); entityFactory.create("TestCacheManager", new ServerSideConfiguration("defaultResource", Collections.emptyMap())); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java index 1f9b9fe01c..858ac9b17c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java @@ -37,6 +37,7 @@ import java.util.Collections; import java.util.Iterator; import java.util.Properties; +import java.util.UUID; import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; @@ -48,6 +49,7 @@ public class NoInvalidationServerStoreProxyTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); + private static final UUID CLIENT_ID = UUID.randomUUID(); private static EhcacheClientEntity clientEntity; private static NoInvalidationServerStoreProxy serverStoreProxy; @@ -60,7 +62,7 @@ public static void setUp() throws Exception { .build()); Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); - EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection, CLIENT_ID); ServerSideConfiguration serverConfig = new ServerSideConfiguration("defaultResource", Collections.emptyMap()); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java index cc0be0eec9..71ee3bf38c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java @@ -38,6 +38,7 @@ import java.util.Collections; import java.util.List; import java.util.Properties; +import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; @@ -58,7 +59,8 @@ public class StrongServerStoreProxyTest { - private static final 
ExecutorService executorService = Executors.newCachedThreadPool(); + private static final ExecutorService EXECUTOR_SERVICE = Executors.newCachedThreadPool(); + private static final UUID CLIENT_ID = UUID.randomUUID(); private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); @@ -76,7 +78,7 @@ public static void setUp() throws Exception { .build()); Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); - EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection); + EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection, CLIENT_ID); entityFactory.create("TestCacheManager", new ServerSideConfiguration("defaultResource", Collections.emptyMap())); @@ -115,7 +117,7 @@ public static void tearDown() throws Exception { } UnitTestConnectionService.remove(CLUSTER_URI); - executorService.shutdown(); + EXECUTOR_SERVICE.shutdown(); } @Test @@ -227,14 +229,14 @@ public void onInvalidateAll() { }; serverStoreProxy2.addInvalidationListener(listener); - executorService.submit(new Callable() { + EXECUTOR_SERVICE.submit(new Callable() { @Override public Object call() throws Exception { serverStoreProxy1.append(1L, createPayload(1L)); return null; } }); - executorService.submit(new Callable() { + EXECUTOR_SERVICE.submit(new Callable() { @Override public Object call() throws Exception { serverStoreProxy1.append(1L, createPayload(1L)); @@ -320,14 +322,14 @@ public void onInvalidateAll() { }; serverStoreProxy2.addInvalidationListener(listener); - executorService.submit(new Callable() { + EXECUTOR_SERVICE.submit(new Callable() { @Override public Future call() throws Exception { serverStoreProxy1.clear(); return null; } }); - executorService.submit(new Callable() { + EXECUTOR_SERVICE.submit(new Callable() { @Override public Future call() throws Exception { serverStoreProxy1.clear(); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java index 88daee109e..c0403bfbed 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java @@ -18,11 +18,17 @@ import org.terracotta.entity.EntityMessage; +import java.util.UUID; + /** * Defines messages for interactions with an {@code EhcacheActiveEntity}. */ public abstract class EhcacheEntityMessage implements EntityMessage { + public static final long NOT_REPLICATED = -1; + + private long id = NOT_REPLICATED; + /** * These types represent the top level Ehcache entity message types. * Each of these top level types can have subtypes of messages. 
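
The id added to EhcacheEntityMessage is the hook the rest of this series hangs replication tracking on: replicated invokes get a monotonically increasing sequence number, everything else keeps the NOT_REPLICATED sentinel. A rough sketch of that convention as the client invoke path applies it (the send helper and the endpoint field are illustrative stand-ins, not part of the patch):

    private final AtomicLong sequenceGenerator = new AtomicLong();

    private void send(EhcacheEntityMessage message, boolean replicate) throws MessageCodecException {
      if (replicate) {
        // only replicated messages consume a sequence number and get tracked server side
        message.setId(sequenceGenerator.incrementAndGet());
      }
      // otherwise message.getId() stays at EhcacheEntityMessage.NOT_REPLICATED (-1L)
      endpoint.beginInvoke().message(message).replicate(replicate).invoke();
    }
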
@@ -62,4 +68,13 @@ public static Type toType(byte code) { public String toString() { return getType().toString(); } + + public void setId(long id) { + this.id = id; + } + + public long getId() { + return this.id; + } + } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java index 2b34a52289..c099cdbfd7 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java @@ -19,30 +19,37 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import java.util.UUID; + public class LifeCycleMessageFactory { + private UUID clientId; + public EhcacheEntityMessage validateStoreManager(ServerSideConfiguration configuration){ - return new LifecycleMessage.ValidateStoreManager(configuration); + return new LifecycleMessage.ValidateStoreManager(configuration, clientId); } public EhcacheEntityMessage configureStoreManager(ServerSideConfiguration configuration) { - return new LifecycleMessage.ConfigureStoreManager(configuration); + return new LifecycleMessage.ConfigureStoreManager(configuration, clientId); } public EhcacheEntityMessage createServerStore(String name, ServerStoreConfiguration serverStoreConfiguration) { - return new LifecycleMessage.CreateServerStore(name, serverStoreConfiguration); + return new LifecycleMessage.CreateServerStore(name, serverStoreConfiguration, clientId); } public EhcacheEntityMessage validateServerStore(String name, ServerStoreConfiguration serverStoreConfiguration) { - return new LifecycleMessage.ValidateServerStore(name, serverStoreConfiguration); + return new LifecycleMessage.ValidateServerStore(name, serverStoreConfiguration, clientId); } public EhcacheEntityMessage releaseServerStore(String name) { - return new LifecycleMessage.ReleaseServerStore(name); + return new LifecycleMessage.ReleaseServerStore(name, clientId); } public EhcacheEntityMessage destroyServerStore(String name) { - return new LifecycleMessage.DestroyServerStore(name); + return new LifecycleMessage.DestroyServerStore(name, clientId); } + public void setClientId(UUID clientId) { + this.clientId = clientId; + } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java index 9638ad5782..fc07cdcb4b 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java @@ -20,6 +20,7 @@ import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import java.io.Serializable; +import java.util.UUID; public abstract class LifecycleMessage extends EhcacheEntityMessage implements Serializable { @@ -32,6 +33,15 @@ public enum LifeCycleOp { DESTROY_SERVER_STORE, } + protected UUID clientId; + + public UUID getClientId() { + if (clientId == null) { + throw new AssertionError("Client Id cannot be null for lifecycle messages"); + } + return this.clientId; + } + @Override public byte getOpCode() { return getType().getCode(); @@ -54,8 +64,9 @@ public static class ValidateStoreManager extends 
LifecycleMessage { private final ServerSideConfiguration configuration; - ValidateStoreManager(ServerSideConfiguration config) { + ValidateStoreManager(ServerSideConfiguration config, UUID clientId) { this.configuration = config; + this.clientId = clientId; } @Override @@ -73,8 +84,9 @@ public static class ConfigureStoreManager extends LifecycleMessage { private final ServerSideConfiguration configuration; - ConfigureStoreManager(ServerSideConfiguration config) { + ConfigureStoreManager(ServerSideConfiguration config, UUID clientId) { this.configuration = config; + this.clientId = clientId; } @Override @@ -93,9 +105,10 @@ public abstract static class BaseServerStore extends LifecycleMessage { private final String name; private final ServerStoreConfiguration storeConfiguration; - protected BaseServerStore(String name, ServerStoreConfiguration storeConfiguration) { + protected BaseServerStore(String name, ServerStoreConfiguration storeConfiguration, UUID clientId) { this.name = name; this.storeConfiguration = storeConfiguration; + this.clientId = clientId; } public String getName() { @@ -114,8 +127,8 @@ public ServerStoreConfiguration getStoreConfiguration() { public static class CreateServerStore extends BaseServerStore { private static final long serialVersionUID = -5832725455629624613L; - CreateServerStore(String name, ServerStoreConfiguration storeConfiguration) { - super(name, storeConfiguration); + CreateServerStore(String name, ServerStoreConfiguration storeConfiguration, UUID clientId) { + super(name, storeConfiguration, clientId); } @Override @@ -130,8 +143,8 @@ public LifeCycleOp operation() { public static class ValidateServerStore extends BaseServerStore { private static final long serialVersionUID = 8762670006846832185L; - ValidateServerStore(String name, ServerStoreConfiguration storeConfiguration) { - super(name, storeConfiguration); + ValidateServerStore(String name, ServerStoreConfiguration storeConfiguration, UUID clientId) { + super(name, storeConfiguration, clientId); } @Override @@ -148,8 +161,9 @@ public static class ReleaseServerStore extends LifecycleMessage { private final String name; - ReleaseServerStore(String name) { + ReleaseServerStore(String name, UUID clientId) { this.name = name; + this.clientId = clientId; } @Override @@ -170,8 +184,9 @@ public static class DestroyServerStore extends LifecycleMessage { private final String name; - DestroyServerStore(String name) { + DestroyServerStore(String name, UUID clientId) { this.name = name; + this.clientId = clientId; } @Override diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java new file mode 100644 index 0000000000..a28fdb049f --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java @@ -0,0 +1,65 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import java.util.Collections; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +public class ReconnectData { + + private static final byte CLIENT_ID_SIZE = 16; + private static final byte ENTRY_SIZE = 4; + + private volatile UUID clientId; + private final Set reconnectData = Collections.newSetFromMap(new ConcurrentHashMap()); + private final AtomicInteger reconnectDatalen = new AtomicInteger(CLIENT_ID_SIZE); + + public UUID getClientId() { + if (clientId == null) { + throw new AssertionError("Client ID cannot be null"); + } + return clientId; + } + + public void setClientId(UUID clientId) { + this.clientId = clientId; + } + + public void add(String name) { + reconnectData.add(name); + reconnectDatalen.addAndGet(2 * name.length() + ENTRY_SIZE); + } + + public void remove(String name) { + if (reconnectData.contains(name)) { + reconnectData.remove(name); + reconnectDatalen.addAndGet(-(2 * name.length() + ENTRY_SIZE)); + } + } + + public Set getAllCaches() { + return Collections.unmodifiableSet(reconnectData); + } + + public int getDataLength() { + return reconnectDatalen.get(); + } + +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java index b44bc82820..7a27b712a3 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java @@ -17,32 +17,36 @@ package org.ehcache.clustered.common.internal.messages; +import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; + import java.nio.ByteBuffer; -import java.util.HashSet; -import java.util.Set; +import java.util.UUID; public class ReconnectDataCodec { - private static final byte ENTRY_SIZE = 4; - - public byte[] encode(Set cacheIds, int length) { - ByteBuffer reconnectData = ByteBuffer.allocate(2 * length + cacheIds.size() * ENTRY_SIZE); - for (String cacheId : cacheIds) { - reconnectData.putInt(cacheId.length()); - CodecUtil.putStringAsCharArray(reconnectData, cacheId); + public byte[] encode(ReconnectData reconnectData) { + ByteBuffer encodedMsg = ByteBuffer.allocate(reconnectData.getDataLength()); + encodedMsg.put(ClusteredEhcacheIdentity.serialize(reconnectData.getClientId())); + for (String cacheId : reconnectData.getAllCaches()) { + encodedMsg.putInt(cacheId.length()); + CodecUtil.putStringAsCharArray(encodedMsg, cacheId); } - return reconnectData.array(); + return encodedMsg.array(); } - public Set decode(byte[] payload) { - Set cacheIds = new HashSet(); + public ReconnectData decode(byte[] payload) { + ReconnectData reconnectData = new ReconnectData(); ByteBuffer byteBuffer = ByteBuffer.wrap(payload); + long msb = byteBuffer.getLong(); + long lsb = byteBuffer.getLong(); + reconnectData.setClientId(new UUID(msb, lsb)); + while (byteBuffer.hasRemaining()) { int cacheIdSize = byteBuffer.getInt(); - cacheIds.add(CodecUtil.getStringFromBuffer(byteBuffer, cacheIdSize)); + reconnectData.add(CodecUtil.getStringFromBuffer(byteBuffer, cacheIdSize)); } - return cacheIds; + return reconnectData; } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java 
b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java index 67ee534700..4d9736a9b2 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java @@ -33,6 +33,7 @@ class ServerStoreOpCodec { private static final byte KEY_SIZE = 8; private static final byte CHAIN_LEN_SIZE = 4; private static final byte INVALIDATION_ID_LEN_SIZE = 4; + private static final byte MESSAGE_ID_SIZE = 8; private final ChainCodec chainCodec; @@ -54,26 +55,26 @@ public byte[] encode(ServerStoreOpMessage message) { return encodedMsg.array(); case APPEND: AppendMessage appendMessage = (AppendMessage)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + 2 * cacheIdLen + appendMessage + encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + MESSAGE_ID_SIZE + 2 * cacheIdLen + appendMessage .getPayload() .remaining()); - putCacheIdKeyAndOpCode(encodedMsg, appendMessage.getCacheId(), appendMessage.getKey(), appendMessage.getOpCode()); + putCacheIdKeyAndOpCode(encodedMsg, appendMessage, appendMessage.getKey()); encodedMsg.put(appendMessage.getPayload()); return encodedMsg.array(); case GET_AND_APPEND: GetAndAppendMessage getAndAppendMessage = (GetAndAppendMessage)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + 2 * cacheIdLen + + encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + MESSAGE_ID_SIZE + 2 * cacheIdLen + getAndAppendMessage.getPayload().remaining()); - putCacheIdKeyAndOpCode(encodedMsg, getAndAppendMessage.getCacheId(), getAndAppendMessage.getKey(), getAndAppendMessage.getOpCode()); + putCacheIdKeyAndOpCode(encodedMsg, getAndAppendMessage, getAndAppendMessage.getKey()); encodedMsg.put(getAndAppendMessage.getPayload()); return encodedMsg.array(); case REPLACE: ReplaceAtHeadMessage replaceAtHeadMessage = (ReplaceAtHeadMessage)message; byte[] encodedExpectedChain = chainCodec.encode(replaceAtHeadMessage.getExpect()); byte[] encodedUpdatedChain = chainCodec.encode(replaceAtHeadMessage.getUpdate()); - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + 2 * cacheIdLen + + encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + MESSAGE_ID_SIZE + 2 * cacheIdLen + CHAIN_LEN_SIZE + encodedExpectedChain.length + encodedUpdatedChain.length); - putCacheIdKeyAndOpCode(encodedMsg, replaceAtHeadMessage.getCacheId(), replaceAtHeadMessage.getKey(), replaceAtHeadMessage.getOpCode()); + putCacheIdKeyAndOpCode(encodedMsg, replaceAtHeadMessage, replaceAtHeadMessage.getKey()); encodedMsg.putInt(encodedExpectedChain.length); encodedMsg.put(encodedExpectedChain); encodedMsg.put(encodedUpdatedChain); @@ -87,8 +88,9 @@ public byte[] encode(ServerStoreOpMessage message) { return encodedMsg.array(); case CLEAR: ClearMessage clearMessage = (ClearMessage)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + 2 * cacheIdLen); + encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + MESSAGE_ID_SIZE + 2 * cacheIdLen); encodedMsg.put(clearMessage.getOpCode()); + encodedMsg.putLong(message.getId()); CodecUtil.putStringAsCharArray(encodedMsg, clearMessage.getCacheId()); return encodedMsg.array(); default: @@ -97,10 +99,11 @@ public byte[] encode(ServerStoreOpMessage message) { } // This assumes correct allocation and puts 
extracts common code - private static void putCacheIdKeyAndOpCode(ByteBuffer byteBuffer, String cacheId, long key, byte opcode) { - byteBuffer.put(opcode); - byteBuffer.putInt(cacheId.length()); - CodecUtil.putStringAsCharArray(byteBuffer, cacheId); + private static void putCacheIdKeyAndOpCode(ByteBuffer byteBuffer, ServerStoreOpMessage message, long key) { + byteBuffer.put(message.getOpCode()); + byteBuffer.putLong(message.getId()); + byteBuffer.putInt(message.getCacheId().length()); + CodecUtil.putStringAsCharArray(byteBuffer, message.getCacheId()); byteBuffer.putLong(key); } @@ -111,21 +114,30 @@ public EhcacheEntityMessage decode(byte[] payload) { long key; String cacheId; + long msgId; + EhcacheEntityMessage decodecMsg; switch (storeOp) { case GET: key = msg.getLong(); cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); return new GetMessage(cacheId, key); case GET_AND_APPEND: + msgId = msg.getLong(); cacheId = readStringFromBufferWithSize(msg); key = msg.getLong(); - return new GetAndAppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer()); + decodecMsg = new GetAndAppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer()); + decodecMsg.setId(msgId); + return decodecMsg; case APPEND: + msgId = msg.getLong(); cacheId = readStringFromBufferWithSize(msg); key = msg.getLong(); - return new AppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer()); + decodecMsg = new AppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer()); + decodecMsg.setId(msgId); + return decodecMsg; case REPLACE: + msgId = msg.getLong(); cacheId = readStringFromBufferWithSize(msg); key = msg.getLong(); int expectChainLen = msg.getInt(); @@ -134,15 +146,20 @@ public EhcacheEntityMessage decode(byte[] payload) { int updateChainLen = msg.remaining(); byte[] encodedUpdateChain = new byte[updateChainLen]; msg.get(encodedUpdateChain); - return new ReplaceAtHeadMessage(cacheId, key, chainCodec.decode(encodedExpectChain), + decodecMsg = new ReplaceAtHeadMessage(cacheId, key, chainCodec.decode(encodedExpectChain), chainCodec.decode(encodedUpdateChain)); + decodecMsg.setId(msgId); + return decodecMsg; case CLIENT_INVALIDATION_ACK: int invalidationId = msg.getInt(); cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); return new ClientInvalidationAck(cacheId, invalidationId); case CLEAR: + msgId = msg.getLong(); cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); - return new ClearMessage(cacheId); + decodecMsg = new ClearMessage(cacheId); + decodecMsg.setId(msgId); + return decodecMsg; default: throw new UnsupportedOperationException("This operation code is not supported : " + opCode); } diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java index ef6fbc72d9..2d56876573 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java @@ -18,6 +18,8 @@ import org.junit.Test; +import java.util.UUID; + import static org.junit.Assert.*; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; @@ -29,6 +31,8 @@ public class EhcacheCodecTest { + private static final UUID clientId = UUID.randomUUID(); + @Test public void encodeMessage() throws Exception { ServerStoreOpCodec serverStoreOpCodec = mock(ServerStoreOpCodec.class); @@ -36,7 +40,7 @@ public 
void encodeMessage() throws Exception { StateRepositoryOpCodec stateRepositoryOpCodec = mock(StateRepositoryOpCodec.class); EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null); - LifecycleMessage.DestroyServerStore lifecycleMessage = new LifecycleMessage.DestroyServerStore("foo"); + LifecycleMessage.DestroyServerStore lifecycleMessage = new LifecycleMessage.DestroyServerStore("foo", clientId); codec.encodeMessage(lifecycleMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, never()).encode(any(ServerStoreOpMessage.class)); diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java index 32520467e5..922487a8ed 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java @@ -16,31 +16,33 @@ package org.ehcache.clustered.common.internal.messages; -import org.hamcrest.Matchers; import org.junit.Test; -import java.util.HashSet; -import java.util.Set; +import java.util.UUID; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertThat; public class ReconnectDataCodecTest { @Test public void testCodec() { - Set cacheIds = new HashSet(); - cacheIds.add("test"); - cacheIds.add("test1"); - cacheIds.add("test2"); + ReconnectData reconnectData = new ReconnectData(); + reconnectData.add("test"); + reconnectData.add("test1"); + reconnectData.add("test2"); - ReconnectDataCodec dataCodec = new ReconnectDataCodec(); + reconnectData.setClientId(UUID.randomUUID()); - Set decoded = dataCodec.decode(dataCodec.encode(cacheIds, 14)); + ReconnectDataCodec dataCodec = new ReconnectDataCodec(); - assertThat(decoded, Matchers.hasSize(3)); - assertThat(decoded, containsInAnyOrder("test", "test1", "test2")); + ReconnectData decoded = dataCodec.decode(dataCodec.encode(reconnectData)); + assertThat(decoded, notNullValue()); + assertThat(decoded.getClientId(), is(reconnectData.getClientId())); + assertThat(decoded.getAllCaches(), containsInAnyOrder("test", "test1", "test2")); } } diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java index a656adfbd4..899dcecb04 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java @@ -18,15 +18,13 @@ import org.junit.Test; + import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; import static org.ehcache.clustered.common.internal.store.Util.readPayLoad; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; -/** - * - */ public class ServerStoreOpCodecTest { private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test"); @@ -43,6 +41,7 @@ public void testAppendMessageCodec() { 
assertThat(decodedAppendMessage.getCacheId(), is("test")); assertThat(decodedAppendMessage.getKey(), is(1L)); assertThat(readPayLoad(decodedAppendMessage.getPayload()), is(1L)); + assertThat(decodedAppendMessage.getId(), is(-1L)); } @Test @@ -66,6 +65,7 @@ public void testGetAndAppendMessageCodec() { assertThat(decodedGetAndAppendMessage.getCacheId(), is("test")); assertThat(decodedGetAndAppendMessage.getKey(), is(10L)); assertThat(readPayLoad(decodedGetAndAppendMessage.getPayload()), is(10L)); + assertThat(decodedGetAndAppendMessage.getId(), is(-1L)); } @Test @@ -79,6 +79,7 @@ public void testReplaceAtHeadMessageCodec() { assertThat(decodedReplaceAtHeadMessage.getCacheId(), is("test")); assertThat(decodedReplaceAtHeadMessage.getKey(), is(10L)); + assertThat(decodedReplaceAtHeadMessage.getId(), is(-1L)); Util.assertChainHas(decodedReplaceAtHeadMessage.getExpect(), 10L, 100L, 1000L); Util.assertChainHas(decodedReplaceAtHeadMessage.getUpdate(), 2000L); } @@ -89,6 +90,7 @@ public void testClearMessageCodec() throws Exception { byte[] encodedBytes = STORE_OP_CODEC.encode((ServerStoreOpMessage)clearMessage); EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(encodedBytes); assertThat(((ServerStoreOpMessage)decodedMsg).getCacheId(), is("test")); + assertThat(decodedMsg.getId(), is(-1L)); } @Test diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java index ab0782169e..67aeaa4aae 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java @@ -27,9 +27,6 @@ import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNot.not; -/** - * @author Ludovic Orban - */ public class ServerStoreOpMessageTest { @Test diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java index b937b7a34b..e5bf01ae28 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.Collections; import java.util.Map; +import java.util.UUID; import org.ehcache.clustered.client.internal.EhcacheClientEntityFactory; import org.ehcache.clustered.client.internal.EhcacheEntityCreationException; @@ -44,6 +45,7 @@ public class EhcacheClientEntityFactoryIntegrationTest { private static final Map EMPTY_RESOURCE_MAP = Collections.emptyMap(); + private static final UUID CLIENT_ID = UUID.randomUUID(); private static final String RESOURCE_CONFIG = "" @@ -70,14 +72,14 @@ public static void closeConnection() throws IOException { @Test public void testCreate() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); factory.create("testCreate", new ServerSideConfiguration(EMPTY_RESOURCE_MAP)); } @Test public void testCreateWhenExisting() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + 
EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); factory.create("testCreateWhenExisting", new ServerSideConfiguration(EMPTY_RESOURCE_MAP)); try { factory.create("testCreateWhenExisting", @@ -90,7 +92,7 @@ public void testCreateWhenExisting() throws Exception { @Test public void testCreateWithBadConfigCleansUp() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); try { factory.create("testCreateWithBadConfigCleansUp", new ServerSideConfiguration("flargle", EMPTY_RESOURCE_MAP)); @@ -107,7 +109,7 @@ public void testCreateWithBadConfigCleansUp() throws Exception { @Test public void testRetrieveWithGoodConfig() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); factory.create("testRetrieveWithGoodConfig", new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(43L, "primary")))); assertThat(factory.retrieve("testRetrieveWithGoodConfig", @@ -116,7 +118,7 @@ public void testRetrieveWithGoodConfig() throws Exception { @Test public void testRetrieveWithBadConfig() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); factory.create("testRetrieveWithBadConfig", new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(42L, "primary")))); try { @@ -130,7 +132,7 @@ public void testRetrieveWithBadConfig() throws Exception { @Test public void testRetrieveWhenNotExisting() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); try { factory.retrieve("testRetrieveWhenNotExisting", null); fail("Expected EntityNotFoundException"); @@ -141,14 +143,14 @@ public void testRetrieveWhenNotExisting() throws Exception { @Test public void testDestroy() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); factory.create("testDestroy", new ServerSideConfiguration(Collections.emptyMap())); factory.destroy("testDestroy"); } @Test public void testDestroyWhenNotExisting() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); try { factory.destroy("testDestroyWhenNotExisting"); fail("Expected EhcacheEntityNotFoundException"); @@ -159,7 +161,7 @@ public void testDestroyWhenNotExisting() throws Exception { @Test public void testAbandonLeadershipWhenNotOwning() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); try { factory.abandonLeadership("testAbandonLeadershipWhenNotOwning"); fail("Expected IllegalMonitorStateException"); @@ -170,18 +172,18 @@ public void testAbandonLeadershipWhenNotOwning() throws Exception { @Test public void testAcquireLeadershipWhenAlone() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + 
EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); assertThat(factory.acquireLeadership("testAcquireLeadershipWhenAlone"), is(true)); } @Test public void testAcquireLeadershipWhenTaken() throws Exception { - EhcacheClientEntityFactory factoryA = new EhcacheClientEntityFactory(CONNECTION); + EhcacheClientEntityFactory factoryA = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); assertThat(factoryA.acquireLeadership("testAcquireLeadershipWhenTaken"), is(true)); Connection clientB = CLUSTER.newConnection(); try { - EhcacheClientEntityFactory factoryB = new EhcacheClientEntityFactory(clientB); + EhcacheClientEntityFactory factoryB = new EhcacheClientEntityFactory(clientB, UUID.randomUUID()); assertThat(factoryB.acquireLeadership("testAcquireLeadershipWhenTaken"), is(false)); } finally { clientB.close(); @@ -190,13 +192,13 @@ public void testAcquireLeadershipWhenTaken() throws Exception { @Test public void testAcquireLeadershipAfterAbandoned() throws Exception { - EhcacheClientEntityFactory factoryA = new EhcacheClientEntityFactory(CONNECTION); + EhcacheClientEntityFactory factoryA = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); factoryA.acquireLeadership("testAcquireLeadershipAfterAbandoned"); factoryA.abandonLeadership("testAcquireLeadershipAfterAbandoned"); Connection clientB = CLUSTER.newConnection(); try { - EhcacheClientEntityFactory factoryB = new EhcacheClientEntityFactory(clientB); + EhcacheClientEntityFactory factoryB = new EhcacheClientEntityFactory(clientB, UUID.randomUUID()); assertThat(factoryB.acquireLeadership("testAcquireLeadershipAfterAbandoned"), is(true)); } finally { clientB.close(); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 82a0ba342f..cd7fd7e8bf 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -43,6 +43,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponseFactory; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.common.internal.messages.ReconnectData; import org.ehcache.clustered.common.internal.messages.ReconnectDataCodec; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; @@ -257,7 +258,8 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe throw new AssertionError("Client "+ clientDescriptor +" trying to reconnect is not connected to entity"); } clientState.attach(); - Set cacheIds = reconnectDataCodec.decode(extendedReconnectData); + ReconnectData reconnectData = reconnectDataCodec.decode(extendedReconnectData); + Set cacheIds = reconnectData.getAllCaches(); for (final String cacheId : cacheIds) { ServerStoreImpl serverStore = ehcacheStateService.getStore(cacheId); if (serverStore == null) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 822d13256b..429385006c 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -77,6 
+77,7 @@ */ public class EhcacheActiveEntityTest { + private static final UUID CLIENT_ID = UUID.randomUUID(); private static final byte[] ENTITY_ID = ClusteredEhcacheIdentity.serialize(UUID.randomUUID()); private static final LifeCycleMessageFactory MESSAGE_FACTORY = new LifeCycleMessageFactory(); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java index c6f59827e9..7e551be742 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java @@ -16,7 +16,6 @@ package org.ehcache.clustered.server.repo; -import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.junit.Test; From 06a07156bd91357755f73b8ce2a105c63f13e9b4 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Mon, 12 Sep 2016 02:31:17 +0530 Subject: [PATCH 030/218] MessageTracker for de duplication #1208 --- ...usteredStateRepositoryReplicationTest.java | 1 - .../messages/ServerStoreOpMessageTest.java | 3 + clustered/server/build.gradle | 7 + .../server/state/MessageTracker.java | 116 +++++++++++++ .../server/EhcacheActiveEntityTest.java | 1 - .../server/state/MessageTrackerTest.java | 159 ++++++++++++++++++ 6 files changed, 285 insertions(+), 2 deletions(-) create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java create mode 100644 clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java index 346ad67705..01327283b7 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java @@ -37,7 +37,6 @@ import java.lang.reflect.Field; import java.net.URI; -import java.util.UUID; import java.util.concurrent.ConcurrentMap; import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java index 67aeaa4aae..ab0782169e 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java @@ -27,6 +27,9 @@ import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNot.not; +/** + * @author Ludovic Orban + */ public class ServerStoreOpMessageTest { @Test diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index c7f9df290b..b4cf44d678 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -29,6 +29,13 @@ dependencies { provided 
"org.terracotta:standard-cluster-services:$parent.terracottaApisVersion" } +sourceCompatibility = 1.8 +targetCompatibility = 1.8 + +checkstyle { + toolVersion = '7.1' +} + sourceSets { main { compileClasspath += configurations.provided diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java new file mode 100644 index 0000000000..4ddec64ae0 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java @@ -0,0 +1,116 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.state; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.ReentrantLock; + +class MessageTracker { + + private final ConcurrentHashMap inProgressMessages = new ConcurrentHashMap<>(); + + private long lowerWaterMark = -1L; //Always to be updated under lock below + private final AtomicLong higerWaterMark = new AtomicLong(-1L); + private final ReentrantLock lwmLock = new ReentrantLock(); + + /** + * This method is only meant to be called by the Active Entity. + * This needs to be thread safe. + * This tells whether the message should be applied or not + * As and when messages are checked for deduplication, which is only + * done on newly promoted active, the non duplicate ones are cleared from + * inProgressMessages. 
+ * + * @param msgId + * @return whether the entity should apply the message or not + */ + boolean shouldApply(long msgId) { + if (msgId < lowerWaterMark) { + return false; + } + if (msgId > higerWaterMark.get()) { + return true; + } + final AtomicBoolean shouldApply = new AtomicBoolean(false); + inProgressMessages.computeIfPresent(msgId, (id, state) -> { + if (state != true) { + shouldApply.set(true); + } + return null; + }); + return shouldApply.get(); + } + + /** + * Only to be invoked on Passive Entity + * @param msgId + */ + void track(long msgId) { + inProgressMessages.put(msgId, false); + updateHigherWaterMark(msgId); + } + + /** + * Only to be invoked on Passive Entity + * Assumes there are no message loss & + * message ids are ever increasing + * @param msgId + */ + void applied(long msgId) { + inProgressMessages.computeIfPresent(msgId, ((id, state) -> state = true)); + if (lwmLock.tryLock()) { + try { + for (long i = lowerWaterMark + 1; i<= higerWaterMark.get(); i++) { + final AtomicBoolean removed = new AtomicBoolean(false); + inProgressMessages.computeIfPresent(i, (id, state) -> { + if (state == true) { + removed.set(true); + return null; + } + return state; + }); + if (removed.get()) { + lowerWaterMark ++; + } else { + break; + } + } + } finally { + lwmLock.unlock(); + } + } + + } + + boolean isEmpty() { + return inProgressMessages.isEmpty(); + } + + private void updateHigherWaterMark(long msgId) { + if (msgId < higerWaterMark.get()) { + return; + } + while(true) { + long old = higerWaterMark.get(); + if (higerWaterMark.compareAndSet(old, msgId)) { + break; + } + } + } +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 429385006c..822d13256b 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -77,7 +77,6 @@ */ public class EhcacheActiveEntityTest { - private static final UUID CLIENT_ID = UUID.randomUUID(); private static final byte[] ENTITY_ID = ClusteredEhcacheIdentity.serialize(UUID.randomUUID()); private static final LifeCycleMessageFactory MESSAGE_FACTORY = new LifeCycleMessageFactory(); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java new file mode 100644 index 0000000000..e1d33a5ed6 --- /dev/null +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java @@ -0,0 +1,159 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server.state; + +import org.junit.Test; + +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ForkJoinTask; +import java.util.concurrent.Future; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +public class MessageTrackerTest { + + @Test + public void testMessageTrackAndApplySingleThreaded() throws Exception { + + long[] input = getInputFor(0, 20); + + MessageTracker messageTracker = new MessageTracker(); + + for (int i = 0; i < input.length; i++) { + messageTracker.track(input[i]); + messageTracker.applied(input[i]); + } + + assertLowerWaterMark(messageTracker, 19); + + assertThat(messageTracker.isEmpty(), is(true)); + + LongStream.of(input).forEach(msg -> assertThat(messageTracker.shouldApply(msg), is(false))); + + } + + @Test + public void testMessageTrackAndApplyMultiThreaded() throws Exception { + + long[] input = getInputFor(0, 1000); + final MessageTracker messageTracker = new MessageTracker(); + + ExecutorService executorService = Executors.newWorkStealingPool(); + + List> results = new ArrayList<>(); + + for (int i = 0; i < 50 ; i++) { + int start = 20*i; + int end = start + 20; + results.add(executorService.submit(() -> { + for (int j = start; j < end; j++) { + messageTracker.track(input[j]); + messageTracker.applied(input[j]); + } + return null; + })); + } + + for (Future f : results) { + f.get(); + } + + assertLowerWaterMark(messageTracker, 999); + + assertThat(messageTracker.isEmpty(), is(true)); + + LongStream.of(input).forEach(msg -> assertThat(messageTracker.shouldApply(msg), is(false))); + + } + + @Test + public void testDuplicateMessagesForTrackedMessages() throws Exception { + + Random random = new Random(); + long[] input = getInputFor(0, 1000); + final MessageTracker messageTracker = new MessageTracker(); + + Set nonAppliedMsgs = Collections.newSetFromMap(new ConcurrentHashMap()); + + ExecutorService executorService = Executors.newWorkStealingPool(); + + List> results = new ArrayList<>(); + + for (int i = 0; i < 50 ; i++) { + int start = 20*i; + int end = start + 20; + int randomBreakingPoint = end - 1 - random.nextInt(5); + results.add(executorService.submit(() -> { + for (int j = start; j < end; j++) { + messageTracker.track(input[j]); + if (j < randomBreakingPoint) { + messageTracker.applied(input[j]); + } else { + nonAppliedMsgs.add(input[j]); + } + } + return null; + })); + } + + for (Future f : results) { + f.get(); + } + + assertThat(messageTracker.isEmpty(), is(false)); + + nonAppliedMsgs.forEach(x -> assertThat(messageTracker.shouldApply(x), is(true))); + +//TODO: assertThat(messageTracker.isEmpty(), is(true)); + + LongStream.of(input).filter(x -> !nonAppliedMsgs.contains(x)).forEach(x -> assertThat(messageTracker.shouldApply(x), is(false))); + + } + + /** + * + * @param start start of range + * @param end exclusive + * @return + */ + private static long[] getInputFor(int start, int end) { + Random random = new Random(); + return random.longs(start, end).unordered().distinct().limit(end - 
start).toArray(); + } + + private static void assertLowerWaterMark(MessageTracker messageTracker, long lwm) throws NoSuchFieldException, IllegalAccessException { + Field entity = messageTracker.getClass().getDeclaredField("lowerWaterMark"); + entity.setAccessible(true); + assertThat((Long)entity.get(messageTracker), is(lwm)); + } + +} From 3a8a7cc597369c92f84dd62666dae4e4759fffef Mon Sep 17 00:00:00 2001 From: Abhilash Date: Mon, 12 Sep 2016 18:08:41 +0530 Subject: [PATCH 031/218] Integrate message tracking with entity #1208 --- clustered/client/build.gradle | 9 +++ .../client/internal/EhcacheClientEntity.java | 2 +- .../service/DefaultClusteringService.java | 2 +- .../exceptions/InvalidClientIdException.java | 32 ++++++++ .../messages/EhcacheEntityMessage.java | 12 +-- .../messages/LifeCycleMessageCodec.java | 4 + .../internal/messages/LifecycleMessage.java | 14 +++- .../messages/ServerStoreOpMessage.java | 22 ++++++ .../messages/StateRepositoryOpMessage.java | 22 ++++++ clustered/server/build.gradle | 15 ++++ .../clustered/server/EhcacheActiveEntity.java | 39 ++++++++-- .../server/EhcachePassiveEntity.java | 23 +++++- .../server/EhcacheStateServiceImpl.java | 8 ++ .../server/state/ClientMessageTracker.java | 76 +++++++++++++++++++ .../server/state/EhcacheStateService.java | 2 + .../server/state/MessageTracker.java | 2 +- .../server/EhcacheActiveEntityTest.java | 60 +++++++++++++-- .../server/EhcachePassiveEntityTest.java | 7 ++ .../server/state/MessageTrackerTest.java | 2 +- 19 files changed, 326 insertions(+), 27 deletions(-) create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidClientIdException.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 9e1c16d5d3..2eed57c888 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -31,3 +31,12 @@ dependencies { testCompile "org.terracotta:entity-test-lib:$parent.entityTestLibVersion" testCompile "org.terracotta:passthrough-server:$parent.terracottaPassthroughTestingVersion" } + +def java8 = { + JavaVersion.current().isJava8Compatible() +} + +compileTestJava { + options.fork = true; + options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index 3f6c564586..685715eb0e 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -284,7 +284,7 @@ public InvokeFuture invokeAsync(EhcacheEntityMessage mess throw new IllegalStateException("Client ID cannot be null"); } if (replicate) { - message.setId(sequenceGenerator.incrementAndGet()); + message.setId(sequenceGenerator.getAndIncrement()); //TODO: remove the replicate call with latest passthrough upgrade invoke = endpoint.beginInvoke().message(message).replicate(true).invoke(); } else { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 1f81352a15..935b194e52 100644 --- 
a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -166,7 +166,7 @@ public void start(final ServiceProvider serviceProvider) { } private void createEntityFactory() { - entityFactory = new EhcacheClientEntityFactory(clusterConnection, operationTimeouts); + entityFactory = new EhcacheClientEntityFactory(clusterConnection, clientId, operationTimeouts); } private void initClusterConnection() { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidClientIdException.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidClientIdException.java new file mode 100644 index 0000000000..585a2297b8 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidClientIdException.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.exceptions; + +/** + * Thrown when Active Entity fails to validate the unique client Id sent by client + */ +public class InvalidClientIdException extends ClusterException { + + public InvalidClientIdException(String message) { + super(message); + } + + @Override + public InvalidClientIdException withClientStackTrace() { + return new InvalidClientIdException(this.getMessage()); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java index c0403bfbed..2036351ee9 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java @@ -27,8 +27,6 @@ public abstract class EhcacheEntityMessage implements EntityMessage { public static final long NOT_REPLICATED = -1; - private long id = NOT_REPLICATED; - /** * These types represent the top level Ehcache entity message types. * Each of these top level types can have subtypes of messages. 
@@ -69,12 +67,10 @@ public String toString() { return getType().toString(); } - public void setId(long id) { - this.id = id; - } + public abstract void setId(long id); - public long getId() { - return this.id; - } + public abstract long getId(); + + public abstract UUID getClientId(); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java index 4c98eae64f..9b3de64990 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java @@ -25,6 +25,10 @@ class LifeCycleMessageCodec { private static final byte OPCODE_SIZE = 1; public byte[] encode(LifecycleMessage message) { + //For configure message id serves as message creation timestamp + if (message instanceof LifecycleMessage.ConfigureStoreManager) { + message.setId(System.nanoTime()); + } byte[] encodedMsg = Util.marshall(message); ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + encodedMsg.length); buffer.put(message.getOpCode()); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java index fc07cdcb4b..d42658592b 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java @@ -34,7 +34,9 @@ public enum LifeCycleOp { } protected UUID clientId; + protected long id = NOT_REPLICATED; + @Override public UUID getClientId() { if (clientId == null) { throw new AssertionError("Client Id cannot be null for lifecycle messages"); @@ -42,6 +44,16 @@ public UUID getClientId() { return this.clientId; } + @Override + public long getId() { + return this.id; + } + + @Override + public void setId(long id) { + this.id = id; + } + @Override public byte getOpCode() { return getType().getCode(); @@ -105,7 +117,7 @@ public abstract static class BaseServerStore extends LifecycleMessage { private final String name; private final ServerStoreConfiguration storeConfiguration; - protected BaseServerStore(String name, ServerStoreConfiguration storeConfiguration, UUID clientId) { + BaseServerStore(String name, ServerStoreConfiguration storeConfiguration, UUID clientId) { this.name = name; this.storeConfiguration = storeConfiguration; this.clientId = clientId; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java index 44eed62261..86da8c8248 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java @@ -19,6 +19,7 @@ import org.ehcache.clustered.common.internal.store.Chain; import java.nio.ByteBuffer; +import java.util.UUID; public abstract class ServerStoreOpMessage extends EhcacheEntityMessage implements ConcurrentEntityMessage { public enum ServerStoreOp { @@ -62,6 +63,27 @@ public static ServerStoreOp getServerStoreOp(byte storeOpCode) { } + protected UUID clientId = null; //TODO: #1211 + 
protected long id = NOT_REPLICATED; + + @Override + public UUID getClientId() { + if (clientId == null) { + throw new AssertionError("Client Id cannot be null for lifecycle messages"); + } + return this.clientId; + } + + @Override + public long getId() { + return this.id; + } + + @Override + public void setId(long id) { + this.id = id; + } + private final String cacheId; private ServerStoreOpMessage(String cacheId) { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java index 141a1082bc..f8632b33e4 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java @@ -17,6 +17,7 @@ package org.ehcache.clustered.common.internal.messages; import java.io.Serializable; +import java.util.UUID; public abstract class StateRepositoryOpMessage extends EhcacheEntityMessage implements Serializable { @@ -29,11 +30,32 @@ public enum StateRepositoryOp { private final String cacheId; private final String mapId; + protected UUID clientId = null; //TODO: #1211 + protected long id = NOT_REPLICATED; + private StateRepositoryOpMessage(String cacheId, String mapId) { this.cacheId = cacheId; this.mapId = mapId; } + @Override + public UUID getClientId() { + if (clientId == null) { + throw new AssertionError("Client Id cannot be null for lifecycle messages"); + } + return this.clientId; + } + + @Override + public long getId() { + return this.id; + } + + @Override + public void setId(long id) { + this.id = id; + } + public String getCacheId() { return cacheId; } diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index b4cf44d678..2d28c97450 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -29,6 +29,21 @@ dependencies { provided "org.terracotta:standard-cluster-services:$parent.terracottaApisVersion" } +def java8 = { + JavaVersion.current().isJava8Compatible() +} + + +compileJava { + options.fork = true; + options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') +} + +compileTestJava { + options.fork = true; + options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') +} + sourceCompatibility = 1.8 targetCompatibility = 1.8 diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index cd7fd7e8bf..57b605f451 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -25,6 +25,7 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.ehcache.clustered.common.Consistency; @@ -33,6 +34,7 @@ import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.internal.exceptions.ClusterException; import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; +import org.ehcache.clustered.common.internal.exceptions.InvalidClientIdException; import 
org.ehcache.clustered.common.internal.exceptions.InvalidOperationException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.exceptions.LifecycleException; @@ -94,6 +96,7 @@ class EhcacheActiveEntity implements ActiveServerEntity> storeClientMap = new ConcurrentHashMap>(); + private final ConcurrentHashMap clientIdMap = new ConcurrentHashMap<>(); private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); private final ServerStoreCompatibility storeCompatibility = new ServerStoreCompatibility(); private final EhcacheEntityResponseFactory responseFactory; @@ -259,6 +262,7 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe } clientState.attach(); ReconnectData reconnectData = reconnectDataCodec.decode(extendedReconnectData); + clientIdMap.put(clientDescriptor, reconnectData.getClientId()); Set cacheIds = reconnectData.getAllCaches(); for (final String cacheId : cacheIds) { ServerStoreImpl serverStore = ehcacheStateService.getStore(cacheId); @@ -537,7 +541,9 @@ public void destroy() { */ private void configure(ClientDescriptor clientDescriptor, ConfigureStoreManager message) throws ClusterException { validateClientConnected(clientDescriptor); - ehcacheStateService.configure(message); + if (ehcacheStateService.getClientMessageTracker().shouldConfigure(message.getClientId(), message.getId())) { + ehcacheStateService.configure(message); + } this.clientStateMap.get(clientDescriptor).attach(); } @@ -551,6 +557,10 @@ private void configure(ClientDescriptor clientDescriptor, ConfigureStoreManager */ private void validate(ClientDescriptor clientDescriptor, ValidateStoreManager message) throws ClusterException { validateClientConnected(clientDescriptor); + if (clientIdMap.get(clientDescriptor) != null && clientIdMap.get(clientDescriptor).equals(message.getClientId())) { + throw new InvalidClientIdException("Client ID : " + message.getClientId() + " is already being tracked by Active"); + } + clientIdMap.put(clientDescriptor, message.getClientId()); ehcacheStateService.validate(message); this.clientStateMap.get(clientDescriptor).attach(); } @@ -578,14 +588,20 @@ private void createServerStore(ClientDescriptor clientDescriptor, CreateServerSt if(createServerStore.getStoreConfiguration().getPoolAllocation() instanceof PoolAllocation.Unknown) { throw new LifecycleException("Clustered tier can't be created with an Unknown resource pool"); } - + boolean isDuplicate = isLifeCycleMessageDuplicate(createServerStore); final String name = createServerStore.getName(); // client cache identifier/name + ServerStoreImpl serverStore; + if (!isDuplicate) { - LOGGER.info("Client {} creating new clustered tier '{}'", clientDescriptor, name); + LOGGER.info("Client {} creating new clustered tier '{}'", clientDescriptor, name); - ServerStoreConfiguration storeConfiguration = createServerStore.getStoreConfiguration(); + ServerStoreConfiguration storeConfiguration = createServerStore.getStoreConfiguration(); + + serverStore = ehcacheStateService.createStore(name, storeConfiguration); + } else { + serverStore = ehcacheStateService.getStore(name); + } - ServerStoreImpl serverStore = ehcacheStateService.createStore(name, storeConfiguration); serverStore.setEvictionListener(new ServerStoreEvictionListener() { @Override public void onEviction(long key) { @@ -668,11 +684,20 @@ private void destroyServerStore(ClientDescriptor clientDescriptor, DestroyServer throw new ResourceBusyException("Cannot destroy 
clustered tier '" + name + "': in use by " + clients.size() + " other client(s)"); } - LOGGER.info("Client {} destroying clustered tier '{}'", clientDescriptor, name); - ehcacheStateService.destroyServerStore(name); + boolean isDuplicate = isLifeCycleMessageDuplicate(destroyServerStore); + + if (!isDuplicate) { + LOGGER.info("Client {} destroying clustered tier '{}'", clientDescriptor, name); + ehcacheStateService.destroyServerStore(name); + } + storeClientMap.remove(name); } + private boolean isLifeCycleMessageDuplicate(LifecycleMessage message) { + return ehcacheStateService.getClientMessageTracker().isDuplicate(message.getId(), message.getClientId()); + } + /** * Establishes a registration of a client against a store. *

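For illustration, a minimal sketch of the duplicate-detection flow that the EhcacheActiveEntity changes above rely on, assuming only the ClientMessageTracker and MessageTracker classes introduced in this patch; the DuplicateDetectionSketch class name and the literal message ids are hypothetical and exist only for this example, they are not part of the patch.

import java.util.UUID;

import org.ehcache.clustered.server.state.ClientMessageTracker;

class DuplicateDetectionSketch {
  public static void main(String[] args) {
    ClientMessageTracker tracker = new ClientMessageTracker();
    UUID clientId = UUID.randomUUID();

    // On the passive entity: register the client, then track each replicated
    // lifecycle message and mark it applied once the operation completes.
    tracker.add(clientId);
    long msgId = 42L;                 // hypothetical message id
    tracker.track(msgId, clientId);
    tracker.applied(msgId, clientId);

    // On a newly promoted active: a replayed message with the same id is
    // reported as a duplicate, so isLifeCycleMessageDuplicate(...) above
    // lets the entity skip re-executing it.
    System.out.println(tracker.isDuplicate(msgId, clientId)); // prints true
    System.out.println(tracker.isDuplicate(43L, clientId));   // prints false (never tracked)
  }
}

As the MessageTracker javadoc earlier in this series notes, this deduplication check is only exercised on a newly promoted active, where replayed client messages may already have been applied by the former passive.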
diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index d271e4bc19..71d5f15292 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -31,6 +31,7 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; import org.slf4j.Logger; @@ -127,10 +128,10 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu } } - private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterException{ + private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterException { switch (message.operation()) { case CONFIGURE: - ehcacheStateService.configure((ConfigureStoreManager) message); + configure((ConfigureStoreManager) message); break; case CREATE_SERVER_STORE: createServerStore((CreateServerStore) message); @@ -143,6 +144,20 @@ private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterEx } } + private void configure(ConfigureStoreManager message) throws ClusterException { + ehcacheStateService.configure(message); + ehcacheStateService.getClientMessageTracker().setEntityConfiguredStamp(message.getClientId(), message.getId()); + } + + private void trackAndApplyMessage(LifecycleMessage message) { + ClientMessageTracker clientMessageTracker = ehcacheStateService.getClientMessageTracker(); + if (!clientMessageTracker.isAdded(message.getClientId())) { + clientMessageTracker.add(message.getClientId()); + } + clientMessageTracker.track(message.getId(), message.getClientId()); + clientMessageTracker.applied(message.getId(), message.getClientId()); + } + private void createServerStore(CreateServerStore createServerStore) throws ClusterException { if (!ehcacheStateService.isConfigured()) { throw new LifecycleException("Clustered Tier Manager is not configured"); @@ -151,6 +166,8 @@ private void createServerStore(CreateServerStore createServerStore) throws Clust throw new LifecycleException("Clustered tier can't be created with an Unknown resource pool"); } + trackAndApplyMessage(createServerStore); + final String name = createServerStore.getName(); // client cache identifier/name LOGGER.info("Creating new clustered tier '{}'", name); @@ -165,6 +182,8 @@ private void destroyServerStore(DestroyServerStore destroyServerStore) throws Cl throw new LifecycleException("Clustered Tier Manager is not configured"); } + trackAndApplyMessage(destroyServerStore); + String name = destroyServerStore.getName(); LOGGER.info("Destroying clustered tier '{}'", name); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index 4189092850..687c4dd6eb 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -21,6 +21,7 @@ 
import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.ClusterException; import org.ehcache.clustered.server.repo.StateRepositoryManager; +import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -82,6 +83,8 @@ public class EhcacheStateServiceImpl implements EhcacheStateService { */ private Map stores = Collections.emptyMap(); + private final ClientMessageTracker messageTracker = new ClientMessageTracker(); + private final StateRepositoryManager stateRepositoryManager; public EhcacheStateServiceImpl(ServiceRegistry services, Set offHeapResourceIdentifiers) { @@ -358,6 +361,11 @@ public StateRepositoryManager getStateRepositoryManager() throws ClusterExceptio return this.stateRepositoryManager; } + @Override + public ClientMessageTracker getClientMessageTracker() { + return this.messageTracker; + } + private static boolean nullSafeEquals(Object s1, Object s2) { return (s1 == null ? s2 == null : s1.equals(s2)); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java new file mode 100644 index 0000000000..c4b186d160 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java @@ -0,0 +1,76 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server.state; + +import com.tc.classloader.CommonComponent; + +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +@CommonComponent +public class ClientMessageTracker { + + private final ConcurrentMap messageTrackers = new ConcurrentHashMap<>(); + private UUID entityConfiguredStamp = null; + private long configuredTimestamp; + + public boolean isAdded(UUID clientId) { + return messageTrackers.containsKey(clientId); + } + + public void track(long msgId, UUID clientId) { + messageTrackers.get(clientId).track(msgId); + } + + public void applied(long msgId, UUID clientId){ + messageTrackers.get(clientId).applied(msgId); + } + + public boolean isDuplicate(long msgId, UUID clientId) { + if (messageTrackers.get(clientId) == null) { + return false; + } + return !messageTrackers.get(clientId).shouldApply(msgId); + } + + public void add(UUID clientId) { + if(messageTrackers.putIfAbsent(clientId, new MessageTracker()) != null) { + throw new IllegalStateException("Same client "+ clientId +" cannot be tracked twice"); + } + } + + public void remove(UUID clientId) { + messageTrackers.remove(clientId); + } + + public void setEntityConfiguredStamp(UUID clientId, long timestamp) { + this.entityConfiguredStamp = clientId; + this.configuredTimestamp = timestamp; + } + + public boolean shouldConfigure(UUID clientId, long timestamp) { + if (entityConfiguredStamp == null) { + return true; + } + if (clientId.equals(entityConfiguredStamp) && configuredTimestamp == timestamp) { + return false; + } + return true; + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index 56eb427d02..20bf72228b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -26,6 +26,7 @@ import com.tc.classloader.CommonComponent; import java.util.Set; +import java.util.UUID; @CommonComponent public interface EhcacheStateService { @@ -48,4 +49,5 @@ public interface EhcacheStateService { StateRepositoryManager getStateRepositoryManager() throws ClusterException; + ClientMessageTracker getClientMessageTracker(); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java index 4ddec64ae0..86aaaa845c 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java @@ -21,7 +21,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantLock; -class MessageTracker { +public class MessageTracker { private final ConcurrentHashMap inProgressMessages = new ConcurrentHashMap<>(); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 822d13256b..a4beab7f45 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -21,7 +21,6 @@ import 
org.ehcache.clustered.common.ServerSideConfiguration.Pool; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.PoolAllocation; -import org.ehcache.clustered.common.internal.exceptions.ClusterException; import org.ehcache.clustered.common.internal.exceptions.InvalidServerSideConfigurationException; import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; @@ -38,6 +37,7 @@ import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; +import org.junit.Before; import org.junit.Test; import org.terracotta.entity.ClientCommunicator; import org.terracotta.entity.ClientDescriptor; @@ -67,18 +67,20 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import org.junit.Assert; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; -/** - * @author cdennis - */ public class EhcacheActiveEntityTest { private static final byte[] ENTITY_ID = ClusteredEhcacheIdentity.serialize(UUID.randomUUID()); private static final LifeCycleMessageFactory MESSAGE_FACTORY = new LifeCycleMessageFactory(); + private static final UUID CLIENT_ID = UUID.randomUUID(); + + @Before + public void setClientId() { + MESSAGE_FACTORY.setClientId(CLIENT_ID); + } @Test public void testConfigTooShort() { @@ -316,6 +318,9 @@ public void testAppendInvalidationAcksTakenIntoAccount() throws Exception { activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -341,20 +346,25 @@ public void testAppendInvalidationAcksTakenIntoAccount() throws Exception { assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -407,6 +417,9 @@ public void testClearInvalidationAcksTakenIntoAccount() throws Exception { activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -432,20 +445,25 @@ public 
void testClearInvalidationAcksTakenIntoAccount() throws Exception { assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -498,6 +516,9 @@ public void testAppendInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcc activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -523,20 +544,25 @@ public void testAppendInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcc assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -585,6 +611,9 @@ public void testClearInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcco activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -610,20 +639,25 @@ public void testClearInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcco assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( 
activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -672,6 +706,9 @@ public void testAppendInvalidationDisconnectionOfBlockingClientTakenIntoAccount( activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -697,20 +734,25 @@ public void testAppendInvalidationDisconnectionOfBlockingClientTakenIntoAccount( assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); @@ -749,6 +791,9 @@ public void testClearInvalidationDisconnectionOfBlockingClientTakenIntoAccount() activeEntity.connected(client2); activeEntity.connected(client3); + UUID client2Id = UUID.randomUUID(); + UUID client3Id = UUID.randomUUID(); + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() .defaultResource("defaultServerResource") .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -774,20 +819,25 @@ public void testClearInvalidationDisconnectionOfBlockingClientTakenIntoAccount() assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)) ); // attach to the store + MESSAGE_FACTORY.setClientId(CLIENT_ID); assertSuccess( activeEntity.invoke(client1, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client2Id); assertSuccess( activeEntity.invoke(client2, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); + MESSAGE_FACTORY.setClientId(client3Id); assertSuccess( activeEntity.invoke(client3, MESSAGE_FACTORY.validateServerStore("testDisconnection", serverStoreConfiguration)) ); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java 
b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index ee64e46700..4de2c86a54 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -24,6 +24,7 @@ import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; +import org.junit.Before; import org.junit.Test; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; @@ -51,6 +52,12 @@ public class EhcachePassiveEntityTest { private static final byte[] ENTITY_ID = ClusteredEhcacheIdentity.serialize(UUID.randomUUID()); private static final LifeCycleMessageFactory MESSAGE_FACTORY = new LifeCycleMessageFactory(); + private static final UUID CLIENT_ID = UUID.randomUUID(); + + @Before + public void setClientId() { + MESSAGE_FACTORY.setClientId(CLIENT_ID); + } @Test public void testConfigTooShort() { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java index e1d33a5ed6..7046a6d2bb 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java @@ -133,7 +133,7 @@ public void testDuplicateMessagesForTrackedMessages() throws Exception { nonAppliedMsgs.forEach(x -> assertThat(messageTracker.shouldApply(x), is(true))); -//TODO: assertThat(messageTracker.isEmpty(), is(true)); +//TODO: #1211 assertThat(messageTracker.isEmpty(), is(true)); LongStream.of(input).filter(x -> !nonAppliedMsgs.contains(x)).forEach(x -> assertThat(messageTracker.shouldApply(x), is(false))); From cd219eab7da269f3a107d15f28b49a8e620cc2a4 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Mon, 12 Sep 2016 23:58:22 +0530 Subject: [PATCH 032/218] ClientID lifecycle with client descriptor #1208 --- clustered/client/build.gradle | 5 ++ .../internal/EhcacheClientEntityFactory.java | 14 +++--- .../service/DefaultClusteringService.java | 27 +++++++--- .../EhcacheClientEntityFactoryTest.java | 28 +++++------ .../internal/store/ClusteredStoreTest.java | 8 ++- .../store/EventualServerStoreProxyTest.java | 28 +++++++---- .../NoInvalidationServerStoreProxyTest.java | 6 +-- .../store/StrongServerStoreProxyTest.java | 27 ++++++---- .../ObservableEhcacheServerEntityService.java | 2 + ...cheClientEntityFactoryIntegrationTest.java | 50 +++++++++---------- clustered/server/build.gradle | 11 +++- .../clustered/server/EhcacheActiveEntity.java | 10 +++- 12 files changed, 132 insertions(+), 84 deletions(-) diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 2eed57c888..cd6f7b6d19 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -40,3 +40,8 @@ compileTestJava { options.fork = true; options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') } + +test { + executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'java') + environment 'JAVA_HOME', MavenToolchain.javaHome(JavaVersion.VERSION_1_8) +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java 
b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java index 966735aa9b..3c89dc5957 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java @@ -47,16 +47,14 @@ public class EhcacheClientEntityFactory { private final Map maintenanceHolds = new ConcurrentHashMap(); private final Timeouts entityTimeouts; - private final UUID clientId; - public EhcacheClientEntityFactory(Connection connection, UUID clientId) { - this(connection, clientId, Timeouts.builder().build()); + public EhcacheClientEntityFactory(Connection connection) { + this(connection, Timeouts.builder().build()); } - public EhcacheClientEntityFactory(Connection connection, UUID clientId, Timeouts entityTimeouts) { + public EhcacheClientEntityFactory(Connection connection, Timeouts entityTimeouts) { this.connection = connection; this.entityTimeouts = entityTimeouts; - this.clientId = clientId; } public boolean acquireLeadership(String entityIdentifier) { @@ -85,6 +83,7 @@ public void abandonLeadership(String entityIdentifier) { * * @param identifier the instance identifier for the {@code EhcacheActiveEntity} * @param config the {@code EhcacheActiveEntity} configuration to use for creation + * @param clientId UUID generated for the client * * @throws EntityAlreadyExistsException if the {@code EhcacheActiveEntity} for {@code identifier} already exists * @throws EhcacheEntityCreationException if an error preventing {@code EhcacheActiveEntity} creation was raised @@ -93,7 +92,7 @@ public void abandonLeadership(String entityIdentifier) { * @throws TimeoutException if the creation and configuration of the {@code EhcacheActiveEntity} exceed the * lifecycle operation timeout */ - public void create(final String identifier, final ServerSideConfiguration config) + public void create(final String identifier, final ServerSideConfiguration config, final UUID clientId) throws EntityAlreadyExistsException, EhcacheEntityCreationException, EntityBusyException, TimeoutException { Hold existingMaintenance = maintenanceHolds.get(identifier); Hold localMaintenance = null; @@ -150,6 +149,7 @@ public void create(final String identifier, final ServerSideConfiguration config * * @param identifier the instance identifier for the {@code EhcacheActiveEntity} * @param config the {@code EhcacheActiveEntity} configuration to use for access checking + * @param clientId UUID generated for the client * * @return an {@code EhcacheClientEntity} providing access to the {@code EhcacheActiveEntity} identified by * {@code identifier} @@ -159,7 +159,7 @@ public void create(final String identifier, final ServerSideConfiguration config * @throws TimeoutException if the creation and configuration of the {@code EhcacheActiveEntity} exceed the * lifecycle operation timeout */ - public EhcacheClientEntity retrieve(String identifier, ServerSideConfiguration config) + public EhcacheClientEntity retrieve(String identifier, ServerSideConfiguration config, UUID clientId) throws EntityNotFoundException, EhcacheEntityValidationException, TimeoutException { try { Hold fetchHold = createAccessLockFor(identifier).readLock(); diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 935b194e52..8bc7985a8c 
100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -36,6 +36,7 @@ import org.ehcache.clustered.client.service.EntityService; import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.exceptions.InvalidClientIdException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.config.CacheConfiguration; @@ -81,7 +82,7 @@ class DefaultClusteringService implements ClusteringService, EntityService { private final String entityIdentifier; private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap(); private final EhcacheClientEntity.Timeouts operationTimeouts; - private final UUID clientId = UUID.randomUUID(); + private UUID clientId = UUID.randomUUID(); private volatile Connection clusterConnection; private EhcacheClientEntityFactory entityFactory; @@ -149,7 +150,7 @@ public void start(final ServiceProvider serviceProvider) { entity = autoCreateEntity(); } else { try { - entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); + entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration(), clientId); } catch (EntityNotFoundException e) { throw new IllegalStateException("The clustered tier manager '" + entityIdentifier + "' does not exist." + " Please review your configuration.", e); @@ -166,7 +167,7 @@ public void start(final ServiceProvider serviceProvider) { } private void createEntityFactory() { - entityFactory = new EhcacheClientEntityFactory(clusterConnection, clientId, operationTimeouts); + entityFactory = new EhcacheClientEntityFactory(clusterConnection, operationTimeouts); } private void initClusterConnection() { @@ -184,7 +185,7 @@ private void initClusterConnection() { private EhcacheClientEntity autoCreateEntity() throws EhcacheEntityValidationException, IllegalStateException { while (true) { try { - entityFactory.create(entityIdentifier, configuration.getServerConfiguration()); + entityFactory.create(entityIdentifier, configuration.getServerConfiguration(), clientId); } catch (EhcacheEntityCreationException e) { throw new IllegalStateException("Could not create the clustered tier manager '" + entityIdentifier + "'.", e); } catch (EntityAlreadyExistsException e) { @@ -196,7 +197,7 @@ private EhcacheClientEntity autoCreateEntity() throws EhcacheEntityValidationExc + "'; create operation timed out", e); } try { - return entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); + return retrieveUntilClientIdIsUnique(); } catch (EntityNotFoundException e) { //ignore - loop and try to create } catch (TimeoutException e) { @@ -206,6 +207,20 @@ private EhcacheClientEntity autoCreateEntity() throws EhcacheEntityValidationExc } } + private EhcacheClientEntity retrieveUntilClientIdIsUnique() throws TimeoutException, EntityNotFoundException { + while(true) { + try { + return entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration(), clientId); + } catch (EhcacheEntityValidationException e) { + if (!(e.getCause().getCause() instanceof InvalidClientIdException)) { + throw e; + } else { + this.clientId = UUID.randomUUID(); + } + } + } + } + @Override public void 
startForMaintenance(ServiceProvider serviceProvider) { initClusterConnection(); @@ -312,7 +327,7 @@ public void destroy(String name) throws CachePersistenceException { initClusterConnection(); createEntityFactory(); try { - entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); + entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration(), clientId); } catch (EntityNotFoundException e) { // No entity on the server, so no need to destroy anything } catch (TimeoutException e) { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java index d5621744a4..98406eb30b 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java @@ -56,8 +56,8 @@ public void testCreate() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); - factory.create("test", null); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); + factory.create("test", null, CLIENT_ID); verify(entityRef).create(any(UUID.class)); verify(entity).configure(any(ServerSideConfiguration.class)); verify(entity).close(); @@ -74,9 +74,9 @@ public void testCreateBadConfig() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); try { - factory.create("test", null); + factory.create("test", null, CLIENT_ID); fail("Expecting EhcacheEntityCreationException"); } catch (EhcacheEntityCreationException e) { // expected @@ -96,9 +96,9 @@ public void testCreateWhenExisting() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); try { - factory.create("test", null); + factory.create("test", null, CLIENT_ID); fail("Expected EntityAlreadyExistsException"); } catch (EntityAlreadyExistsException e) { //expected @@ -115,8 +115,8 @@ public void testRetrieve() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); - assertThat(factory.retrieve("test", null), is(entity)); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); + assertThat(factory.retrieve("test", null, CLIENT_ID), is(entity)); verify(entity).validate(any(ServerSideConfiguration.class)); verify(entity, never()).close(); } @@ -132,9 +132,9 @@ public void testRetrieveFailedValidate() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); + EhcacheClientEntityFactory factory = new 
EhcacheClientEntityFactory(connection); try { - factory.retrieve("test", null); + factory.retrieve("test", null, CLIENT_ID); fail("Expecting IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected @@ -153,9 +153,9 @@ public void testRetrieveWhenNotExisting() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); try { - factory.retrieve("test", null); + factory.retrieve("test", null, CLIENT_ID); fail("Expected EntityNotFoundException"); } catch (EntityNotFoundException e) { //expected @@ -171,7 +171,7 @@ public void testDestroy() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); factory.destroy("test"); verify(entityRef).destroy(); } @@ -185,7 +185,7 @@ public void testDestroyWhenNotExisting() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); try { factory.destroy("test"); fail("Expected EhcacheEntityNotFoundException"); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java index eca7bd0ee8..dfe7ef8581 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java @@ -34,7 +34,6 @@ import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.store.StoreAccessTimeoutException; -import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.expiry.Expirations; @@ -59,7 +58,6 @@ import static org.ehcache.clustered.util.StatisticsTestUtils.validateStat; import static org.ehcache.clustered.util.StatisticsTestUtils.validateStats; -import static org.ehcache.expiry.Expirations.noExpiration; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.junit.Assert.*; @@ -88,13 +86,13 @@ public void setup() throws Exception { ); Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); - EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection, CLIENT_ID); + EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection); ServerSideConfiguration serverConfig = new ServerSideConfiguration("defaultResource", Collections.emptyMap()); - entityFactory.create("TestCacheManager", serverConfig); + entityFactory.create("TestCacheManager", serverConfig, CLIENT_ID); - EhcacheClientEntity clientEntity = entityFactory.retrieve("TestCacheManager", serverConfig); + EhcacheClientEntity clientEntity = entityFactory.retrieve("TestCacheManager", serverConfig, 
CLIENT_ID); ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4, MemoryUnit.MB); ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java index f15ee15a73..20e6704720 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java @@ -55,7 +55,7 @@ public class EventualServerStoreProxyTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); - private static final UUID CLIENT_ID = UUID.randomUUID(); + private static EhcacheClientEntity clientEntity1; private static EhcacheClientEntity clientEntity2; @@ -68,16 +68,22 @@ public static void setUp() throws Exception { new PassthroughServerBuilder() .resource("defaultResource", 128, MemoryUnit.MB) .build()); - Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); - - EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection, CLIENT_ID); - - entityFactory.create("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap())); - clientEntity1 = entityFactory.retrieve("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap())); - clientEntity2 = entityFactory.retrieve("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap())); + UnitTestConnectionService unitTestConnectionService = new UnitTestConnectionService(); + Connection connection1 = unitTestConnectionService.connect(CLUSTER_URI, new Properties()); + Connection connection2 = unitTestConnectionService.connect(CLUSTER_URI, new Properties()); + + EhcacheClientEntityFactory entityFactory1 = new EhcacheClientEntityFactory(connection1); + EhcacheClientEntityFactory entityFactory2 = new EhcacheClientEntityFactory(connection2); + + UUID clientId1 = UUID.randomUUID(); + UUID clientId2 = UUID.randomUUID(); + + entityFactory1.create("TestCacheManager", + new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId1); + clientEntity1 = entityFactory1.retrieve("TestCacheManager", + new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId1); + clientEntity2 = entityFactory2.retrieve("TestCacheManager", + new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId2); ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(16L, MemoryUnit.MB); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java index 858ac9b17c..a3091f278d 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java @@ -62,12 +62,12 @@ public static void setUp() throws Exception { .build()); Connection connection = new 
UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); - EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection, CLIENT_ID); + EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection); ServerSideConfiguration serverConfig = new ServerSideConfiguration("defaultResource", Collections.emptyMap()); - entityFactory.create("TestCacheManager", serverConfig); - clientEntity = entityFactory.retrieve("TestCacheManager", serverConfig); + entityFactory.create("TestCacheManager", serverConfig, CLIENT_ID); + clientEntity = entityFactory.retrieve("TestCacheManager", serverConfig, CLIENT_ID); ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(16L, MemoryUnit.MB); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java index 71ee3bf38c..bfba0924c8 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java @@ -60,7 +60,6 @@ public class StrongServerStoreProxyTest { private static final ExecutorService EXECUTOR_SERVICE = Executors.newCachedThreadPool(); - private static final UUID CLIENT_ID = UUID.randomUUID(); private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); @@ -76,16 +75,22 @@ public static void setUp() throws Exception { new PassthroughServerBuilder() .resource("defaultResource", 128, MemoryUnit.MB) .build()); - Connection connection = new UnitTestConnectionService().connect(CLUSTER_URI, new Properties()); - - EhcacheClientEntityFactory entityFactory = new EhcacheClientEntityFactory(connection, CLIENT_ID); - - entityFactory.create("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap())); - clientEntity1 = entityFactory.retrieve("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap())); - clientEntity2 = entityFactory.retrieve("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap())); + UnitTestConnectionService unitTestConnectionService = new UnitTestConnectionService(); + Connection connection1 = unitTestConnectionService.connect(CLUSTER_URI, new Properties()); + Connection connection2 = unitTestConnectionService.connect(CLUSTER_URI, new Properties()); + + EhcacheClientEntityFactory entityFactory1 = new EhcacheClientEntityFactory(connection1); + EhcacheClientEntityFactory entityFactory2 = new EhcacheClientEntityFactory(connection2); + + UUID clientId1 = UUID.randomUUID(); + UUID clientId2 = UUID.randomUUID(); + + entityFactory1.create("TestCacheManager", + new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId1); + clientEntity1 = entityFactory1.retrieve("TestCacheManager", + new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId1); + clientEntity2 = entityFactory2.retrieve("TestCacheManager", + new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId2); ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4L, MemoryUnit.MB); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java 
b/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java index e61b3895df..3694a43a8b 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java @@ -18,6 +18,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.server.state.ClientMessageTracker; import org.terracotta.entity.ActiveServerEntity; import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.ConcurrencyStrategy; @@ -136,5 +137,6 @@ public Set getSharedResourcePoolIds() { public Set getDedicatedResourcePoolIds() { return ehcacheStateService.getDedicatedResourcePoolIds(); } + } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java index e5bf01ae28..d4b663fd7b 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java @@ -72,18 +72,18 @@ public static void closeConnection() throws IOException { @Test public void testCreate() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); - factory.create("testCreate", new ServerSideConfiguration(EMPTY_RESOURCE_MAP)); + factory.create("testCreate", new ServerSideConfiguration(EMPTY_RESOURCE_MAP), CLIENT_ID); } @Test public void testCreateWhenExisting() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); - factory.create("testCreateWhenExisting", new ServerSideConfiguration(EMPTY_RESOURCE_MAP)); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + factory.create("testCreateWhenExisting", new ServerSideConfiguration(EMPTY_RESOURCE_MAP), CLIENT_ID); try { factory.create("testCreateWhenExisting", - new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(42L, "bar")))); + new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(42L, "bar"))), CLIENT_ID); fail("Expected EntityAlreadyExistsException"); } catch (EntityAlreadyExistsException e) { //expected @@ -92,14 +92,14 @@ public void testCreateWhenExisting() throws Exception { @Test public void testCreateWithBadConfigCleansUp() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); try { - factory.create("testCreateWithBadConfigCleansUp", new ServerSideConfiguration("flargle", EMPTY_RESOURCE_MAP)); + factory.create("testCreateWithBadConfigCleansUp", new ServerSideConfiguration("flargle", EMPTY_RESOURCE_MAP), CLIENT_ID); fail("Expected EhcacheEntityCreationException"); } catch (EhcacheEntityCreationException e) { try { - factory.retrieve("testCreateWithBadConfigCleansUp", null); + factory.retrieve("testCreateWithBadConfigCleansUp", null, CLIENT_ID); fail("Expected EntityNotFoundException"); } catch (EntityNotFoundException f) { //expected @@ 
-109,21 +109,21 @@ public void testCreateWithBadConfigCleansUp() throws Exception { @Test public void testRetrieveWithGoodConfig() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); factory.create("testRetrieveWithGoodConfig", - new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(43L, "primary")))); + new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(43L, "primary"))), CLIENT_ID); assertThat(factory.retrieve("testRetrieveWithGoodConfig", - new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(43L, "primary")))), notNullValue()); + new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(43L, "primary"))), CLIENT_ID), notNullValue()); } @Test public void testRetrieveWithBadConfig() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); factory.create("testRetrieveWithBadConfig", - new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(42L, "primary")))); + new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(42L, "primary"))), CLIENT_ID); try { factory.retrieve("testRetrieveWithBadConfig", - new ServerSideConfiguration(Collections.singletonMap("bar", new Pool(42L, "primary")))); + new ServerSideConfiguration(Collections.singletonMap("bar", new Pool(42L, "primary"))), CLIENT_ID); fail("Expected EhcacheEntityValidationException"); } catch (EhcacheEntityValidationException e) { //expected @@ -132,9 +132,9 @@ public void testRetrieveWithBadConfig() throws Exception { @Test public void testRetrieveWhenNotExisting() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); try { - factory.retrieve("testRetrieveWhenNotExisting", null); + factory.retrieve("testRetrieveWhenNotExisting", null, CLIENT_ID); fail("Expected EntityNotFoundException"); } catch (EntityNotFoundException e) { //expected @@ -143,14 +143,14 @@ public void testRetrieveWhenNotExisting() throws Exception { @Test public void testDestroy() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); - factory.create("testDestroy", new ServerSideConfiguration(Collections.emptyMap())); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); + factory.create("testDestroy", new ServerSideConfiguration(Collections.emptyMap()), CLIENT_ID); factory.destroy("testDestroy"); } @Test public void testDestroyWhenNotExisting() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); try { factory.destroy("testDestroyWhenNotExisting"); fail("Expected EhcacheEntityNotFoundException"); @@ -161,7 +161,7 @@ public void testDestroyWhenNotExisting() throws Exception { @Test public void testAbandonLeadershipWhenNotOwning() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); try { factory.abandonLeadership("testAbandonLeadershipWhenNotOwning"); fail("Expected IllegalMonitorStateException"); @@ 
-172,18 +172,18 @@ public void testAbandonLeadershipWhenNotOwning() throws Exception { @Test public void testAcquireLeadershipWhenAlone() throws Exception { - EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); + EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); assertThat(factory.acquireLeadership("testAcquireLeadershipWhenAlone"), is(true)); } @Test public void testAcquireLeadershipWhenTaken() throws Exception { - EhcacheClientEntityFactory factoryA = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); + EhcacheClientEntityFactory factoryA = new EhcacheClientEntityFactory(CONNECTION); assertThat(factoryA.acquireLeadership("testAcquireLeadershipWhenTaken"), is(true)); Connection clientB = CLUSTER.newConnection(); try { - EhcacheClientEntityFactory factoryB = new EhcacheClientEntityFactory(clientB, UUID.randomUUID()); + EhcacheClientEntityFactory factoryB = new EhcacheClientEntityFactory(clientB); assertThat(factoryB.acquireLeadership("testAcquireLeadershipWhenTaken"), is(false)); } finally { clientB.close(); @@ -192,13 +192,13 @@ public void testAcquireLeadershipWhenTaken() throws Exception { @Test public void testAcquireLeadershipAfterAbandoned() throws Exception { - EhcacheClientEntityFactory factoryA = new EhcacheClientEntityFactory(CONNECTION, CLIENT_ID); + EhcacheClientEntityFactory factoryA = new EhcacheClientEntityFactory(CONNECTION); factoryA.acquireLeadership("testAcquireLeadershipAfterAbandoned"); factoryA.abandonLeadership("testAcquireLeadershipAfterAbandoned"); Connection clientB = CLUSTER.newConnection(); try { - EhcacheClientEntityFactory factoryB = new EhcacheClientEntityFactory(clientB, UUID.randomUUID()); + EhcacheClientEntityFactory factoryB = new EhcacheClientEntityFactory(clientB); assertThat(factoryB.acquireLeadership("testAcquireLeadershipAfterAbandoned"), is(true)); } finally { clientB.close(); diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index 2d28c97450..430006ccde 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -44,11 +44,15 @@ compileTestJava { options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') } +javadoc { + options.addStringOption('Xdoclint:none', '-quiet') +} + sourceCompatibility = 1.8 targetCompatibility = 1.8 checkstyle { - toolVersion = '7.1' + toolVersion = '5.9' } sourceSets { @@ -60,3 +64,8 @@ sourceSets { runtimeClasspath += configurations.provided } } + +test { + executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'java') + environment 'JAVA_HOME', MavenToolchain.javaHome(JavaVersion.VERSION_1_8) +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 57b605f451..469abc631a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -97,6 +97,7 @@ class EhcacheActiveEntity implements ActiveServerEntity>(); private final ConcurrentHashMap clientIdMap = new ConcurrentHashMap<>(); + private final Set invalidIds = Collections.newSetFromMap(new ConcurrentHashMap<>()); private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); private final ServerStoreCompatibility storeCompatibility = new ServerStoreCompatibility(); private final EhcacheEntityResponseFactory 
responseFactory;
@@ -226,6 +227,11 @@ public void disconnected(ClientDescriptor clientDescriptor) {
         detachStore(clientDescriptor, storeId);
       }
     }
+    UUID clientId = clientIdMap.remove(clientDescriptor);
+    if (clientId != null) {
+      invalidIds.remove(clientId);
+      ehcacheStateService.getClientMessageTracker().remove(clientId);
+    }
   }
 
   @Override
@@ -263,6 +269,7 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe
     clientState.attach();
     ReconnectData reconnectData = reconnectDataCodec.decode(extendedReconnectData);
     clientIdMap.put(clientDescriptor, reconnectData.getClientId());
+    invalidIds.add(reconnectData.getClientId());
     Set cacheIds = reconnectData.getAllCaches();
     for (final String cacheId : cacheIds) {
       ServerStoreImpl serverStore = ehcacheStateService.getStore(cacheId);
@@ -557,10 +564,11 @@ private void configure(ClientDescriptor clientDescriptor, ConfigureStoreManager
    */
  private void validate(ClientDescriptor clientDescriptor, ValidateStoreManager message) throws ClusterException {
    validateClientConnected(clientDescriptor);
-    if (clientIdMap.get(clientDescriptor) != null && clientIdMap.get(clientDescriptor).equals(message.getClientId())) {
+    if (invalidIds.contains(message.getClientId())) {
      throw new InvalidClientIdException("Client ID : " + message.getClientId() + " is already being tracked by Active");
    }
    clientIdMap.put(clientDescriptor, message.getClientId());
+    invalidIds.add(message.getClientId());
    ehcacheStateService.validate(message);
    this.clientStateMap.get(clientDescriptor).attach();
  }
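Illustrative note, not part of the patch series: the preceding diff removes the client UUID from EhcacheClientEntityFactory construction and instead passes it to each create/retrieve call, while the active entity records every validated id in its invalidIds set and answers a UUID it is already tracking with InvalidClientIdException. A minimal client-side sketch of the updated calls, with placeholder names:

    // Sketch only - imports and exception handling elided; "connection" is assumed to be an
    // open Connection and "config" a ServerSideConfiguration, neither defined in the patch text.
    UUID clientId = UUID.randomUUID();
    EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection);
    factory.create("TestCacheManager", config, clientId);
    EhcacheClientEntity entity = factory.retrieve("TestCacheManager", config, clientId);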
From bc188e83ce441769b62567760186906c4e6ec665 Mon Sep 17 00:00:00 2001
From: Abhilash
Date: Tue, 13 Sep 2016 16:37:37 +0530
Subject: [PATCH 033/218] Adding identity notion to other messages #1211

---
 .../client/internal/EhcacheClientEntity.java  |  7 ++++++
 .../service/ConcurrentClusteredMap.java       |  3 ++-
 .../service/DefaultClusteringService.java     |  2 +-
 .../internal/store/ClusteredStoreTest.java    |  2 +-
 .../store/EventualServerStoreProxyTest.java   |  4 +--
 .../NoInvalidationServerStoreProxyTest.java   |  2 +-
 .../store/StrongServerStoreProxyTest.java     |  4 +--
 .../messages/ServerStoreMessageFactory.java   | 13 ++++++----
 .../internal/messages/ServerStoreOpCodec.java | 25 +++++++++++++++----
 .../messages/ServerStoreOpMessage.java        | 16 +++++++-----
 .../StateRepositoryMessageFactory.java        | 12 ++++++---
 .../messages/StateRepositoryOpMessage.java    | 23 +++++++++--------
 .../internal/messages/EhcacheCodecTest.java   | 10 +++-----
 .../ServerStoreMessageFactoryTest.java        |  4 ++-
 .../messages/ServerStoreOpCodecTest.java      | 11 +++++++-
 .../messages/ServerStoreOpMessageTest.java    | 24 +++++++++---------
 .../server/EhcacheActiveEntityTest.java       | 18 ++++++-------
 .../repo/ServerStateRepositoryTest.java       | 23 +++++++++--------
 .../repo/StateRepositoryManagerTest.java      |  8 ++++--
 19 files changed, 131 insertions(+), 80 deletions(-)

diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java
index 685715eb0e..26646f5ef8 100644
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java
+++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java
@@ -146,6 +146,13 @@ public void setClientId(UUID clientId) {
     this.reconnectData.setClientId(clientId);
   }
 
+  public UUID getClientId() {
+    if (clientId == null) {
+      throw new IllegalStateException("Client Id cannot be null");
+    }
+    return this.clientId;
+  }
+
   public boolean isConnected() {
     return connected;
   }
diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ConcurrentClusteredMap.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ConcurrentClusteredMap.java
index d8548549ae..ea23cf5beb 100644
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ConcurrentClusteredMap.java
+++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ConcurrentClusteredMap.java
@@ -25,6 +25,7 @@
 import java.util.Collection;
 import java.util.Map;
 import java.util.Set;
+import java.util.UUID;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.TimeoutException;
 
@@ -34,7 +35,7 @@ public class ConcurrentClusteredMap implements ConcurrentMap {
   private final EhcacheClientEntity entity;
 
   public ConcurrentClusteredMap(final String cacheId, final String mapId, final EhcacheClientEntity entity) {
-    this.messageFactory = new StateRepositoryMessageFactory(cacheId, mapId);
+    this.messageFactory = new StateRepositoryMessageFactory(cacheId, mapId, entity.getClientId());
     this.entity = entity;
   }
diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java
index 8bc7985a8c..4b3449d673 100644
--- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java
+++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java
@@ -419,7 +419,7 @@ public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifie
           + "'; validate operation timed out", e);
     }
 
-    ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory(cacheId);
+    ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory(cacheId, clientId);
     switch (configuredConsistency) {
       case STRONG:
         return new StrongServerStoreProxy(messageFactory, entity);
diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java
index dfe7ef8581..4e262737f1 100644
--- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java
+++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java
@@ -102,7 +102,7 @@ public void setup() throws Exception {
         null
     );
     clientEntity.createCache(CACHE_IDENTIFIER, serverStoreConfiguration);
-    ServerStoreMessageFactory factory = new ServerStoreMessageFactory(CACHE_IDENTIFIER);
+    ServerStoreMessageFactory factory = new ServerStoreMessageFactory(CACHE_IDENTIFIER, CLIENT_ID);
     ServerStoreProxy serverStoreProxy = new NoInvalidationServerStoreProxy(factory, clientEntity);
 
     TestTimeSource testTimeSource = new TestTimeSource();
diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java
index 20e6704720..5a180766c7 100644
--- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java
+++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java
@@ -96,8 +96,8 @@ public 
static void setUp() throws Exception { clientEntity1.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); clientEntity2.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); - serverStoreProxy1 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity1); - serverStoreProxy2 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity2); + serverStoreProxy1 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientId1), clientEntity1); + serverStoreProxy2 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientId2), clientEntity2); } @AfterClass diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java index a3091f278d..68fe2203a5 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java @@ -74,7 +74,7 @@ public static void setUp() throws Exception { clientEntity.createCache(CACHE_IDENTIFIER, new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class .getName(), null)); - serverStoreProxy = new NoInvalidationServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity); + serverStoreProxy = new NoInvalidationServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, CLIENT_ID), clientEntity); } @AfterClass diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java index bfba0924c8..e86e82d8c9 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java @@ -103,8 +103,8 @@ public static void setUp() throws Exception { clientEntity1.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); clientEntity2.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); - serverStoreProxy1 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity1); - serverStoreProxy2 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER), clientEntity2); + serverStoreProxy1 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientId1), clientEntity1); + serverStoreProxy2 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientId2), clientEntity2); } @AfterClass diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java index 7b376c1675..edbd11ecef 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java @@ -19,13 +19,16 @@ import 
org.ehcache.clustered.common.internal.store.Chain; import java.nio.ByteBuffer; +import java.util.UUID; public class ServerStoreMessageFactory { private final String cacheId; + private final UUID clientId; - public ServerStoreMessageFactory(String cacheId) { + public ServerStoreMessageFactory(String cacheId, UUID clientId) { this.cacheId = cacheId; + this.clientId = clientId; } public EhcacheEntityMessage getOperation(long key) { @@ -33,15 +36,15 @@ public EhcacheEntityMessage getOperation(long key) { } public EhcacheEntityMessage getAndAppendOperation(long key, ByteBuffer payload) { - return new ServerStoreOpMessage.GetAndAppendMessage(this.cacheId, key, payload); + return new ServerStoreOpMessage.GetAndAppendMessage(this.cacheId, key, payload, clientId); } public EhcacheEntityMessage appendOperation(long key, ByteBuffer payload) { - return new ServerStoreOpMessage.AppendMessage(this.cacheId, key, payload); + return new ServerStoreOpMessage.AppendMessage(this.cacheId, key, payload, clientId); } public EhcacheEntityMessage replaceAtHeadOperation(long key, Chain expect, Chain update) { - return new ServerStoreOpMessage.ReplaceAtHeadMessage(this.cacheId, key, expect, update); + return new ServerStoreOpMessage.ReplaceAtHeadMessage(this.cacheId, key, expect, update, clientId); } public EhcacheEntityMessage clientInvalidationAck(int invalidationId) { @@ -49,7 +52,7 @@ public EhcacheEntityMessage clientInvalidationAck(int invalidationId) { } public EhcacheEntityMessage clearOperation() { - return new ServerStoreOpMessage.ClearMessage(this.cacheId); + return new ServerStoreOpMessage.ClearMessage(this.cacheId, clientId); } public String getCacheId() { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java index 4d9736a9b2..bdb525484b 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.common.internal.messages; +import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.AppendMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClearMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetAndAppendMessage; @@ -25,6 +26,7 @@ import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp; import java.nio.ByteBuffer; +import java.util.UUID; class ServerStoreOpCodec { @@ -33,7 +35,7 @@ class ServerStoreOpCodec { private static final byte KEY_SIZE = 8; private static final byte CHAIN_LEN_SIZE = 4; private static final byte INVALIDATION_ID_LEN_SIZE = 4; - private static final byte MESSAGE_ID_SIZE = 8; + private static final byte MESSAGE_ID_SIZE = 24; private final ChainCodec chainCodec; @@ -90,6 +92,7 @@ public byte[] encode(ServerStoreOpMessage message) { ClearMessage clearMessage = (ClearMessage)message; encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + MESSAGE_ID_SIZE + 2 * cacheIdLen); encodedMsg.put(clearMessage.getOpCode()); + encodedMsg.put(ClusteredEhcacheIdentity.serialize(message.getClientId())); encodedMsg.putLong(message.getId()); CodecUtil.putStringAsCharArray(encodedMsg, clearMessage.getCacheId()); return encodedMsg.array(); @@ -101,6 +104,7 
@@ public byte[] encode(ServerStoreOpMessage message) { // This assumes correct allocation and puts extracts common code private static void putCacheIdKeyAndOpCode(ByteBuffer byteBuffer, ServerStoreOpMessage message, long key) { byteBuffer.put(message.getOpCode()); + byteBuffer.put(ClusteredEhcacheIdentity.serialize(message.getClientId())); byteBuffer.putLong(message.getId()); byteBuffer.putInt(message.getCacheId().length()); CodecUtil.putStringAsCharArray(byteBuffer, message.getCacheId()); @@ -114,6 +118,7 @@ public EhcacheEntityMessage decode(byte[] payload) { long key; String cacheId; + UUID clientId; long msgId; EhcacheEntityMessage decodecMsg; @@ -123,20 +128,23 @@ public EhcacheEntityMessage decode(byte[] payload) { cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); return new GetMessage(cacheId, key); case GET_AND_APPEND: + clientId = getClientId(msg); msgId = msg.getLong(); cacheId = readStringFromBufferWithSize(msg); key = msg.getLong(); - decodecMsg = new GetAndAppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer()); + decodecMsg = new GetAndAppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer(), clientId); decodecMsg.setId(msgId); return decodecMsg; case APPEND: + clientId = getClientId(msg); msgId = msg.getLong(); cacheId = readStringFromBufferWithSize(msg); key = msg.getLong(); - decodecMsg = new AppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer()); + decodecMsg = new AppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer(), clientId); decodecMsg.setId(msgId); return decodecMsg; case REPLACE: + clientId = getClientId(msg); msgId = msg.getLong(); cacheId = readStringFromBufferWithSize(msg); key = msg.getLong(); @@ -147,7 +155,7 @@ public EhcacheEntityMessage decode(byte[] payload) { byte[] encodedUpdateChain = new byte[updateChainLen]; msg.get(encodedUpdateChain); decodecMsg = new ReplaceAtHeadMessage(cacheId, key, chainCodec.decode(encodedExpectChain), - chainCodec.decode(encodedUpdateChain)); + chainCodec.decode(encodedUpdateChain), clientId); decodecMsg.setId(msgId); return decodecMsg; case CLIENT_INVALIDATION_ACK: @@ -155,9 +163,10 @@ public EhcacheEntityMessage decode(byte[] payload) { cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); return new ClientInvalidationAck(cacheId, invalidationId); case CLEAR: + clientId = getClientId(msg); msgId = msg.getLong(); cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); - decodecMsg = new ClearMessage(cacheId); + decodecMsg = new ClearMessage(cacheId, clientId); decodecMsg.setId(msgId); return decodecMsg; default: @@ -170,4 +179,10 @@ private static String readStringFromBufferWithSize(ByteBuffer buffer) { return CodecUtil.getStringFromBuffer(buffer, length); } + private static UUID getClientId(ByteBuffer payload) { + long msb = payload.getLong(); + long lsb = payload.getLong(); + return new UUID(msb, lsb); + } + } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java index 86da8c8248..c5b67baf61 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java @@ -63,13 +63,13 @@ public static ServerStoreOp getServerStoreOp(byte storeOpCode) { } - protected UUID clientId = null; //TODO: #1211 + protected UUID clientId; protected long id = 
NOT_REPLICATED; @Override public UUID getClientId() { if (clientId == null) { - throw new AssertionError("Client Id cannot be null for lifecycle messages"); + throw new AssertionError("Client Id is not supported for message type " + this.operation() ); } return this.clientId; } @@ -151,9 +151,10 @@ public static class GetAndAppendMessage extends KeyBasedServerStoreOpMessage { private final ByteBuffer payload; - GetAndAppendMessage(String cacheId, long key, ByteBuffer payload) { + GetAndAppendMessage(String cacheId, long key, ByteBuffer payload, UUID clientId) { super(cacheId, key); this.payload = payload; + this.clientId = clientId; } @Override @@ -171,9 +172,10 @@ public static class AppendMessage extends KeyBasedServerStoreOpMessage { private final ByteBuffer payload; - AppendMessage(String cacheId, long key, ByteBuffer payload) { + AppendMessage(String cacheId, long key, ByteBuffer payload, UUID clientId) { super(cacheId, key); this.payload = payload; + this.clientId = clientId; } @Override @@ -192,10 +194,11 @@ public static class ReplaceAtHeadMessage extends KeyBasedServerStoreOpMessage { private final Chain expect; private final Chain update; - ReplaceAtHeadMessage(String cacheId, long key, Chain expect, Chain update) { + ReplaceAtHeadMessage(String cacheId, long key, Chain expect, Chain update, UUID clientId) { super(cacheId, key); this.expect = expect; this.update = update; + this.clientId = clientId; } @Override @@ -233,8 +236,9 @@ public ServerStoreOp operation() { static class ClearMessage extends ServerStoreOpMessage { - ClearMessage(final String cacheId) { + ClearMessage(String cacheId, UUID clientId) { super(cacheId); + this.clientId = clientId; } @Override diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java index c1ae209398..c03d77cedb 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryMessageFactory.java @@ -16,26 +16,30 @@ package org.ehcache.clustered.common.internal.messages; +import java.util.UUID; + public class StateRepositoryMessageFactory { private final String cacheId; private final String mapId; + private final UUID clientId; - public StateRepositoryMessageFactory(String cacheId, String mapId) { + public StateRepositoryMessageFactory(String cacheId, String mapId, UUID clientId) { this.cacheId = cacheId; this.mapId = mapId; + this.clientId = clientId; } public StateRepositoryOpMessage getMessage(Object key) { - return new StateRepositoryOpMessage.GetMessage(cacheId, mapId, key); + return new StateRepositoryOpMessage.GetMessage(cacheId, mapId, key, clientId); } public StateRepositoryOpMessage putIfAbsentMessage(Object key, Object value) { - return new StateRepositoryOpMessage.PutIfAbsentMessage(cacheId, mapId, key, value); + return new StateRepositoryOpMessage.PutIfAbsentMessage(cacheId, mapId, key, value, clientId); } public StateRepositoryOpMessage entrySetMessage() { - return new StateRepositoryOpMessage.EntrySetMessage(cacheId, mapId); + return new StateRepositoryOpMessage.EntrySetMessage(cacheId, mapId, clientId); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java 
b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java index f8632b33e4..e2ad6ee732 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java @@ -30,18 +30,19 @@ public enum StateRepositoryOp { private final String cacheId; private final String mapId; - protected UUID clientId = null; //TODO: #1211 + private UUID clientId; protected long id = NOT_REPLICATED; - private StateRepositoryOpMessage(String cacheId, String mapId) { + private StateRepositoryOpMessage(String cacheId, String mapId, UUID clientId) { this.cacheId = cacheId; this.mapId = mapId; + this.clientId = clientId; } @Override public UUID getClientId() { if (clientId == null) { - throw new AssertionError("Client Id cannot be null for lifecycle messages"); + throw new AssertionError("Client Id cannot be null for StateRepository messages"); } return this.clientId; } @@ -85,8 +86,8 @@ private static abstract class KeyBasedMessage extends StateRepositoryOpMessage { private final Object key; - private KeyBasedMessage(final String cacheId, final String mapId, final Object key) { - super(cacheId, mapId); + private KeyBasedMessage(final String cacheId, final String mapId, final Object key, final UUID clientId) { + super(cacheId, mapId, clientId); this.key = key; } @@ -98,8 +99,8 @@ public Object getKey() { public static class GetMessage extends KeyBasedMessage { - public GetMessage(final String cacheId, final String mapId, final Object key) { - super(cacheId, mapId, key); + public GetMessage(final String cacheId, final String mapId, final Object key, final UUID clientId) { + super(cacheId, mapId, key, clientId); } @Override @@ -112,8 +113,8 @@ public static class PutIfAbsentMessage extends KeyBasedMessage { private final Object value; - public PutIfAbsentMessage(final String cacheId, final String mapId, final Object key, final Object value) { - super(cacheId, mapId, key); + public PutIfAbsentMessage(final String cacheId, final String mapId, final Object key, final Object value, final UUID clientId) { + super(cacheId, mapId, key, clientId); this.value = value; } @@ -129,8 +130,8 @@ public StateRepositoryOp operation() { public static class EntrySetMessage extends StateRepositoryOpMessage { - public EntrySetMessage(final String cacheId, final String mapId) { - super(cacheId, mapId); + public EntrySetMessage(final String cacheId, final String mapId, final UUID clientId) { + super(cacheId, mapId, clientId); } @Override diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java index 2d56876573..24736ee310 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java @@ -20,18 +20,16 @@ import java.util.UUID; -import static org.junit.Assert.*; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.only; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; public class EhcacheCodecTest { - private static final UUID clientId = 
UUID.randomUUID(); + private static final UUID CLIENT_ID = UUID.randomUUID(); @Test public void encodeMessage() throws Exception { @@ -40,19 +38,19 @@ public void encodeMessage() throws Exception { StateRepositoryOpCodec stateRepositoryOpCodec = mock(StateRepositoryOpCodec.class); EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null); - LifecycleMessage.DestroyServerStore lifecycleMessage = new LifecycleMessage.DestroyServerStore("foo", clientId); + LifecycleMessage.DestroyServerStore lifecycleMessage = new LifecycleMessage.DestroyServerStore("foo", CLIENT_ID); codec.encodeMessage(lifecycleMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, never()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, never()).encode(any(StateRepositoryOpMessage.class)); - ServerStoreOpMessage.ClearMessage serverStoreOpMessage = new ServerStoreOpMessage.ClearMessage("foo"); + ServerStoreOpMessage.ClearMessage serverStoreOpMessage = new ServerStoreOpMessage.ClearMessage("foo", CLIENT_ID); codec.encodeMessage(serverStoreOpMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, never()).encode(any(StateRepositoryOpMessage.class)); - StateRepositoryOpMessage.EntrySetMessage stateRepositoryOpMessage = new StateRepositoryOpMessage.EntrySetMessage("foo", "bar"); + StateRepositoryOpMessage.EntrySetMessage stateRepositoryOpMessage = new StateRepositoryOpMessage.EntrySetMessage("foo", "bar", CLIENT_ID); codec.encodeMessage(stateRepositoryOpMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactoryTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactoryTest.java index bfa0b140a3..3fab7dd997 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactoryTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactoryTest.java @@ -18,6 +18,8 @@ import org.junit.Test; +import java.util.UUID; + import static org.junit.Assert.assertThat; import static org.hamcrest.Matchers.is; import static org.ehcache.clustered.common.internal.store.Util.createPayload; @@ -26,7 +28,7 @@ public class ServerStoreMessageFactoryTest { - private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test"); + private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test", UUID.randomUUID()); @Test public void testAppendMessage() { diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java index 899dcecb04..12e58585c9 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java @@ -19,15 +19,19 @@ import org.junit.Test; +import java.util.UUID; + import static 
org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; import static org.ehcache.clustered.common.internal.store.Util.readPayLoad; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; public class ServerStoreOpCodecTest { - private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test"); + private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test", UUID.randomUUID()); private static final ServerStoreOpCodec STORE_OP_CODEC = new ServerStoreOpCodec(); @Test @@ -42,6 +46,7 @@ public void testAppendMessageCodec() { assertThat(decodedAppendMessage.getKey(), is(1L)); assertThat(readPayLoad(decodedAppendMessage.getPayload()), is(1L)); assertThat(decodedAppendMessage.getId(), is(-1L)); + assertEquals(appendMessage.getClientId(), decodedAppendMessage.getClientId()); } @Test @@ -53,6 +58,7 @@ public void testGetMessageCodec() { assertThat(decodedGetMessage.getCacheId(), is("test")); assertThat(decodedGetMessage.getKey(), is(2L)); + } @Test @@ -66,6 +72,7 @@ public void testGetAndAppendMessageCodec() { assertThat(decodedGetAndAppendMessage.getKey(), is(10L)); assertThat(readPayLoad(decodedGetAndAppendMessage.getPayload()), is(10L)); assertThat(decodedGetAndAppendMessage.getId(), is(-1L)); + assertEquals(getAndAppendMessage.getClientId(), decodedGetAndAppendMessage.getClientId()); } @Test @@ -82,6 +89,7 @@ public void testReplaceAtHeadMessageCodec() { assertThat(decodedReplaceAtHeadMessage.getId(), is(-1L)); Util.assertChainHas(decodedReplaceAtHeadMessage.getExpect(), 10L, 100L, 1000L); Util.assertChainHas(decodedReplaceAtHeadMessage.getUpdate(), 2000L); + assertEquals(replaceAtHeadMessage.getClientId(), decodedReplaceAtHeadMessage.getClientId()); } @Test @@ -91,6 +99,7 @@ public void testClearMessageCodec() throws Exception { EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(encodedBytes); assertThat(((ServerStoreOpMessage)decodedMsg).getCacheId(), is("test")); assertThat(decodedMsg.getId(), is(-1L)); + assertEquals(clearMessage.getClientId(), decodedMsg.getClientId()); } @Test diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java index ab0782169e..7890e99b34 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java @@ -20,6 +20,7 @@ import org.junit.Test; import java.util.Collections; +import java.util.UUID; import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; @@ -27,14 +28,13 @@ import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNot.not; -/** - * @author Ludovic Orban - */ public class ServerStoreOpMessageTest { + private static final UUID CLIENT_ID = UUID.randomUUID(); + @Test public void testConcurrencyKeysEqualForSameCache() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.ClearMessage("cache1"); + ServerStoreOpMessage m1 = new ServerStoreOpMessage.ClearMessage("cache1", CLIENT_ID); ServerStoreOpMessage m2 = new 
ServerStoreOpMessage.ClientInvalidationAck("cache1", 1); assertThat(m1.concurrencyKey(), is(m2.concurrencyKey())); @@ -42,10 +42,10 @@ public void testConcurrencyKeysEqualForSameCache() throws Exception { @Test public void testConcurrencyKeysEqualForSameCacheAndKey() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L)); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache1", 1L, createPayload(1L)); + ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); ServerStoreOpMessage m3 = new ServerStoreOpMessage.GetMessage("cache1", 1L); - ServerStoreOpMessage m4 = new ServerStoreOpMessage.ReplaceAtHeadMessage("cache1", 1L, getChain(Collections.emptyList()), getChain(Collections.emptyList())); + ServerStoreOpMessage m4 = new ServerStoreOpMessage.ReplaceAtHeadMessage("cache1", 1L, getChain(Collections.emptyList()), getChain(Collections.emptyList()), CLIENT_ID); assertThat(m1.concurrencyKey(), is(m2.concurrencyKey())); assertThat(m2.concurrencyKey(), is(m3.concurrencyKey())); @@ -54,17 +54,17 @@ public void testConcurrencyKeysEqualForSameCacheAndKey() throws Exception { @Test public void testConcurrencyKeysNotEqualForDifferentCaches() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L)); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 1L, createPayload(1L)); + ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 1L, createPayload(1L), CLIENT_ID); assertThat(m1.concurrencyKey(), not(m2.concurrencyKey())); } @Test public void testConcurrencyKeysNotEqualForDifferentCachesAndKeys() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L)); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 1L, createPayload(1L)); - ServerStoreOpMessage m3 = new ServerStoreOpMessage.AppendMessage("cache1", 2L, createPayload(1L)); + ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 1L, createPayload(1L), CLIENT_ID); + ServerStoreOpMessage m3 = new ServerStoreOpMessage.AppendMessage("cache1", 2L, createPayload(1L), CLIENT_ID); assertThat(m1.concurrencyKey(), not(m2.concurrencyKey())); assertThat(m1.concurrencyKey(), not(m3.concurrencyKey())); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index a4beab7f45..0b8da0e668 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -295,7 +295,7 @@ public void testNoAttachementFailsToInvokeServerStoreOperation() throws Exceptio activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testNoAttachementFailsToInvokeServerStoreOperation"); + 
ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testNoAttachementFailsToInvokeServerStoreOperation", CLIENT_ID); assertFailure( activeEntity.invoke(client, messageFactory.appendOperation(1L, createPayload(1L))), @@ -340,7 +340,7 @@ public void testAppendInvalidationAcksTakenIntoAccount() throws Exception { MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( @@ -439,7 +439,7 @@ public void testClearInvalidationAcksTakenIntoAccount() throws Exception { MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( @@ -538,7 +538,7 @@ public void testAppendInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcc MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( @@ -633,7 +633,7 @@ public void testClearInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcco MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( @@ -728,7 +728,7 @@ public void testAppendInvalidationDisconnectionOfBlockingClientTakenIntoAccount( MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( @@ -813,7 +813,7 @@ public void testClearInvalidationDisconnectionOfBlockingClientTakenIntoAccount() MESSAGE_FACTORY.createServerStore("testDisconnection", serverStoreConfiguration)) ); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testDisconnection", CLIENT_ID); // attach the clients assertSuccess( @@ -897,7 +897,7 @@ public void testAttachedClientButNotStoreFailsInvokingServerStoreOperation() thr activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testAttachedClientButNotStoreFailsInvokingServerStoreOperation"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testAttachedClientButNotStoreFailsInvokingServerStoreOperation", CLIENT_ID); // attach the client assertSuccess( @@ -946,7 +946,7 @@ public void testWithAttachmentSucceedsInvokingServerStoreOperation() throws Exce activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); - 
ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testWithAttachmentSucceedsInvokingServerStoreOperation"); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testWithAttachmentSucceedsInvokingServerStoreOperation", CLIENT_ID); // attach the client assertSuccess( diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java index 82d5123166..98eaea0082 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java @@ -23,6 +23,7 @@ import java.util.AbstractMap; import java.util.Map; import java.util.Set; +import java.util.UUID; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; @@ -31,14 +32,16 @@ public class ServerStateRepositoryTest { + private static final UUID CLIENT_ID = UUID.randomUUID(); + @Test public void testInvokeOnNonExistentRepositorySucceeds() throws Exception { ServerStateRepository repository = new ServerStateRepository(); EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1")); + new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1", CLIENT_ID)); assertThat(response.getValue(), nullValue()); response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1")); + new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1", CLIENT_ID)); assertThat(response.getValue(), is((Object)"value1")); } @@ -46,33 +49,33 @@ public void testInvokeOnNonExistentRepositorySucceeds() throws Exception { public void testInvokePutIfAbsent() throws Exception { ServerStateRepository repository = new ServerStateRepository(); EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1")); + new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1", CLIENT_ID)); assertThat(response.getValue(), nullValue()); response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value2")); + new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value2", CLIENT_ID)); assertThat(response.getValue(), is((Object)"value1")); } @Test public void testInvokeGet() throws Exception { ServerStateRepository repository = new ServerStateRepository(); - repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1")); + repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1", CLIENT_ID)); EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1")); + new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1", CLIENT_ID)); assertThat(response.getValue(), is((Object)"value1")); } @Test public void testInvokeEntrySet() throws Exception { ServerStateRepository repository = new ServerStateRepository(); - repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1")); - repository.invoke(new 
StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key2", "value2")); - repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key3", "value3")); + repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1", CLIENT_ID)); + repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key2", "value2", CLIENT_ID)); + repository.invoke(new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key3", "value3", CLIENT_ID)); EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) repository.invoke( - new StateRepositoryOpMessage.EntrySetMessage("foo", "bar")); + new StateRepositoryOpMessage.EntrySetMessage("foo", "bar", CLIENT_ID)); Set> entrySet = (Set>) response.getValue(); assertThat(entrySet.size(), is(3)); Map.Entry entry1 = new AbstractMap.SimpleEntry("key1", "value1"); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java index 7e551be742..cf76233156 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/StateRepositoryManagerTest.java @@ -20,20 +20,24 @@ import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.junit.Test; +import java.util.UUID; + import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; public class StateRepositoryManagerTest { + private static final UUID CLIENT_ID = UUID.randomUUID(); + @Test public void testInvokeOnNonExistentRepositorySucceeds() throws Exception { StateRepositoryManager manager = new StateRepositoryManager(); EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) manager.invoke( - new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1")); + new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value1", CLIENT_ID)); assertThat(response.getValue(), nullValue()); response = (EhcacheEntityResponse.MapValue) manager.invoke( - new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1")); + new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1", CLIENT_ID)); assertThat(response.getValue(), is((Object)"value1")); } } \ No newline at end of file From d55482d5c4d2e932012c026db84c16f10578b720 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Wed, 14 Sep 2016 01:34:22 +0530 Subject: [PATCH 034/218] Moving client id to EhcacheClientEntity #1208 --- clustered/client/build.gradle | 4 - .../client/internal/EhcacheClientEntity.java | 35 ++++----- .../internal/EhcacheClientEntityFactory.java | 15 ++-- .../service/DefaultClusteringService.java | 27 ++----- .../EhcacheClientEntityFactoryTest.java | 14 ++-- .../internal/store/ClusteredStoreTest.java | 8 +- .../store/EventualServerStoreProxyTest.java | 14 ++-- .../NoInvalidationServerStoreProxyTest.java | 8 +- .../store/StrongServerStoreProxyTest.java | 14 ++-- .../ObservableEhcacheServerEntityService.java | 2 - ...cheClientEntityFactoryIntegrationTest.java | 24 +++--- clustered/server/build.gradle | 7 +- .../clustered/server/EhcacheActiveEntity.java | 14 ++-- .../server/state/ClientMessageTracker.java | 2 +- .../server/state/MessageTracker.java | 73 +++++++++++-------- .../server/state/MessageTrackerTest.java | 2 +- 16 files changed, 115 insertions(+), 148 
deletions(-) diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index cd6f7b6d19..43af57bf61 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -32,10 +32,6 @@ dependencies { testCompile "org.terracotta:passthrough-server:$parent.terracottaPassthroughTestingVersion" } -def java8 = { - JavaVersion.current().isJava8Compatible() -} - compileTestJava { options.fork = true; options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index 26646f5ef8..852936ce93 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -27,6 +27,7 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.ClusterException; +import org.ehcache.clustered.common.internal.exceptions.InvalidClientIdException; import org.ehcache.clustered.common.internal.exceptions.ResourceBusyException; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; @@ -69,8 +70,6 @@ public class EhcacheClientEntity implements Entity { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheClientEntity.class); - private ReconnectData reconnectData = new ReconnectData(); - public interface ResponseListener { void onResponse(T response); } @@ -87,6 +86,7 @@ public interface DisconnectionListener { private final List disconnectionListeners = new CopyOnWriteArrayList(); private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); private volatile boolean connected = true; + private final ReconnectData reconnectData = new ReconnectData(); private volatile UUID clientId; private Timeouts timeouts = Timeouts.builder().build(); @@ -140,12 +140,6 @@ private void fireResponseEvent(EhcacheEntityResponse response) { } } - public void setClientId(UUID clientId) { - this.clientId = clientId; - this.messageFactory.setClientId(clientId); - this.reconnectData.setClientId(clientId); - } - public UUID getClientId() { if (clientId == null) { throw new IllegalStateException("Client Id cannot be null"); @@ -181,7 +175,17 @@ public void close() { public void validate(ServerSideConfiguration config) throws ClusteredTierManagerValidationException, TimeoutException { try { - invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.validateStoreManager(config), false); + while (true) { + try { + clientId = UUID.randomUUID(); + this.messageFactory.setClientId(clientId); + this.reconnectData.setClientId(clientId); + invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.validateStoreManager(config), false); + break; + } catch (InvalidClientIdException e) { + //nothing to do - loop again since the earlier generated UUID is being already tracked by the server + } + } } catch (ClusterException e) { throw new ClusteredTierManagerValidationException("Error validating server clustered tier manager", e); } @@ -189,6 +193,9 @@ public void validate(ServerSideConfiguration config) throws ClusteredTierManager public void configure(ServerSideConfiguration 
config) throws ClusteredTierManagerConfigurationException, TimeoutException { try { + clientId = UUID.randomUUID(); + this.messageFactory.setClientId(clientId); + this.reconnectData.setClientId(clientId); invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.configureStoreManager(config), true); } catch (ClusterException e) { throw new ClusteredTierManagerConfigurationException("Error configuring clustered tier manager", e); @@ -287,17 +294,11 @@ private EhcacheEntityResponse invokeInternal(TimeoutDuration timeLimit, EhcacheE public InvokeFuture invokeAsync(EhcacheEntityMessage message, boolean replicate) throws MessageCodecException { InvokeFuture invoke; - if (clientId == null) { - throw new IllegalStateException("Client ID cannot be null"); - } + getClientId(); if (replicate) { message.setId(sequenceGenerator.getAndIncrement()); - //TODO: remove the replicate call with latest passthrough upgrade - invoke = endpoint.beginInvoke().message(message).replicate(true).invoke(); - } else { - invoke = endpoint.beginInvoke().message(message).replicate(false).invoke(); } - return invoke; + return endpoint.beginInvoke().message(message).replicate(replicate).invoke(); } private static T waitFor(TimeoutDuration timeLimit, InvokeFuture future) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java index 3c89dc5957..3391af2f1f 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java @@ -16,7 +16,6 @@ package org.ehcache.clustered.client.internal; -import org.ehcache.clustered.client.internal.EhcacheClientEntity.Timeouts; import org.ehcache.clustered.client.internal.service.ClusteredTierManagerConfigurationException; import org.ehcache.clustered.client.internal.service.ClusteredTierManagerValidationException; import org.ehcache.clustered.client.service.EntityBusyException; @@ -46,13 +45,13 @@ public class EhcacheClientEntityFactory { private final Connection connection; private final Map maintenanceHolds = new ConcurrentHashMap(); - private final Timeouts entityTimeouts; + private final EhcacheClientEntity.Timeouts entityTimeouts; public EhcacheClientEntityFactory(Connection connection) { - this(connection, Timeouts.builder().build()); + this(connection, EhcacheClientEntity.Timeouts.builder().build()); } - public EhcacheClientEntityFactory(Connection connection, Timeouts entityTimeouts) { + public EhcacheClientEntityFactory(Connection connection, EhcacheClientEntity.Timeouts entityTimeouts) { this.connection = connection; this.entityTimeouts = entityTimeouts; } @@ -83,7 +82,6 @@ public void abandonLeadership(String entityIdentifier) { * * @param identifier the instance identifier for the {@code EhcacheActiveEntity} * @param config the {@code EhcacheActiveEntity} configuration to use for creation - * @param clientId UUID generated for the client * * @throws EntityAlreadyExistsException if the {@code EhcacheActiveEntity} for {@code identifier} already exists * @throws EhcacheEntityCreationException if an error preventing {@code EhcacheActiveEntity} creation was raised @@ -92,7 +90,7 @@ public void abandonLeadership(String entityIdentifier) { * @throws TimeoutException if the creation and configuration of the {@code EhcacheActiveEntity} exceed the * lifecycle operation 
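The change above has the client entity choose its own client id: validate() now draws a random UUID, stamps the message factory and reconnect data with it, and simply retries with a fresh UUID whenever the active entity answers with InvalidClientIdException because that id is already tracked. A minimal, self-contained sketch of that negotiation loop follows; the registry and exception types are illustrative stand-ins, not Ehcache classes.

import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical stand-in for the server-side tracking that rejects an id already in use.
class DuplicateClientIdException extends Exception {
  DuplicateClientIdException(String message) { super(message); }
}

class ClientIdRegistry {
  private final Set<UUID> trackedIds = ConcurrentHashMap.newKeySet();

  void register(UUID clientId) throws DuplicateClientIdException {
    if (!trackedIds.add(clientId)) {
      throw new DuplicateClientIdException("Client ID " + clientId + " is already being tracked");
    }
  }
}

class ClientIdNegotiation {
  // Keep generating fresh random ids until the registry accepts one, mirroring the
  // while(true) / catch(InvalidClientIdException) loop added to EhcacheClientEntity.validate().
  static UUID negotiate(ClientIdRegistry registry) {
    while (true) {
      UUID candidate = UUID.randomUUID();
      try {
        registry.register(candidate);
        return candidate;
      } catch (DuplicateClientIdException e) {
        // nothing to do - loop again with a new UUID
      }
    }
  }

  public static void main(String[] args) {
    ClientIdRegistry registry = new ClientIdRegistry();
    UUID first = negotiate(registry);
    UUID second = negotiate(registry);
    System.out.println(first + " / " + second); // two distinct accepted ids
  }
}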
timeout */ - public void create(final String identifier, final ServerSideConfiguration config, final UUID clientId) + public void create(final String identifier, final ServerSideConfiguration config) throws EntityAlreadyExistsException, EhcacheEntityCreationException, EntityBusyException, TimeoutException { Hold existingMaintenance = maintenanceHolds.get(identifier); Hold localMaintenance = null; @@ -112,7 +110,6 @@ public void create(final String identifier, final ServerSideConfiguration config EhcacheClientEntity entity = ref.fetchEntity(); try { entity.setTimeouts(entityTimeouts); - entity.setClientId(clientId); entity.configure(config); return; } finally { @@ -149,7 +146,6 @@ public void create(final String identifier, final ServerSideConfiguration config * * @param identifier the instance identifier for the {@code EhcacheActiveEntity} * @param config the {@code EhcacheActiveEntity} configuration to use for access checking - * @param clientId UUID generated for the client * * @return an {@code EhcacheClientEntity} providing access to the {@code EhcacheActiveEntity} identified by * {@code identifier} @@ -159,7 +155,7 @@ public void create(final String identifier, final ServerSideConfiguration config * @throws TimeoutException if the creation and configuration of the {@code EhcacheActiveEntity} exceed the * lifecycle operation timeout */ - public EhcacheClientEntity retrieve(String identifier, ServerSideConfiguration config, UUID clientId) + public EhcacheClientEntity retrieve(String identifier, ServerSideConfiguration config) throws EntityNotFoundException, EhcacheEntityValidationException, TimeoutException { try { Hold fetchHold = createAccessLockFor(identifier).readLock(); @@ -172,7 +168,6 @@ public EhcacheClientEntity retrieve(String identifier, ServerSideConfiguration c boolean validated = false; try { entity.setTimeouts(entityTimeouts); - entity.setClientId(clientId); entity.validate(config); validated = true; return entity; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 4b3449d673..439ac809b7 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -36,7 +36,6 @@ import org.ehcache.clustered.client.service.EntityService; import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.exceptions.InvalidClientIdException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.config.CacheConfiguration; @@ -63,7 +62,6 @@ import java.net.URISyntaxException; import java.util.Arrays; import java.util.Properties; -import java.util.UUID; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeoutException; @@ -82,7 +80,6 @@ class DefaultClusteringService implements ClusteringService, EntityService { private final String entityIdentifier; private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap(); private final EhcacheClientEntity.Timeouts operationTimeouts; - private UUID clientId = UUID.randomUUID(); private volatile Connection clusterConnection; private 
EhcacheClientEntityFactory entityFactory; @@ -150,7 +147,7 @@ public void start(final ServiceProvider serviceProvider) { entity = autoCreateEntity(); } else { try { - entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration(), clientId); + entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); } catch (EntityNotFoundException e) { throw new IllegalStateException("The clustered tier manager '" + entityIdentifier + "' does not exist." + " Please review your configuration.", e); @@ -185,7 +182,7 @@ private void initClusterConnection() { private EhcacheClientEntity autoCreateEntity() throws EhcacheEntityValidationException, IllegalStateException { while (true) { try { - entityFactory.create(entityIdentifier, configuration.getServerConfiguration(), clientId); + entityFactory.create(entityIdentifier, configuration.getServerConfiguration()); } catch (EhcacheEntityCreationException e) { throw new IllegalStateException("Could not create the clustered tier manager '" + entityIdentifier + "'.", e); } catch (EntityAlreadyExistsException e) { @@ -197,7 +194,7 @@ private EhcacheClientEntity autoCreateEntity() throws EhcacheEntityValidationExc + "'; create operation timed out", e); } try { - return retrieveUntilClientIdIsUnique(); + return entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); } catch (EntityNotFoundException e) { //ignore - loop and try to create } catch (TimeoutException e) { @@ -207,20 +204,6 @@ private EhcacheClientEntity autoCreateEntity() throws EhcacheEntityValidationExc } } - private EhcacheClientEntity retrieveUntilClientIdIsUnique() throws TimeoutException, EntityNotFoundException { - while(true) { - try { - return entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration(), clientId); - } catch (EhcacheEntityValidationException e) { - if (!(e.getCause().getCause() instanceof InvalidClientIdException)) { - throw e; - } else { - this.clientId = UUID.randomUUID(); - } - } - } - } - @Override public void startForMaintenance(ServiceProvider serviceProvider) { initClusterConnection(); @@ -327,7 +310,7 @@ public void destroy(String name) throws CachePersistenceException { initClusterConnection(); createEntityFactory(); try { - entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration(), clientId); + entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); } catch (EntityNotFoundException e) { // No entity on the server, so no need to destroy anything } catch (TimeoutException e) { @@ -419,7 +402,7 @@ public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifie + "'; validate operation timed out", e); } - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory(cacheId, clientId); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory(cacheId, entity.getClientId()); switch (configuredConsistency) { case STRONG: return new StrongServerStoreProxy(messageFactory, entity); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java index 98406eb30b..765816809d 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java @@ -44,8 +44,6 @@ 
public class EhcacheClientEntityFactoryTest { - private static final UUID CLIENT_ID = UUID.randomUUID(); - @Test public void testCreate() throws Exception { EhcacheClientEntity entity = mock(EhcacheClientEntity.class); @@ -57,7 +55,7 @@ public void testCreate() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); - factory.create("test", null, CLIENT_ID); + factory.create("test", null); verify(entityRef).create(any(UUID.class)); verify(entity).configure(any(ServerSideConfiguration.class)); verify(entity).close(); @@ -76,7 +74,7 @@ public void testCreateBadConfig() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); try { - factory.create("test", null, CLIENT_ID); + factory.create("test", null); fail("Expecting EhcacheEntityCreationException"); } catch (EhcacheEntityCreationException e) { // expected @@ -98,7 +96,7 @@ public void testCreateWhenExisting() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); try { - factory.create("test", null, CLIENT_ID); + factory.create("test", null); fail("Expected EntityAlreadyExistsException"); } catch (EntityAlreadyExistsException e) { //expected @@ -116,7 +114,7 @@ public void testRetrieve() throws Exception { addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); - assertThat(factory.retrieve("test", null, CLIENT_ID), is(entity)); + assertThat(factory.retrieve("test", null), is(entity)); verify(entity).validate(any(ServerSideConfiguration.class)); verify(entity, never()).close(); } @@ -134,7 +132,7 @@ public void testRetrieveFailedValidate() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); try { - factory.retrieve("test", null, CLIENT_ID); + factory.retrieve("test", null); fail("Expecting IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected @@ -155,7 +153,7 @@ public void testRetrieveWhenNotExisting() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(connection); try { - factory.retrieve("test", null, CLIENT_ID); + factory.retrieve("test", null); fail("Expected EntityNotFoundException"); } catch (EntityNotFoundException e) { //expected diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java index 4e262737f1..1464038cf7 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java @@ -53,7 +53,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Properties; -import java.util.UUID; import java.util.concurrent.TimeoutException; import static org.ehcache.clustered.util.StatisticsTestUtils.validateStat; @@ -74,7 +73,6 @@ public class ClusteredStoreTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); - private static final UUID CLIENT_ID = UUID.randomUUID(); ClusteredStore store; @@ -90,9 +88,9 @@ public void setup() throws Exception { ServerSideConfiguration 
serverConfig = new ServerSideConfiguration("defaultResource", Collections.emptyMap()); - entityFactory.create("TestCacheManager", serverConfig, CLIENT_ID); + entityFactory.create("TestCacheManager", serverConfig); - EhcacheClientEntity clientEntity = entityFactory.retrieve("TestCacheManager", serverConfig, CLIENT_ID); + EhcacheClientEntity clientEntity = entityFactory.retrieve("TestCacheManager", serverConfig); ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4, MemoryUnit.MB); ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), @@ -102,7 +100,7 @@ public void setup() throws Exception { null ); clientEntity.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); - ServerStoreMessageFactory factory = new ServerStoreMessageFactory(CACHE_IDENTIFIER, CLIENT_ID); + ServerStoreMessageFactory factory = new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity.getClientId()); ServerStoreProxy serverStoreProxy = new NoInvalidationServerStoreProxy(factory, clientEntity); TestTimeSource testTimeSource = new TestTimeSource(); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java index 5a180766c7..52b0cfc674 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java @@ -37,7 +37,6 @@ import java.util.Collections; import java.util.List; import java.util.Properties; -import java.util.UUID; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -75,15 +74,12 @@ public static void setUp() throws Exception { EhcacheClientEntityFactory entityFactory1 = new EhcacheClientEntityFactory(connection1); EhcacheClientEntityFactory entityFactory2 = new EhcacheClientEntityFactory(connection2); - UUID clientId1 = UUID.randomUUID(); - UUID clientId2 = UUID.randomUUID(); - entityFactory1.create("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId1); + new ServerSideConfiguration("defaultResource", Collections.emptyMap())); clientEntity1 = entityFactory1.retrieve("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId1); + new ServerSideConfiguration("defaultResource", Collections.emptyMap())); clientEntity2 = entityFactory2.retrieve("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId2); + new ServerSideConfiguration("defaultResource", Collections.emptyMap())); ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(16L, MemoryUnit.MB); @@ -96,8 +92,8 @@ public static void setUp() throws Exception { clientEntity1.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); clientEntity2.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); - serverStoreProxy1 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientId1), clientEntity1); - serverStoreProxy2 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientId2), clientEntity2); + serverStoreProxy1 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity1.getClientId()), 
clientEntity1); + serverStoreProxy2 = new EventualServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity2.getClientId()), clientEntity2); } @AfterClass diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java index 68fe2203a5..4b58bb1f0e 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java @@ -37,7 +37,6 @@ import java.util.Collections; import java.util.Iterator; import java.util.Properties; -import java.util.UUID; import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; @@ -49,7 +48,6 @@ public class NoInvalidationServerStoreProxyTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); - private static final UUID CLIENT_ID = UUID.randomUUID(); private static EhcacheClientEntity clientEntity; private static NoInvalidationServerStoreProxy serverStoreProxy; @@ -66,15 +64,15 @@ public static void setUp() throws Exception { ServerSideConfiguration serverConfig = new ServerSideConfiguration("defaultResource", Collections.emptyMap()); - entityFactory.create("TestCacheManager", serverConfig, CLIENT_ID); - clientEntity = entityFactory.retrieve("TestCacheManager", serverConfig, CLIENT_ID); + entityFactory.create("TestCacheManager", serverConfig); + clientEntity = entityFactory.retrieve("TestCacheManager", serverConfig); ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(16L, MemoryUnit.MB); clientEntity.createCache(CACHE_IDENTIFIER, new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class .getName(), null)); - serverStoreProxy = new NoInvalidationServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, CLIENT_ID), clientEntity); + serverStoreProxy = new NoInvalidationServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity.getClientId()), clientEntity); } @AfterClass diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java index e86e82d8c9..1ae2894efc 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxyTest.java @@ -38,7 +38,6 @@ import java.util.Collections; import java.util.List; import java.util.Properties; -import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; @@ -82,15 +81,12 @@ public static void setUp() throws Exception { EhcacheClientEntityFactory entityFactory1 = new EhcacheClientEntityFactory(connection1); EhcacheClientEntityFactory entityFactory2 = new EhcacheClientEntityFactory(connection2); - UUID clientId1 = UUID.randomUUID(); - UUID clientId2 = 
UUID.randomUUID(); - entityFactory1.create("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId1); + new ServerSideConfiguration("defaultResource", Collections.emptyMap())); clientEntity1 = entityFactory1.retrieve("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId1); + new ServerSideConfiguration("defaultResource", Collections.emptyMap())); clientEntity2 = entityFactory2.retrieve("TestCacheManager", - new ServerSideConfiguration("defaultResource", Collections.emptyMap()), clientId2); + new ServerSideConfiguration("defaultResource", Collections.emptyMap())); ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(4L, MemoryUnit.MB); @@ -103,8 +99,8 @@ public static void setUp() throws Exception { clientEntity1.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); clientEntity2.validateCache(CACHE_IDENTIFIER, serverStoreConfiguration); - serverStoreProxy1 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientId1), clientEntity1); - serverStoreProxy2 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientId2), clientEntity2); + serverStoreProxy1 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity1.getClientId()), clientEntity1); + serverStoreProxy2 = new StrongServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity2.getClientId()), clientEntity2); } @AfterClass diff --git a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java b/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java index 3694a43a8b..e61b3895df 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java @@ -18,7 +18,6 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.clustered.server.state.ClientMessageTracker; import org.terracotta.entity.ActiveServerEntity; import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.ConcurrencyStrategy; @@ -137,6 +136,5 @@ public Set getSharedResourcePoolIds() { public Set getDedicatedResourcePoolIds() { return ehcacheStateService.getDedicatedResourcePoolIds(); } - } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java index d4b663fd7b..b937b7a34b 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.Collections; import java.util.Map; -import java.util.UUID; import org.ehcache.clustered.client.internal.EhcacheClientEntityFactory; import org.ehcache.clustered.client.internal.EhcacheEntityCreationException; @@ -45,7 +44,6 @@ public class EhcacheClientEntityFactoryIntegrationTest { private static final Map EMPTY_RESOURCE_MAP = Collections.emptyMap(); - private static final UUID CLIENT_ID = UUID.randomUUID(); private static final 
String RESOURCE_CONFIG = "" @@ -74,16 +72,16 @@ public static void closeConnection() throws IOException { public void testCreate() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); - factory.create("testCreate", new ServerSideConfiguration(EMPTY_RESOURCE_MAP), CLIENT_ID); + factory.create("testCreate", new ServerSideConfiguration(EMPTY_RESOURCE_MAP)); } @Test public void testCreateWhenExisting() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); - factory.create("testCreateWhenExisting", new ServerSideConfiguration(EMPTY_RESOURCE_MAP), CLIENT_ID); + factory.create("testCreateWhenExisting", new ServerSideConfiguration(EMPTY_RESOURCE_MAP)); try { factory.create("testCreateWhenExisting", - new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(42L, "bar"))), CLIENT_ID); + new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(42L, "bar")))); fail("Expected EntityAlreadyExistsException"); } catch (EntityAlreadyExistsException e) { //expected @@ -95,11 +93,11 @@ public void testCreateWithBadConfigCleansUp() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); try { - factory.create("testCreateWithBadConfigCleansUp", new ServerSideConfiguration("flargle", EMPTY_RESOURCE_MAP), CLIENT_ID); + factory.create("testCreateWithBadConfigCleansUp", new ServerSideConfiguration("flargle", EMPTY_RESOURCE_MAP)); fail("Expected EhcacheEntityCreationException"); } catch (EhcacheEntityCreationException e) { try { - factory.retrieve("testCreateWithBadConfigCleansUp", null, CLIENT_ID); + factory.retrieve("testCreateWithBadConfigCleansUp", null); fail("Expected EntityNotFoundException"); } catch (EntityNotFoundException f) { //expected @@ -111,19 +109,19 @@ public void testCreateWithBadConfigCleansUp() throws Exception { public void testRetrieveWithGoodConfig() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); factory.create("testRetrieveWithGoodConfig", - new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(43L, "primary"))), CLIENT_ID); + new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(43L, "primary")))); assertThat(factory.retrieve("testRetrieveWithGoodConfig", - new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(43L, "primary"))), CLIENT_ID), notNullValue()); + new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(43L, "primary")))), notNullValue()); } @Test public void testRetrieveWithBadConfig() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); factory.create("testRetrieveWithBadConfig", - new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(42L, "primary"))), CLIENT_ID); + new ServerSideConfiguration(Collections.singletonMap("foo", new Pool(42L, "primary")))); try { factory.retrieve("testRetrieveWithBadConfig", - new ServerSideConfiguration(Collections.singletonMap("bar", new Pool(42L, "primary"))), CLIENT_ID); + new ServerSideConfiguration(Collections.singletonMap("bar", new Pool(42L, "primary")))); fail("Expected EhcacheEntityValidationException"); } catch (EhcacheEntityValidationException e) { //expected @@ -134,7 +132,7 @@ public void testRetrieveWithBadConfig() throws Exception { public void testRetrieveWhenNotExisting() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); try { - 
factory.retrieve("testRetrieveWhenNotExisting", null, CLIENT_ID); + factory.retrieve("testRetrieveWhenNotExisting", null); fail("Expected EntityNotFoundException"); } catch (EntityNotFoundException e) { //expected @@ -144,7 +142,7 @@ public void testRetrieveWhenNotExisting() throws Exception { @Test public void testDestroy() throws Exception { EhcacheClientEntityFactory factory = new EhcacheClientEntityFactory(CONNECTION); - factory.create("testDestroy", new ServerSideConfiguration(Collections.emptyMap()), CLIENT_ID); + factory.create("testDestroy", new ServerSideConfiguration(Collections.emptyMap())); factory.destroy("testDestroy"); } diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index 430006ccde..af6aac30b9 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -29,11 +29,6 @@ dependencies { provided "org.terracotta:standard-cluster-services:$parent.terracottaApisVersion" } -def java8 = { - JavaVersion.current().isJava8Compatible() -} - - compileJava { options.fork = true; options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') @@ -45,7 +40,7 @@ compileTestJava { } javadoc { - options.addStringOption('Xdoclint:none', '-quiet') + executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javadoc') } sourceCompatibility = 1.8 diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 469abc631a..5b0de6a98c 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -25,7 +25,6 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.ehcache.clustered.common.Consistency; @@ -268,8 +267,7 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe } clientState.attach(); ReconnectData reconnectData = reconnectDataCodec.decode(extendedReconnectData); - clientIdMap.put(clientDescriptor, reconnectData.getClientId()); - invalidIds.add(reconnectData.getClientId()); + addClientId(clientDescriptor, reconnectData.getClientId()); Set cacheIds = reconnectData.getAllCaches(); for (final String cacheId : cacheIds) { ServerStoreImpl serverStore = ehcacheStateService.getStore(cacheId); @@ -548,7 +546,7 @@ public void destroy() { */ private void configure(ClientDescriptor clientDescriptor, ConfigureStoreManager message) throws ClusterException { validateClientConnected(clientDescriptor); - if (ehcacheStateService.getClientMessageTracker().shouldConfigure(message.getClientId(), message.getId())) { + if (ehcacheStateService.getClientMessageTracker().isConfigureApplicable(message.getClientId(), message.getId())) { ehcacheStateService.configure(message); } this.clientStateMap.get(clientDescriptor).attach(); @@ -567,12 +565,16 @@ private void validate(ClientDescriptor clientDescriptor, ValidateStoreManager me if (invalidIds.contains(message.getClientId())) { throw new InvalidClientIdException("Client ID : " + message.getClientId() + " is already being tracked by Active"); } - clientIdMap.put(clientDescriptor, message.getClientId()); - invalidIds.add(message.getClientId()); + addClientId(clientDescriptor, message.getClientId()); 
ehcacheStateService.validate(message); this.clientStateMap.get(clientDescriptor).attach(); } + private void addClientId(ClientDescriptor clientDescriptor, UUID clientId) { + clientIdMap.put(clientDescriptor, clientId); + invalidIds.add(clientId); + } + /** * Handles the {@link CreateServerStore CreateServerStore} message. This message is used by a client to * create a new {@link ServerStore}; if the {@code ServerStore} exists, a failure is returned to the client. diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java index c4b186d160..05c8fb14a3 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java @@ -63,7 +63,7 @@ public void setEntityConfiguredStamp(UUID clientId, long timestamp) { this.configuredTimestamp = timestamp; } - public boolean shouldConfigure(UUID clientId, long timestamp) { + public boolean isConfigureApplicable(UUID clientId, long timestamp) { if (entityConfiguredStamp == null) { return true; } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java index 86aaaa845c..3ee0e5c844 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java @@ -19,15 +19,17 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.locks.ReentrantLock; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; public class MessageTracker { private final ConcurrentHashMap inProgressMessages = new ConcurrentHashMap<>(); - private long lowerWaterMark = -1L; //Always to be updated under lock below - private final AtomicLong higerWaterMark = new AtomicLong(-1L); - private final ReentrantLock lwmLock = new ReentrantLock(); + private long lowerWaterMark = -1L; + private final AtomicLong higherWaterMark = new AtomicLong(-1L); + private final ReadWriteLock lwmLock = new ReentrantReadWriteLock(); /** * This method is only meant to be called by the Active Entity. 
@@ -41,19 +43,26 @@ public class MessageTracker { * @return whether the entity should apply the message or not */ boolean shouldApply(long msgId) { - if (msgId < lowerWaterMark) { - return false; + Lock lock = lwmLock.readLock(); + try { + lock.lock(); + if (msgId < lowerWaterMark) { + return false; + } + } finally { + lock.unlock(); } - if (msgId > higerWaterMark.get()) { + if (msgId > higherWaterMark.get()) { return true; } final AtomicBoolean shouldApply = new AtomicBoolean(false); inProgressMessages.computeIfPresent(msgId, (id, state) -> { - if (state != true) { + if (!state) { shouldApply.set(true); } - return null; + return true; }); + updateLowerWaterMark(); return shouldApply.get(); } @@ -74,12 +83,33 @@ void track(long msgId) { */ void applied(long msgId) { inProgressMessages.computeIfPresent(msgId, ((id, state) -> state = true)); - if (lwmLock.tryLock()) { + updateLowerWaterMark(); + } + + boolean isEmpty() { + return inProgressMessages.isEmpty(); + } + + private void updateHigherWaterMark(long msgId) { + while(true) { + long old = higherWaterMark.get(); + if (msgId < old) { + return; + } + if (higherWaterMark.compareAndSet(old, msgId)) { + break; + } + } + } + + private void updateLowerWaterMark() { + Lock lock = lwmLock.writeLock(); + if (lock.tryLock()) { try { - for (long i = lowerWaterMark + 1; i<= higerWaterMark.get(); i++) { + for (long i = lowerWaterMark + 1; i <= higherWaterMark.get(); i++) { final AtomicBoolean removed = new AtomicBoolean(false); inProgressMessages.computeIfPresent(i, (id, state) -> { - if (state == true) { + if (state) { removed.set(true); return null; } @@ -92,24 +122,7 @@ void applied(long msgId) { } } } finally { - lwmLock.unlock(); - } - } - - } - - boolean isEmpty() { - return inProgressMessages.isEmpty(); - } - - private void updateHigherWaterMark(long msgId) { - if (msgId < higerWaterMark.get()) { - return; - } - while(true) { - long old = higerWaterMark.get(); - if (higerWaterMark.compareAndSet(old, msgId)) { - break; + lock.unlock(); } } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java index 7046a6d2bb..f225d2ec84 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/state/MessageTrackerTest.java @@ -133,7 +133,7 @@ public void testDuplicateMessagesForTrackedMessages() throws Exception { nonAppliedMsgs.forEach(x -> assertThat(messageTracker.shouldApply(x), is(true))); -//TODO: #1211 assertThat(messageTracker.isEmpty(), is(true)); + assertThat(messageTracker.isEmpty(), is(true)); LongStream.of(input).filter(x -> !nonAppliedMsgs.contains(x)).forEach(x -> assertThat(messageTracker.shouldApply(x), is(false))); From b25cb36150709f8ef74c28523bcf43d20e23695b Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Mon, 19 Sep 2016 16:10:03 -0400 Subject: [PATCH 035/218] :green_heart: Fix non reliable ManagementTest --- docs/src/docs/asciidoc/user/management.adoc | 1 - .../java/org/ehcache/docs/ManagementTest.java | 56 ++++++++++--------- 2 files changed, 29 insertions(+), 28 deletions(-) diff --git a/docs/src/docs/asciidoc/user/management.adoc b/docs/src/docs/asciidoc/user/management.adoc index 07f9569d93..d514b74c9a 100644 --- a/docs/src/docs/asciidoc/user/management.adoc +++ b/docs/src/docs/asciidoc/user/management.adoc @@ -39,7 +39,6 @@ 
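The MessageTracker rework in the previous commit keeps a map of in-progress message ids bounded by a lower watermark: everything at or below the watermark is known to be applied and can be forgotten, duplicates above it are detected per id, and the watermark is advanced opportunistically under a write lock. The sketch below shows the same bookkeeping in a much-simplified form, using coarse synchronized methods instead of the read/write lock and CAS scheme, and assuming ids are assigned contiguously from 0; the class and field names are illustrative, not the actual Ehcache class.

import java.util.TreeMap;

// Simplified duplicate-message bookkeeping: ids at or below the lower watermark are known
// to be applied and are no longer stored; newer ids are tracked individually until applied.
class SimpleMessageTracker {
  private final TreeMap<Long, Boolean> inProgress = new TreeMap<>();
  private long lowerWaterMark = -1L;

  /** @return true if the entity should (re)apply the message with this id. */
  synchronized boolean shouldApply(long msgId) {
    if (msgId <= lowerWaterMark) {
      return false;                          // already applied, then compacted away
    }
    Boolean applied = inProgress.get(msgId);
    if (applied == null) {
      inProgress.put(msgId, Boolean.FALSE);  // first time this id is seen
      return true;
    }
    return !applied;                         // a replay of an un-applied message is applied again
  }

  /** Mark the message as applied and advance the watermark over the applied prefix. */
  synchronized void applied(long msgId) {
    inProgress.put(msgId, Boolean.TRUE);
    while (Boolean.TRUE.equals(inProgress.get(lowerWaterMark + 1))) {
      inProgress.remove(++lowerWaterMark);   // forget contiguous applied ids
    }
  }

  synchronized boolean isEmpty() {
    return inProgress.isEmpty();
  }
}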
include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest. <4> Perform a few gets to increment the statistic's counter <5> Create the target statistic's context <6> Collect the get count statistic -<7> Check that the statistic reports the expected count Obviously, you may use the above technique to pass your own implementation of `ManagementRegistry`. diff --git a/management/src/test/java/org/ehcache/docs/ManagementTest.java b/management/src/test/java/org/ehcache/docs/ManagementTest.java index fd7243b967..0e12f0183a 100644 --- a/management/src/test/java/org/ehcache/docs/ManagementTest.java +++ b/management/src/test/java/org/ehcache/docs/ManagementTest.java @@ -24,6 +24,8 @@ import org.ehcache.config.units.MemoryUnit; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.SharedManagementService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.providers.statistics.StatsUtil; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.ehcache.management.registry.DefaultSharedManagementService; @@ -38,13 +40,11 @@ import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.history.CounterHistory; import org.terracotta.management.registry.ResultSet; +import org.terracotta.management.registry.StatisticQuery; import java.util.Collection; import java.util.Iterator; import java.util.concurrent.TimeUnit; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; -import org.ehcache.management.providers.statistics.StatsUtil; public class ManagementTest { @@ -78,26 +78,27 @@ public void usingManagementRegistry() throws Exception { aCache.get(0L); aCache.get(0L); - Thread.sleep(1000); - Context context = StatsUtil.createContext(managementRegistry); // <5> - ResultSet counters = managementRegistry.withCapability("StatisticsCapability") // <6> + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") // <6> .queryStatistic("Cache:HitCount") .on(context) - .build() - .execute(); + .build(); + + long onHeapHitCount; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); - ContextualStatistics statisticsContext = counters.getResult(context); + Assert.assertThat(counters.size(), Matchers.is(1)); - Assert.assertThat(counters.size(), Matchers.is(1)); + CounterHistory onHeapStore_Hit_Count = statisticsContext.getStatistic(CounterHistory.class, "Cache:HitCount"); - CounterHistory onHeapStore_Hit_Count = statisticsContext.getStatistic(CounterHistory.class, "Cache:HitCount"); - while(!StatsUtil.isHistoryReady(onHeapStore_Hit_Count, 0L)) {} - int mostRecentIndex = onHeapStore_Hit_Count.getValue().length - 1; - long onHeapHitCount = onHeapStore_Hit_Count.getValue()[mostRecentIndex].getValue(); + int mostRecentIndex = onHeapStore_Hit_Count.getValue().length - 1; + onHeapHitCount = onHeapStore_Hit_Count.getValue()[mostRecentIndex].getValue(); - Assert.assertThat(onHeapHitCount, Matchers.equalTo(4L)); // <7> + } while (onHeapHitCount != 4L); } finally { if(cacheManager != null) cacheManager.close(); @@ -221,22 +222,23 @@ public void managingMultipleCacheManagers() throws Exception { cache.get(1L);//cache miss cache.get(2L);//cache miss - Thread.sleep(1000); + StatisticQuery query = 
sharedManagementService.withCapability("StatisticsCapability") + .queryStatistic("Cache:MissCount") + .on(context1) + .on(context2) + .build(); - ResultSet counters = sharedManagementService.withCapability("StatisticsCapability") - .queryStatistic("Cache:MissCount") - .on(context1) - .on(context2) - .build() - .execute(); + long val; + do { + ResultSet counters = query.execute(); - ContextualStatistics statisticsContext1 = counters.getResult(context1); + ContextualStatistics statisticsContext1 = counters.getResult(context1); - CounterHistory counterContext1 = statisticsContext1.getStatistic(CounterHistory.class, "Cache:MissCount");; + CounterHistory counterContext1 = statisticsContext1.getStatistic(CounterHistory.class, "Cache:MissCount"); - while(!StatsUtil.isHistoryReady(counterContext1, 0L)) {} - int mostRecentSampleIndex = counterContext1.getValue().length - 1; - Assert.assertEquals(2L, counterContext1.getValue()[mostRecentSampleIndex].getValue().longValue()); + int mostRecentSampleIndex = counterContext1.getValue().length - 1; + val = counterContext1.getValue()[mostRecentSampleIndex].getValue(); + } while(val != 2); } finally { if(cacheManager2 != null) cacheManager2.close(); From 5fbaffbb9b39b899572c552c66f79b57157cd062 Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Mon, 19 Sep 2016 17:30:48 -0400 Subject: [PATCH 036/218] Add comments --- .../src/test/java/org/ehcache/docs/ManagementTest.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/management/src/test/java/org/ehcache/docs/ManagementTest.java b/management/src/test/java/org/ehcache/docs/ManagementTest.java index 0e12f0183a..885c6e058d 100644 --- a/management/src/test/java/org/ehcache/docs/ManagementTest.java +++ b/management/src/test/java/org/ehcache/docs/ManagementTest.java @@ -86,6 +86,8 @@ public void usingManagementRegistry() throws Exception { .build(); long onHeapHitCount; + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct value : 4 do { ResultSet counters = query.execute(); @@ -95,6 +97,8 @@ public void usingManagementRegistry() throws Exception { CounterHistory onHeapStore_Hit_Count = statisticsContext.getStatistic(CounterHistory.class, "Cache:HitCount"); + // hit count is a sampled stat, for example its values could be [0,0,3,4]. + // In the present case, only the last value is important to us , the cache was eventually hit 4 times int mostRecentIndex = onHeapStore_Hit_Count.getValue().length - 1; onHeapHitCount = onHeapStore_Hit_Count.getValue()[mostRecentIndex].getValue(); @@ -229,6 +233,8 @@ public void managingMultipleCacheManagers() throws Exception { .build(); long val; + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct value : 2 do { ResultSet counters = query.execute(); @@ -236,6 +242,8 @@ public void managingMultipleCacheManagers() throws Exception { CounterHistory counterContext1 = statisticsContext1.getStatistic(CounterHistory.class, "Cache:MissCount"); + // miss count is a sampled stat, for example its values could be [0,1,2]. 
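The ManagementTest changes above replace the fixed Thread.sleep(1000) with a loop that re-executes the StatisticQuery until the sampled history finally reports the expected count, relying on a test timeout to bound the wait. That poll-until-expected idiom can be written as a small reusable helper; the helper below is a sketch under that idea and is not part of the Ehcache or Terracotta management API.

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.LongSupplier;

final class StatPolling {
  private StatPolling() {}

  // Re-sample until the supplier reports the expected value, or give up after the deadline.
  // Sampled statistics are published asynchronously, so the first samples may be stale or empty.
  static void awaitStatistic(LongSupplier sampler, long expected, long timeout, TimeUnit unit)
      throws InterruptedException, TimeoutException {
    long deadline = System.nanoTime() + unit.toNanos(timeout);
    while (sampler.getAsLong() != expected) {
      if (System.nanoTime() - deadline > 0) {
        throw new TimeoutException("Statistic never reached " + expected);
      }
      TimeUnit.MILLISECONDS.sleep(100);
    }
  }

  public static void main(String[] args) throws Exception {
    long start = System.currentTimeMillis();
    // Simulated sampled statistic that only reaches 4 after roughly half a second.
    awaitStatistic(() -> System.currentTimeMillis() - start > 500 ? 4L : 0L, 4L, 10, TimeUnit.SECONDS);
    System.out.println("statistic reached expected value");
  }
}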
+ // In the present case, only the last value is important to us , the cache was eventually missed 2 times int mostRecentSampleIndex = counterContext1.getValue().length - 1; val = counterContext1.getValue()[mostRecentSampleIndex].getValue(); } while(val != 2); From 515d7c6719f663771e1df2f4df8e6e8d8f70119d Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Mon, 19 Sep 2016 16:17:56 -0400 Subject: [PATCH 037/218] :arrow_up: Version update - terracotta-apis 1.0.7.beta - terracotta-configuration 10.0.7.beta - tc-passthrough-testing 1.0.7.beta - terracotta-core 5.0.7-beta - galvan 1.0.7-beta - platform 5.0.7.beta --- build.gradle | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.gradle b/build.gradle index da5b5c16d0..68b0c9c201 100644 --- a/build.gradle +++ b/build.gradle @@ -26,15 +26,15 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.6.beta7' + terracottaPlatformVersion = '5.0.7.beta' managementVersion = terracottaPlatformVersion - terracottaApisVersion = '1.0.6.beta' - terracottaCoreVersion = '5.0.6-beta2' + terracottaApisVersion = '1.0.7.beta' + terracottaCoreVersion = '5.0.7-beta' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.6.beta2' + terracottaPassthroughTestingVersion = '1.0.7.beta' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.6-beta2' + galvanVersion = '1.0.7-beta' utils = new Utils(baseVersion, logger) isReleaseVersion = !baseVersion.endsWith('SNAPSHOT') From 5d1ef12c17519493b6bac28215d931f09cc792b5 Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Tue, 20 Sep 2016 21:44:30 -0400 Subject: [PATCH 038/218] Fix unstable stats tests --- .../client/internal/store/ClusteredStore.java | 2 +- .../ClusteringManagementServiceTest.java | 38 ++++++++++++------- .../impl/internal/store/heap/OnHeapStore.java | 2 +- .../java/org/ehcache/docs/ManagementTest.java | 24 ++++++++---- .../DefaultManagementRegistryServiceTest.java | 1 - 5 files changed, 43 insertions(+), 24 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index 9dd06ea84b..dcd465a2d3 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -757,7 +757,7 @@ public AuthoritativeTier createAuthoritativeTier(Configuration[] samples = stats[0].getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); - assertThat(stats.length, equalTo(1)); - assertThat(stats[0].getContext().get("cacheName"), equalTo("cache-1")); - assertThat(samples[0].getValue(), equalTo(2L)); + long val = 0; + + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct value : 2 + do { + + // get the stats (we are getting the primitive counter, not the sample history) + ContextualStatistics[] stats = waitForNextStats(); + Sample[] samples = stats[0].getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + + if(stats.length == 1 && stats[0].getContext().get("cacheName").equals("cache-1") && samples.length > 0) { + val = samples[samples.length - 1].getValue(); + } + } while(val != 2); // do some other operations cache1.get("key1"); cache1.get("key2"); - stats = waitForNextStats(); - 
samples = stats[0].getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + do { + + ContextualStatistics[] stats = waitForNextStats(); + Sample[] samples = stats[0].getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + + if(stats.length == 1 && stats[0].getContext().get("cacheName").equals("cache-1") && samples.length > 0) { + val = samples[samples.length - 1].getValue(); + } + + } while(val != 4); - assertThat(stats.length, equalTo(1)); - assertThat(stats[0].getContext().get("cacheName"), equalTo("cache-1")); - assertThat(samples.length, greaterThanOrEqualTo(1)); - assertThat(samples[samples.length - 1].getValue(), equalTo(4L)); } -} \ No newline at end of file +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index 0a42d93870..eecb66e345 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -1775,7 +1775,7 @@ public CachingTier createCachingTier(Configuration storeConfi put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); }}, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(cachingTier); - tieredOps.add(get); + tieredOps.add(evict); this.tierOperationStatistics.put(cachingTier, tieredOps); return cachingTier; diff --git a/management/src/test/java/org/ehcache/docs/ManagementTest.java b/management/src/test/java/org/ehcache/docs/ManagementTest.java index 885c6e058d..8b6a8a0b11 100644 --- a/management/src/test/java/org/ehcache/docs/ManagementTest.java +++ b/management/src/test/java/org/ehcache/docs/ManagementTest.java @@ -31,7 +31,9 @@ import org.ehcache.management.registry.DefaultSharedManagementService; import org.hamcrest.Matchers; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.Timeout; import org.terracotta.management.model.capabilities.Capability; import org.terracotta.management.model.capabilities.context.CapabilityContext; import org.terracotta.management.model.capabilities.descriptors.Descriptor; @@ -50,7 +52,10 @@ public class ManagementTest { private final EhcacheStatisticsProviderConfiguration EHCACHE_STATS_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); - @Test (timeout=5000) + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + public void usingManagementRegistry() throws Exception { // tag::usingManagementRegistry[] @@ -85,7 +90,7 @@ public void usingManagementRegistry() throws Exception { .on(context) .build(); - long onHeapHitCount; + long onHeapHitCount = 0; // it could be several seconds before the sampled stats could become available // let's try until we find the correct value : 4 do { @@ -99,8 +104,10 @@ public void usingManagementRegistry() throws Exception { // hit count is a sampled stat, for example its values could be [0,0,3,4]. 
// In the present case, only the last value is important to us , the cache was eventually hit 4 times - int mostRecentIndex = onHeapStore_Hit_Count.getValue().length - 1; - onHeapHitCount = onHeapStore_Hit_Count.getValue()[mostRecentIndex].getValue(); + if (onHeapStore_Hit_Count.getValue().length > 0) { + int mostRecentIndex = onHeapStore_Hit_Count.getValue().length - 1; + onHeapHitCount = onHeapStore_Hit_Count.getValue()[mostRecentIndex].getValue(); + } } while (onHeapHitCount != 4L); } @@ -192,7 +199,6 @@ public void actionCall() throws Exception { } //TODO update managingMultipleCacheManagers() documentation/asciidoc - @Test (timeout = 5000) public void managingMultipleCacheManagers() throws Exception { // tag::managingMultipleCacheManagers[] CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) @@ -232,7 +238,7 @@ public void managingMultipleCacheManagers() throws Exception { .on(context2) .build(); - long val; + long val = 0; // it could be several seconds before the sampled stats could become available // let's try until we find the correct value : 2 do { @@ -244,8 +250,10 @@ public void managingMultipleCacheManagers() throws Exception { // miss count is a sampled stat, for example its values could be [0,1,2]. // In the present case, only the last value is important to us , the cache was eventually missed 2 times - int mostRecentSampleIndex = counterContext1.getValue().length - 1; - val = counterContext1.getValue()[mostRecentSampleIndex].getValue(); + if (counterContext1.getValue().length > 0) { + int mostRecentSampleIndex = counterContext1.getValue().length - 1; + val = counterContext1.getValue()[mostRecentSampleIndex].getValue(); + } } while(val != 2); } finally { diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index c6c5049cb0..b16078358f 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -58,7 +58,6 @@ import org.ehcache.config.units.MemoryUnit; import org.junit.Rule; import org.junit.rules.TemporaryFolder; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; import org.terracotta.management.registry.StatisticQuery.Builder; public class DefaultManagementRegistryServiceTest { From 4ac7cac990b23387e806217ffed9b4a6ec85044a Mon Sep 17 00:00:00 2001 From: Ramesh Kavanappillil Date: Tue, 20 Sep 2016 19:49:46 +0530 Subject: [PATCH 039/218] Closes #1452 bump up offheapVersion --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 68b0c9c201..60e3f30ed8 100644 --- a/build.gradle +++ b/build.gradle @@ -19,7 +19,7 @@ ext { baseVersion = '3.2.0-SNAPSHOT' // Third parties - offheapVersion = '2.2.2' + offheapVersion = '2.3.0' statisticVersion = '1.2.0' jcacheVersion = '1.0.0' slf4jVersion = '1.7.7' From 858a47940d83cdea041ec5eb52bb356f066ff782 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Wed, 14 Sep 2016 15:51:43 +0530 Subject: [PATCH 040/218] Closes #1433 StateRepository provides StateHolder instead of ConcurrentMap --- .../ehcache/spi/persistence/StateHolder.java | 57 ++++++++++++++ .../spi/persistence/StateRepository.java | 19 +++-- ...eredMap.java => ClusteredStateHolder.java} 
| 76 ++----------------- .../service/ClusteredStateRepository.java | 6 +- ...usteredStateRepositoryReplicationTest.java | 12 +-- .../asciidoc/user/serializers-copiers.adoc | 8 +- .../persistence/FileBasedStateRepository.java | 38 +++++----- .../serialization/CompactJavaSerializer.java | 8 +- .../serialization/TransientStateHolder.java | 61 +++++++++++++++ .../TransientStateRepository.java | 21 ++--- .../FileBasedStateRepositoryTest.java | 50 ++++++------ .../TransientStateRepositoryTest.java | 12 ++- 12 files changed, 209 insertions(+), 159 deletions(-) create mode 100644 api/src/main/java/org/ehcache/spi/persistence/StateHolder.java rename clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/{ConcurrentClusteredMap.java => ClusteredStateHolder.java} (51%) create mode 100644 impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java diff --git a/api/src/main/java/org/ehcache/spi/persistence/StateHolder.java b/api/src/main/java/org/ehcache/spi/persistence/StateHolder.java new file mode 100644 index 0000000000..70d3c94dc9 --- /dev/null +++ b/api/src/main/java/org/ehcache/spi/persistence/StateHolder.java @@ -0,0 +1,57 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.spi.persistence; + +import java.util.Map; +import java.util.Set; + +/** + * A {@code Map} like structure that can hold key value mappings. + * + * @param type of Keys + * @param type of Values + */ +public interface StateHolder { + + /** + * If the specified key is not already associated with a value (or is mapped + * to {@code null}) associates it with the given value and returns + * {@code null}, else returns the current value. + * + * @param key a key + * @param value a value + * @return the previous value associated with the specified key, or + * {@code null} if there was no mapping for the key. + */ + V putIfAbsent(K key, V value); + + /** + * Retrieves the value mapped to the given {@code key} + * + * @param key a key + * @return the value mapped to the key + */ + V get(K key); + + /** + * Retrieves all the entries in the {@code StateHolder} as a {@code Set} of {@code Map.Entry} instances. + * + * @return the set of this {@code StateHolder} mappings + */ + Set> entrySet(); + +} diff --git a/api/src/main/java/org/ehcache/spi/persistence/StateRepository.java b/api/src/main/java/org/ehcache/spi/persistence/StateRepository.java index 898a89a8ba..25ea836c67 100644 --- a/api/src/main/java/org/ehcache/spi/persistence/StateRepository.java +++ b/api/src/main/java/org/ehcache/spi/persistence/StateRepository.java @@ -17,7 +17,6 @@ package org.ehcache.spi.persistence; import java.io.Serializable; -import java.util.concurrent.ConcurrentMap; /** * A repository allowing to preserve state in the context of a {@link org.ehcache.Cache}. @@ -25,17 +24,17 @@ public interface StateRepository { /** - * Gets a named persistent map rooted in the current {@code StateRepository}. 
+ * Gets a named state holder rooted in the current {@code StateRepository}. *

- * If the map existed already, it is returned with its content fully available. + * If the state holder existed already, it is returned with its content fully available. *

* - * @param name the map name - * @param keyClass concrete map key type - * @param valueClass concrete map value type - * @param the map key type, must be {@code Serializable} - * @param the map value type, must be {@code Serializable} - * @return a map + * @param name the state holder name + * @param keyClass concrete key type + * @param valueClass concrete value type + * @param the key type, must be {@code Serializable} + * @param the value type, must be {@code Serializable} + * @return a state holder */ - ConcurrentMap getPersistentConcurrentMap(String name, Class keyClass, Class valueClass); + StateHolder getPersistentStateHolder(String name, Class keyClass, Class valueClass); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ConcurrentClusteredMap.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java similarity index 51% rename from clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ConcurrentClusteredMap.java rename to clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java index ea23cf5beb..abcb2fa98d 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ConcurrentClusteredMap.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java @@ -21,44 +21,22 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.StateRepositoryMessageFactory; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.spi.persistence.StateHolder; -import java.util.Collection; import java.util.Map; import java.util.Set; -import java.util.UUID; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeoutException; -public class ConcurrentClusteredMap implements ConcurrentMap { +public class ClusteredStateHolder implements StateHolder { private final StateRepositoryMessageFactory messageFactory; private final EhcacheClientEntity entity; - public ConcurrentClusteredMap(final String cacheId, final String mapId, final EhcacheClientEntity entity) { + public ClusteredStateHolder(final String cacheId, final String mapId, final EhcacheClientEntity entity) { this.messageFactory = new StateRepositoryMessageFactory(cacheId, mapId, entity.getClientId()); this.entity = entity; } - @Override - public int size() { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public boolean isEmpty() { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public boolean containsKey(final Object key) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public boolean containsValue(final Object value) { - throw new UnsupportedOperationException("TODO"); - } - @Override public V get(final Object key) { return (V) getResponse(messageFactory.getMessage(key)); @@ -76,38 +54,8 @@ private Object getResponse(StateRepositoryOpMessage message) { } @Override - public V put(final K key, final V value) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public V remove(final Object key) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public void putAll(final Map m) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public void clear() { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public Set keySet() { - 
throw new UnsupportedOperationException("TODO"); - } - - @Override - public Collection values() { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public Set> entrySet() { - return (Set>) getResponse(messageFactory.entrySetMessage()); + public Set> entrySet() { + return (Set>) getResponse(messageFactory.entrySetMessage()); } @Override @@ -115,18 +63,4 @@ public V putIfAbsent(final K key, final V value) { return (V) getResponse(messageFactory.putIfAbsentMessage(key, value)); } - @Override - public boolean remove(final Object key, final Object value) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public boolean replace(final K key, final V oldValue, final V newValue) { - throw new UnsupportedOperationException("TODO"); - } - - @Override - public V replace(final K key, final V value) { - throw new UnsupportedOperationException("TODO"); - } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java index 5885ee523b..ca70c4cef3 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepository.java @@ -18,10 +18,10 @@ import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.spi.persistence.StateHolder; import org.ehcache.spi.persistence.StateRepository; import java.io.Serializable; -import java.util.concurrent.ConcurrentMap; /** * ClusteredStateRepository @@ -39,7 +39,7 @@ class ClusteredStateRepository implements StateRepository { } @Override - public ConcurrentMap getPersistentConcurrentMap(String name, Class keyClass, Class valueClass) { - return new ConcurrentClusteredMap(clusterCacheIdentifier.getId(), composedId + "-" + name, clientEntity); + public StateHolder getPersistentStateHolder(String name, Class keyClass, Class valueClass) { + return new ClusteredStateHolder(clusterCacheIdentifier.getId(), composedId + "-" + name, clientEntity); } } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java index 01327283b7..6f904bfd3e 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java @@ -25,6 +25,7 @@ import org.ehcache.clustered.client.service.ClusteringService; import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; import org.ehcache.clustered.server.EhcacheServerEntityService; +import org.ehcache.spi.persistence.StateHolder; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -37,7 +38,6 @@ import java.lang.reflect.Field; import java.net.URI; -import java.util.concurrent.ConcurrentMap; import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; import static org.hamcrest.Matchers.is; @@ -102,15 +102,15 @@ public Class getServiceType() { } }, "test", clientEntity); - ConcurrentMap testMap = 
stateRepository.getPersistentConcurrentMap("testMap", String.class, String.class); - testMap.putIfAbsent("One", "One"); - testMap.putIfAbsent("Two", "Two"); + StateHolder testHolder = stateRepository.getPersistentStateHolder("testHolder", String.class, String.class); + testHolder.putIfAbsent("One", "One"); + testHolder.putIfAbsent("Two", "Two"); clusterControl.terminateActive(); clusterControl.waitForActive(); - assertThat(testMap.get("One"), is("One")); - assertThat(testMap.get("Two"), is("Two")); + assertThat(testHolder.get("One"), is("One")); + assertThat(testHolder.get("Two"), is("Two")); service.stop(); } diff --git a/docs/src/docs/asciidoc/user/serializers-copiers.adoc b/docs/src/docs/asciidoc/user/serializers-copiers.adoc index d8cec9cf0c..f298160ed3 100644 --- a/docs/src/docs/asciidoc/user/serializers-copiers.adoc +++ b/docs/src/docs/asciidoc/user/serializers-copiers.adoc @@ -151,12 +151,12 @@ To address these requirements you can have a `StatefulSerializer` implementation public void init(StateRepository repository) { } ``` -The `StateRepository.getPersistentConcurrentMap(String, Class, Class)` provides a `ConcurrentMap` that you can use to store any relevant state. +The `StateRepository.getPersistentStateHolder(String, Class, Class)` provides a `StateHolder` (a map like structure) that you can use to store any relevant state. The `StateRepository` is provided by the authoritative tier of the cache and hence will have the same persistence properties of that tier. -For persistent caches it is highly recommended that all state is stored in these maps as the users won't have to worry about the persistence aspects of this map as it is taken care by `Ehcache`. +For persistent caches it is highly recommended that all state is stored in these holders as the users won't have to worry about the persistence aspects of this state holder as it is taken care by `Ehcache`. -* In the case of a disk persistent cache, the contents of the map will be persisted locally on to the disk. -* For clustered caches the contents are persisted in the cluster itself so that other clients using the same cache can also access the contents of the map. +* In the case of a disk persistent cache, the contents of the state holder will be persisted locally on to the disk. +* For clustered caches the contents are persisted in the cluster itself so that other clients using the same cache can also access the contents of the state holder. NOTE: The constructor with the signature `(ClassLoader classLoader, FileBasedPersistenceContext persistenceContext)` that existed till v3.1 has been removed since v3.2 in favor of `StatefulSerializer`s. 
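The documentation hunk above describes the new StateRepository.getPersistentStateHolder(String, Class, Class) API. Below is a minimal sketch of a StatefulSerializer built on it, modeled on the CompactJavaSerializer change later in this patch; the class name, the holder name and the trivial UTF-8 encoding are illustrative assumptions, not code from the patch:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    import org.ehcache.spi.persistence.StateHolder;
    import org.ehcache.spi.persistence.StateRepository;
    import org.ehcache.spi.serialization.StatefulSerializer;

    public class TrivialStatefulSerializer implements StatefulSerializer<String> {

      private volatile StateHolder<Integer, String> state;

      public TrivialStatefulSerializer(ClassLoader classLoader) {
        // serializers registered by class are instantiated with a ClassLoader argument
      }

      @Override
      public void init(StateRepository repository) {
        // the holder has the persistence properties of the cache's authoritative tier:
        // written to disk for disk-persistent caches, kept on the server for clustered caches
        state = repository.getPersistentStateHolder("trivial-serializer-state", Integer.class, String.class);
        // putIfAbsent is first-writer-wins, so concurrent clients converge on the same stored value
        state.putIfAbsent(0, "UTF-8");
      }

      @Override
      public ByteBuffer serialize(String object) {
        return ByteBuffer.wrap(object.getBytes(StandardCharsets.UTF_8));
      }

      @Override
      public String read(ByteBuffer binary) {
        byte[] bytes = new byte[binary.remaining()];
        binary.get(bytes);
        return new String(bytes, StandardCharsets.UTF_8);
      }

      @Override
      public boolean equals(String object, ByteBuffer binary) {
        return object.equals(read(binary.duplicate()));
      }
    }

Only init() interacts with the state holder here; the point of the sketch is that the serializer keeps no file handling of its own, which is what lets the same implementation work for transient, disk-persistent and clustered caches as the hunks in this patch describe.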
diff --git a/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java b/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java index df9654807b..ede4e96b55 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java @@ -20,6 +20,8 @@ import org.ehcache.CachePersistenceException; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.impl.serialization.TransientStateHolder; +import org.ehcache.spi.persistence.StateHolder; import org.ehcache.spi.persistence.StateRepository; import java.io.Closeable; @@ -42,10 +44,10 @@ */ class FileBasedStateRepository implements StateRepository, Closeable { - private static final String MAP_FILE_PREFIX = "map-"; - private static final String MAP_FILE_SUFFIX = ".bin"; + private static final String HOLDER_FILE_PREFIX = "holder-"; + private static final String HOLDER_FILE_SUFFIX = ".bin"; private final File dataDirectory; - private final ConcurrentMap knownMaps; + private final ConcurrentMap knownHolders; private final AtomicInteger nextIndex = new AtomicInteger(); FileBasedStateRepository(File directory) throws CachePersistenceException { @@ -56,7 +58,7 @@ class FileBasedStateRepository implements StateRepository, Closeable { throw new IllegalArgumentException(directory + " is not a directory"); } this.dataDirectory = directory; - knownMaps = new ConcurrentHashMap(); + knownHolders = new ConcurrentHashMap(); loadMaps(); } @@ -66,7 +68,7 @@ private void loadMaps() throws CachePersistenceException { for (File file : dataDirectory.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { - return name.endsWith(MAP_FILE_SUFFIX); + return name.endsWith(HOLDER_FILE_SUFFIX); } })) { FileInputStream fis = new FileInputStream(file); @@ -78,7 +80,7 @@ public boolean accept(File dir, String name) { if (nextIndex.get() <= tuple.index) { nextIndex.set(tuple.index + 1); } - knownMaps.put(name, tuple); + knownHolders.put(name, tuple); } finally { oin.close(); } @@ -87,13 +89,13 @@ public boolean accept(File dir, String name) { } } } catch (Exception e) { - knownMaps.clear(); + knownHolders.clear(); throw new CachePersistenceException("Failed to load existing StateRepository data", e); } } private void saveMaps() throws IOException { - for (Map.Entry entry : knownMaps.entrySet()) { + for (Map.Entry entry : knownHolders.entrySet()) { File outFile = new File(dataDirectory, createFileName(entry)); FileOutputStream fos = new FileOutputStream(outFile); try { @@ -110,20 +112,20 @@ private void saveMaps() throws IOException { } } - private String createFileName(Map.Entry entry) {return MAP_FILE_PREFIX + entry.getValue().index + "-" + safeIdentifier(entry.getKey(), false) + MAP_FILE_SUFFIX;} + private String createFileName(Map.Entry entry) {return HOLDER_FILE_PREFIX + entry.getValue().index + "-" + safeIdentifier(entry.getKey(), false) + HOLDER_FILE_SUFFIX;} @Override - public ConcurrentMap getPersistentConcurrentMap(String name, Class keyClass, Class valueClass) { - Tuple result = knownMaps.get(name); + public StateHolder getPersistentStateHolder(String name, Class keyClass, Class valueClass) { + Tuple result = knownHolders.get(name); if (result == null) { - ConcurrentHashMap newMap = new ConcurrentHashMap(); - result = knownMaps.putIfAbsent(name, new Tuple(nextIndex.getAndIncrement(), newMap)); + StateHolder holder = new TransientStateHolder(); + result = 
knownHolders.putIfAbsent(name, new Tuple(nextIndex.getAndIncrement(), holder)); if (result == null) { - return newMap; + return holder; } } - return (ConcurrentMap) result.map; + return (StateHolder) result.holder; } @Override @@ -133,11 +135,11 @@ public void close() throws IOException { static class Tuple implements Serializable { final int index; - final ConcurrentMap map; + final StateHolder holder; - Tuple(int index, ConcurrentMap map) { + Tuple(int index, StateHolder holder) { this.index = index; - this.map = map; + this.holder = holder; } } } diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java index 3409aa2345..47dfca1731 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java @@ -27,9 +27,6 @@ import java.lang.ref.WeakReference; import java.nio.ByteBuffer; import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -37,6 +34,7 @@ import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import org.ehcache.spi.persistence.StateHolder; import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.SerializerException; import org.ehcache.impl.internal.util.ByteBufferInputStream; @@ -53,7 +51,7 @@ */ public class CompactJavaSerializer implements StatefulSerializer { - private volatile ConcurrentMap readLookup; + private volatile StateHolder readLookup; private final ConcurrentMap readLookupLocalCache = new ConcurrentHashMap(); private final ConcurrentMap writeLookup = new ConcurrentHashMap(); @@ -80,7 +78,7 @@ public static Class> asTypedSerializer() { @Override public void init(final StateRepository stateRepository) { - this.readLookup = stateRepository.getPersistentConcurrentMap("CompactJavaSerializer-ObjectStreamClassIndex", Integer.class, ObjectStreamClass.class); + this.readLookup = stateRepository.getPersistentStateHolder("CompactJavaSerializer-ObjectStreamClassIndex", Integer.class, ObjectStreamClass.class); loadMappingsInWriteContext(readLookup.entrySet(), true); } diff --git a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java b/impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java new file mode 100644 index 0000000000..51f063ba5a --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/serialization/TransientStateHolder.java @@ -0,0 +1,61 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.impl.serialization; + +import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.spi.persistence.StateHolder; + +import java.io.Serializable; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +public class TransientStateHolder implements StateHolder, Serializable { + + private final ConcurrentMap map = new ConcurrentHashMap(); + + @Override + public V putIfAbsent(final K key, final V value) { + return map.putIfAbsent(key, value); + } + + @Override + public V get(final K key) { + return map.get(key); + } + + @Override + public Set> entrySet() { + return map.entrySet(); + } + + @Override + public boolean equals(final Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + final TransientStateHolder that = (TransientStateHolder)o; + + return map.equals(that.map); + + } + + @Override + public int hashCode() { + return map.hashCode(); + } +} diff --git a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java b/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java index 12519df360..07c07c448b 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/TransientStateRepository.java @@ -17,6 +17,7 @@ package org.ehcache.impl.serialization; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; +import org.ehcache.spi.persistence.StateHolder; import org.ehcache.spi.persistence.StateRepository; import java.io.Serializable; @@ -27,21 +28,21 @@ */ public class TransientStateRepository implements StateRepository { - private ConcurrentMap> knownMaps = new ConcurrentHashMap>(); + private ConcurrentMap> knownHolders = new ConcurrentHashMap>(); @Override @SuppressWarnings("unchecked") - public ConcurrentMap getPersistentConcurrentMap(String name, Class keyClass, Class valueClass) { - ConcurrentMap concurrentMap = (ConcurrentMap) knownMaps.get(name); - if (concurrentMap != null) { - return concurrentMap; + public StateHolder getPersistentStateHolder(String name, Class keyClass, Class valueClass) { + StateHolder stateHolder = (StateHolder) knownHolders.get(name); + if (stateHolder != null) { + return stateHolder; } else { - ConcurrentHashMap newMap = new ConcurrentHashMap(); - concurrentMap = (ConcurrentMap) knownMaps.putIfAbsent(name, newMap); - if (concurrentMap == null) { - return newMap; + StateHolder newHolder = new TransientStateHolder(); + stateHolder = (StateHolder) knownHolders.putIfAbsent(name, newHolder); + if (stateHolder == null) { + return newHolder; } else { - return concurrentMap; + return stateHolder; } } } diff --git a/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java b/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java index 3b4c70063f..63460e530c 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java +++ b/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java @@ -16,6 +16,8 @@ package org.ehcache.impl.persistence; +import org.ehcache.impl.serialization.TransientStateHolder; +import org.ehcache.spi.persistence.StateHolder; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -26,8 +28,6 @@ import java.io.FilenameFilter; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; -import java.util.concurrent.ConcurrentHashMap; 
-import java.util.concurrent.ConcurrentMap; import static org.hamcrest.Matchers.is; import static org.junit.Assert.*; @@ -37,31 +37,31 @@ */ public class FileBasedStateRepositoryTest { - private static String MAP_FILE_NAME = "map-0-myMap.bin"; + private static String HOLDER_FILE_NAME = "holder-0-myHolder.bin"; @Rule public TemporaryFolder folder = new TemporaryFolder(); @Test - public void testMapSave() throws Exception { + public void testHolderSave() throws Exception { File directory = folder.newFolder("testSave"); FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); - String mapName = "myMap"; - ConcurrentMap myMap = stateRepository.getPersistentConcurrentMap(mapName, Long.class, String.class); + String holderName = "myHolder"; + StateHolder myHolder = stateRepository.getPersistentStateHolder(holderName, Long.class, String.class); - myMap.put(42L, "TheAnswer!"); + myHolder.putIfAbsent(42L, "TheAnswer!"); stateRepository.close(); - FileInputStream fis = new FileInputStream(new File(directory, MAP_FILE_NAME)); + FileInputStream fis = new FileInputStream(new File(directory, HOLDER_FILE_NAME)); try { ObjectInputStream ois = new ObjectInputStream(fis); try { String name = (String) ois.readObject(); - assertThat(name, is(mapName)); + assertThat(name, is(holderName)); FileBasedStateRepository.Tuple loadedTuple = (FileBasedStateRepository.Tuple) ois.readObject(); assertThat(loadedTuple.index, is(0)); - assertThat((ConcurrentMap)loadedTuple.map, is(myMap)); + assertThat((StateHolder)loadedTuple.holder, is(myHolder)); } finally { ois.close(); } @@ -71,17 +71,17 @@ public void testMapSave() throws Exception { } @Test - public void testMapLoad() throws Exception { + public void testHolderLoad() throws Exception { File directory = folder.newFolder("testLoad"); - String mapName = "myMap"; - ConcurrentMap map = new ConcurrentHashMap(); - map.put(42L, "Again? That's not even funny anymore!!"); + String holderName = "myHolder"; + StateHolder map = new TransientStateHolder(); + map.putIfAbsent(42L, "Again? 
That's not even funny anymore!!"); - FileOutputStream fos = new FileOutputStream(new File(directory, MAP_FILE_NAME)); + FileOutputStream fos = new FileOutputStream(new File(directory, HOLDER_FILE_NAME)); try { ObjectOutputStream oos = new ObjectOutputStream(fos); try { - oos.writeObject(mapName); + oos.writeObject(holderName); oos.writeObject(new FileBasedStateRepository.Tuple(0, map)); } finally { oos.close(); @@ -91,22 +91,22 @@ public void testMapLoad() throws Exception { } FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); - ConcurrentMap myMap = stateRepository.getPersistentConcurrentMap(mapName, Long.class, String.class); + StateHolder myHolder = stateRepository.getPersistentStateHolder(holderName, Long.class, String.class); - assertThat(myMap, is(map)); + assertThat(myHolder, is(map)); } @Test public void testIndexProperlySetAfterLoad() throws Exception { File directory = folder.newFolder("testIndexAfterLoad"); - String mapName = "myMap"; + String holderName = "myHolder"; - FileOutputStream fos = new FileOutputStream(new File(directory, MAP_FILE_NAME)); + FileOutputStream fos = new FileOutputStream(new File(directory, HOLDER_FILE_NAME)); try { ObjectOutputStream oos = new ObjectOutputStream(fos); try { - oos.writeObject(mapName); - oos.writeObject(new FileBasedStateRepository.Tuple(0, new ConcurrentHashMap())); + oos.writeObject(holderName); + oos.writeObject(new FileBasedStateRepository.Tuple(0, new TransientStateHolder())); } finally { oos.close(); } @@ -115,16 +115,16 @@ public void testIndexProperlySetAfterLoad() throws Exception { } FileBasedStateRepository stateRepository = new FileBasedStateRepository(directory); - stateRepository.getPersistentConcurrentMap("otherMap", Long.class, Long.class); + stateRepository.getPersistentStateHolder("otherHolder", Long.class, Long.class); stateRepository.close(); File[] files = directory.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { - return name.contains("otherMap") && name.contains("-1-"); + return name.contains("otherHolder") && name.contains("-1-"); } }); assertThat(files.length, is(1)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java b/impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java index 41505c4c1f..1fc1413232 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/TransientStateRepositoryTest.java @@ -16,11 +16,9 @@ package org.ehcache.impl.serialization; +import org.ehcache.spi.persistence.StateHolder; import org.junit.Test; -import java.io.Serializable; -import java.util.concurrent.ConcurrentMap; - import static org.hamcrest.Matchers.is; import static org.junit.Assert.*; @@ -32,11 +30,11 @@ public class TransientStateRepositoryTest { @Test public void testRemembersCreatedMaps() throws Exception { TransientStateRepository repository = new TransientStateRepository(); - ConcurrentMap test = repository.getPersistentConcurrentMap("test", Long.class, String.class); - test.put(42L, "Again??"); + StateHolder test = repository.getPersistentStateHolder("test", Long.class, String.class); + test.putIfAbsent(42L, "Again??"); - test = repository.getPersistentConcurrentMap("test", Long.class, String.class); + test = repository.getPersistentStateHolder("test", Long.class, String.class); assertThat(test.get(42L), is("Again??")); } -} \ No newline at end of 
file +} From f7c5a38d923b37d484ce70c86f266c8e284343de Mon Sep 17 00:00:00 2001 From: geoff gibson Date: Thu, 22 Sep 2016 08:26:39 -0700 Subject: [PATCH 041/218] Issue #1458 Descriptor test improvements reverts imports and puts StatsUtil back in test package removes StatUtil from production code --- .../ClusteringManagementServiceTest.java | 130 +++++++++++++++++- .../DefaultManagementRegistryServiceTest.java | 130 +++++++++++++++++- 2 files changed, 251 insertions(+), 9 deletions(-) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 32b2379d13..f0b4fc33f7 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -15,7 +15,6 @@ */ package org.ehcache.clustered.management; - import org.ehcache.Cache; import org.ehcache.CacheManager; import org.ehcache.Status; @@ -29,6 +28,7 @@ import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.junit.After; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; @@ -43,7 +43,9 @@ import org.terracotta.management.model.stats.history.CounterHistory; import java.io.Serializable; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.TreeSet; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -52,12 +54,22 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; import static org.hamcrest.number.OrderingComparison.greaterThanOrEqualTo; import static org.junit.Assert.assertThat; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.stats.StatisticType; public class ClusteringManagementServiceTest extends AbstractClusteringManagementTest { + private static final Collection ONHEAP_DESCRIPTORS = new ArrayList(); + private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList(); + private static final Collection DISK_DESCRIPTORS = new ArrayList(); + private static final Collection CLUSTERED_DESCRIPTORS = new ArrayList(); + private static final Collection CACHE_DESCRIPTORS = new ArrayList(); + private static AtomicInteger N = new AtomicInteger(); @Rule @@ -129,7 +141,17 @@ public void test_capabilities_exposed() throws Exception { assertThat(capabilities[3].getName(), equalTo("SettingsCapability")); assertThat(capabilities[4].getName(), equalTo("ManagementAgentService")); assertThat(capabilities[0].getDescriptors(), hasSize(4)); - assertThat(capabilities[1].getDescriptors(), hasSize(75)); + + Collection descriptors = capabilities[1].getDescriptors(); + Collection allDescriptors = new ArrayList(); + allDescriptors.addAll(CACHE_DESCRIPTORS); + allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(OFFHEAP_DESCRIPTORS); + allDescriptors.addAll(CLUSTERED_DESCRIPTORS); + + assertThat(descriptors, 
containsInAnyOrder(allDescriptors.toArray())); + assertThat(descriptors, hasSize(allDescriptors.size())); + } @Test @@ -224,4 +246,108 @@ public void test_stats_collection() throws Exception { } + @BeforeClass + public static void initDescriptors() throws ClassNotFoundException { + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMinimum" , StatisticType.DURATION_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyMinimum" , StatisticType.DURATION_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMaximum" , StatisticType.DURATION_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionRate" , StatisticType.RATE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatioRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyAverage" , StatisticType.AVERAGE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMinimum" , StatisticType.DURATION_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedBytesCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRate" , StatisticType.RATE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyAverage" , StatisticType.AVERAGE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRate" , StatisticType.RATE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyAverage" , StatisticType.AVERAGE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyMaximum" , StatisticType.DURATION_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMaximum" , StatisticType.DURATION_HISTORY)); + + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionRate", StatisticType.RATE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRate", StatisticType.RATE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedBytesCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); 
+ OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatioRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MaxMappingCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); + + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MaxMappingCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRate", StatisticType.RATE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedBytesCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionRate", StatisticType.RATE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatioRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRate", StatisticType.RATE_HISTORY)); + + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MaxMappingCount", StatisticType.COUNTER_HISTORY)); + 
CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitRate", StatisticType.RATE_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissRate", StatisticType.RATE_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:OccupiedBytesCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitRatioRatio", StatisticType.RATIO_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MappingCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionRate", StatisticType.RATE_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); + + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRatioRatio", StatisticType.RATIO_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyAverage", StatisticType.AVERAGE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyMaximum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyMinimum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatioRatio", StatisticType.RATIO_HISTORY)); + + } + } diff --git 
a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index b16078358f..c7dffc5be6 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -48,6 +48,7 @@ import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.NoSuchElementException; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -56,12 +57,23 @@ import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; +import static org.hamcrest.Matchers.containsInAnyOrder; +import org.junit.BeforeClass; import org.junit.Rule; import org.junit.rules.TemporaryFolder; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.stats.StatisticType; import org.terracotta.management.registry.StatisticQuery.Builder; + public class DefaultManagementRegistryServiceTest { + private static final Collection ONHEAP_DESCRIPTORS = new ArrayList(); + private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList(); + private static final Collection DISK_DESCRIPTORS = new ArrayList(); + private static final Collection CACHE_DESCRIPTORS = new ArrayList(); + @Rule public final TemporaryFolder diskPath = new TemporaryFolder(); @@ -109,9 +121,15 @@ public void descriptorOnHeapTest() { assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(35)); + + Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(); + Collection allDescriptors = new ArrayList(); + allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(CACHE_DESCRIPTORS); + + assertThat(descriptors, containsInAnyOrder(allDescriptors.toArray())); + assertThat(descriptors, hasSize(allDescriptors.size())); } finally { if(cacheManager1 != null) cacheManager1.close(); @@ -138,9 +156,16 @@ public void descriptorOffHeapTest() { assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(55)); + + Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(); + Collection allDescriptors = new ArrayList(); + 
allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(OFFHEAP_DESCRIPTORS); + allDescriptors.addAll(CACHE_DESCRIPTORS); + + assertThat(descriptors, containsInAnyOrder(allDescriptors.toArray())); + assertThat(descriptors, hasSize(allDescriptors.size())); } finally { if(cacheManager1 != null) cacheManager1.close(); @@ -169,9 +194,16 @@ public void descriptorDiskStoreTest() throws URISyntaxException { assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(55)); + + Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(); + Collection allDescriptors = new ArrayList(); + allDescriptors.addAll(ONHEAP_DESCRIPTORS); + allDescriptors.addAll(DISK_DESCRIPTORS); + allDescriptors.addAll(CACHE_DESCRIPTORS); + + assertThat(descriptors, containsInAnyOrder(allDescriptors.toArray())); + assertThat(descriptors, hasSize(allDescriptors.size())); } finally { if(persistentCacheManager != null) persistentCacheManager.close(); @@ -202,7 +234,7 @@ public void testCanGetCapabilities() { assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(35)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(ONHEAP_DESCRIPTORS.size() + CACHE_DESCRIPTORS.size())); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getCapabilityContext().getAttributes(), hasSize(2)); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getCapabilityContext().getAttributes(), hasSize(2)); @@ -486,4 +518,88 @@ public void testCallOnInexistignContext() { } + @BeforeClass + public static void loadStatsUtil() throws ClassNotFoundException { + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMinimum" , StatisticType.DURATION_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyMinimum" , StatisticType.DURATION_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMaximum" , StatisticType.DURATION_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionRate" , StatisticType.RATE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatioRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyAverage" , StatisticType.AVERAGE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMinimum" , StatisticType.DURATION_HISTORY)); + ONHEAP_DESCRIPTORS.add(new 
StatisticDescriptor("OnHeap:OccupiedBytesCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRate" , StatisticType.RATE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyAverage" , StatisticType.AVERAGE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRate" , StatisticType.RATE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyAverage" , StatisticType.AVERAGE_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyMaximum" , StatisticType.DURATION_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMaximum" , StatisticType.DURATION_HISTORY)); + + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionRate", StatisticType.RATE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRate", StatisticType.RATE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedBytesCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatioRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MaxMappingCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); + + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MaxMappingCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRate", 
StatisticType.RATE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedBytesCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionRate", StatisticType.RATE_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatioRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRate", StatisticType.RATE_HISTORY)); + + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRatioRatio", StatisticType.RATIO_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyAverage", StatisticType.AVERAGE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyMaximum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRate", StatisticType.RATE_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyMinimum", StatisticType.DURATION_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", StatisticType.COUNTER_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatioRatio", StatisticType.RATIO_HISTORY)); + + } + + } From 5406a8bd10d5631224cfd5fe4aa4c7cfd67024cf Mon Sep 17 00:00:00 2001 From: Chris Dennis Date: Fri, 23 Sep 2016 10:59:24 -0400 Subject: [PATCH 042/218] Issue #1463 
Pulls a general StatisticMapper out of TierOperationStatistic. --- .../client/internal/store/ClusteredStore.java | 32 +-- .../core/statistics/StatisticMapper.java | 166 ++++++++++++ .../statistics/TierOperationStatistic.java | 186 +++++++------ .../core/statistics/StatisticMapperTest.java | 256 ++++++++++++++++++ .../internal/store/disk/OffHeapDiskStore.java | 34 +-- .../impl/internal/store/heap/OnHeapStore.java | 52 ++-- .../internal/store/offheap/OffHeapStore.java | 48 ++-- .../internal/store/tiering/TieredStore.java | 4 - 8 files changed, 583 insertions(+), 195 deletions(-) create mode 100644 core/src/main/java/org/ehcache/core/statistics/StatisticMapper.java create mode 100644 core/src/test/java/org/ehcache/core/statistics/StatisticMapperTest.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index dcd465a2d3..72da95ff32 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -51,6 +51,7 @@ import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.core.statistics.TierOperationStatistic; +import org.ehcache.core.statistics.TierOperationStatistic.TierOperationOutcomes; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.spi.persistence.StateRepository; @@ -61,8 +62,6 @@ import org.ehcache.spi.service.ServiceConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.terracotta.context.ContextManager; -import org.terracotta.context.annotations.ContextAttribute; import org.terracotta.statistics.StatisticsManager; import org.terracotta.statistics.observer.OperationObserver; @@ -81,7 +80,6 @@ import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; -import static org.ehcache.core.statistics.TierOperationStatistic.set; import static org.terracotta.statistics.StatisticBuilder.operation; /** @@ -569,17 +567,15 @@ public ClusteredStore createStore(final Configuration storeCo ClusteredStore store = createStoreInternal(storeConfig, serviceConfigs); Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); - }}, "get", TIER_HEIGHT, "get", STATISTICS_TAG); + TierOperationStatistic get = + new TierOperationStatistic( + store, TierOperationOutcomes.GET_TRANSLATION, "get", TIER_HEIGHT, "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, 
set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); + TierOperationStatistic evict = + new TierOperationStatistic( + store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -745,17 +741,15 @@ public AuthoritativeTier createAuthoritativeTier(Configuration authoritativeTier = createStoreInternal(storeConfig, serviceConfigs); Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, authoritativeTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT)); - }}, "get", TIER_HEIGHT, "getAndFault", STATISTICS_TAG); + TierOperationStatistic get = + new TierOperationStatistic( + authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", TIER_HEIGHT, "getAndFault", STATISTICS_TAG); StatisticsManager.associate(get).withParent(authoritativeTier); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, authoritativeTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); + TierOperationStatistic evict = + new TierOperationStatistic( + authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(authoritativeTier); tieredOps.add(evict); diff --git a/core/src/main/java/org/ehcache/core/statistics/StatisticMapper.java b/core/src/main/java/org/ehcache/core/statistics/StatisticMapper.java new file mode 100644 index 0000000000..eab43f5525 --- /dev/null +++ b/core/src/main/java/org/ehcache/core/statistics/StatisticMapper.java @@ -0,0 +1,166 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.core.statistics; + +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.ValueStatistic; +import org.terracotta.statistics.observer.ChainedOperationObserver; + +import java.util.Collections; +import java.util.EnumMap; +import java.util.EnumSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import static java.util.EnumSet.allOf; + +/** + * + */ +public class StatisticMapper, TARGET extends Enum> implements OperationStatistic { + + private final Class targetType; + private final Class sourceType; + private final OperationStatistic statistic; + private final Map> translation; + private final Map reverseTranslation; + private final ConcurrentMap, ChainedOperationObserver> derivedStats + = new ConcurrentHashMap, ChainedOperationObserver>(); + + public StatisticMapper(Map> translation, OperationStatistic statistic) { + Entry> first = translation.entrySet().iterator().next(); + + this.targetType = first.getKey().getDeclaringClass(); + this.sourceType = first.getValue().iterator().next().getDeclaringClass(); + this.statistic = statistic; + this.translation = translation; + Set unmappedTierOutcomes = allOf(targetType); + unmappedTierOutcomes.removeAll(translation.keySet()); + if (!unmappedTierOutcomes.isEmpty()) { + throw new IllegalArgumentException("Translation does not contain target outcomes " + unmappedTierOutcomes); + } + + this.reverseTranslation = reverse(translation); + Set unmappedStoreOutcomes = allOf(sourceType); + unmappedStoreOutcomes.removeAll(reverseTranslation.keySet()); + if (!unmappedStoreOutcomes.isEmpty()) { + throw new IllegalArgumentException("Translation does not contain source outcomes " + unmappedStoreOutcomes); + } + } + + private static , A extends Enum> Map reverse(Map> map) { + Map reverse = Collections.emptyMap(); + + for (Entry> e : map.entrySet()) { + for (B b : e.getValue()) { + if (reverse.isEmpty()) { + reverse = new EnumMap(b.getDeclaringClass()); + } + if (reverse.put(b, e.getKey()) != null) { + throw new IllegalArgumentException("Reverse statistic outcome mapping is ill-defined: " + map); + } + } + } + return reverse; + } + + @Override + public Class type() { + return targetType; + } + + @Override + public ValueStatistic statistic(TARGET result) { + return statistic.statistic(translation.get(result)); + } + + @Override + public ValueStatistic statistic(Set results) { + Set translated = EnumSet.noneOf(sourceType); + for (TARGET result : results) { + translated.addAll(translation.get(result)); + } + return statistic.statistic(translated); + } + + @Override + public long count(TARGET type) { + return statistic.sum(translation.get(type)); + } + + @Override + public long sum(Set types) { + Set translated = EnumSet.noneOf(sourceType); + for (TARGET type : types) { + translated.addAll(translation.get(type)); + } + return statistic.sum(translated); + } + + @Override + public long sum() { + return statistic.sum(); + } + + @Override + public void addDerivedStatistic(final ChainedOperationObserver derived) { + ChainedOperationObserver translator = new ChainedOperationObserver() { + @Override + public void begin(long time) { + derived.begin(time); + } + + @Override + public void end(long time, SOURCE result) { + derived.end(time, reverseTranslation.get(result)); + } + + @Override + public void end(long time, SOURCE result, long... 
parameters) { + derived.end(time, reverseTranslation.get(result), parameters); + } + }; + if (derivedStats.putIfAbsent(derived, translator) == null) { + statistic.addDerivedStatistic(translator); + } + } + + @Override + public void removeDerivedStatistic(ChainedOperationObserver derived) { + ChainedOperationObserver translator = derivedStats.remove(derived); + if (translator != null) { + statistic.removeDerivedStatistic(translator); + } + } + + @Override + public void begin() { + throw new UnsupportedOperationException(); + } + + @Override + public void end(TARGET result) { + throw new UnsupportedOperationException(); + } + + @Override + public void end(TARGET result, long... parameters) { + throw new UnsupportedOperationException(); + } +} diff --git a/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java b/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java index 6ea91e664b..19d1d5c807 100644 --- a/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java +++ b/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java @@ -15,187 +15,185 @@ */ package org.ehcache.core.statistics; +import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes.GetAndFaultOutcome; +import org.ehcache.core.statistics.CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome; +import org.ehcache.core.statistics.LowerCachingTierOperationsOutcome.GetAndRemoveOutcome; import org.terracotta.context.ContextManager; import org.terracotta.context.TreeNode; import org.terracotta.context.annotations.ContextAttribute; +import org.terracotta.context.query.Matcher; import org.terracotta.context.query.Matchers; import org.terracotta.context.query.Query; import org.terracotta.statistics.OperationStatistic; import org.terracotta.statistics.ValueStatistic; import org.terracotta.statistics.observer.ChainedOperationObserver; -import java.util.Arrays; -import java.util.Collection; import java.util.Collections; -import java.util.EnumSet; +import java.util.EnumMap; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; +import static java.util.Collections.unmodifiableMap; +import static java.util.EnumSet.of; +import static org.terracotta.context.query.Matchers.allOf; import static org.terracotta.context.query.Matchers.attributes; import static org.terracotta.context.query.Matchers.context; -import static org.terracotta.context.query.Matchers.hasAttribute; import static org.terracotta.context.query.Matchers.identifier; import static org.terracotta.context.query.Matchers.subclassOf; -import static org.terracotta.context.query.Queries.self; import static org.terracotta.context.query.QueryBuilder.queryBuilder; +import static org.terracotta.context.query.Matchers.hasAttribute; -/** - * - */ @ContextAttribute("this") public class TierOperationStatistic, D extends Enum> implements OperationStatistic { @ContextAttribute("name") public final String name; @ContextAttribute("tags") public final Set tags; @ContextAttribute("properties") public final Map properties; - @ContextAttribute("type") public final Class type; - - private final Class tierOperatioOutcome; - private final OperationStatistic operationStatistic; - private final HashMap> storeToTierOperationOutcomeMap; - - public TierOperationStatistic(Class tierOperatioOutcome, Class storeOperatioOutcome, Object tier, HashMap> storeToTierOperationOutcomeMap, String sourceOperationName, int tierHeight, String targetOperationName, String 
discriminator) { - this.tierOperatioOutcome = tierOperatioOutcome; - this.operationStatistic = TierOperationStatistic.findOperationStat(tier, targetOperationName); - this.storeToTierOperationOutcomeMap = storeToTierOperationOutcomeMap; - this.name = sourceOperationName; - this.tags = new HashSet(); - this.tags.add("tier"); + @ContextAttribute("type") public final Class tierOutcomeType; + + private final StatisticMapper mapper; + + public TierOperationStatistic(Object tier, Map> translation, String statisticName, int tierHeight, String targetName, String discriminator) { + + this.name = statisticName; + this.tags = Collections.singleton("tier"); this.properties = new HashMap(); this.properties.put("tierHeight", tierHeight); this.properties.put("discriminator", discriminator); - this.type = tierOperatioOutcome; - - EnumSet tierOperatioOutcomeSet = EnumSet.allOf(tierOperatioOutcome); - //make sure all tierOperatioOutcome enum values are keys in the storeToTierOperationOutcomeMap - for (D tierOperatioOutcomeKey : tierOperatioOutcomeSet) { - if (!storeToTierOperationOutcomeMap.containsKey(tierOperatioOutcomeKey)) { - throw new IllegalArgumentException("storeTierOperationOutcomeMap does not contain key " + tierOperatioOutcomeKey); - } - } - //verify that all storeOperatioOutcomes are tracked - Set allAliasedValues = new HashSet(); - Collection> values = storeToTierOperationOutcomeMap.values(); - for (Set value : values) { - allAliasedValues.addAll(value); - } - Set allMissingValues = new HashSet(EnumSet.allOf(storeOperatioOutcome)); - allMissingValues.removeAll(allAliasedValues); - if (!allMissingValues.isEmpty()) { - throw new IllegalArgumentException("storeTierOperationOutcomeMap does not contain values " + allMissingValues); - } + Entry> first = translation.entrySet().iterator().next(); + Class storeOutcomeType = first.getValue().iterator().next().getDeclaringClass(); + this.tierOutcomeType = first.getKey().getDeclaringClass(); + + this.mapper = new StatisticMapper(translation, findOperationStat(tier, storeOutcomeType, targetName)); } @Override public Class type() { - return tierOperatioOutcome; + return tierOutcomeType; } @Override public ValueStatistic statistic(D result) { - return operationStatistic.statistic(storeToTierOperationOutcomeMap.get(result)); + return mapper.statistic(result); } @Override public ValueStatistic statistic(Set results) { - Set xlated = new HashSet(); - for (D result : results) { - xlated.addAll(storeToTierOperationOutcomeMap.get(result)); - } - return operationStatistic.statistic(xlated); + return mapper.statistic(results); } @Override public long count(D type) { - long value = 0L; - Set s = storeToTierOperationOutcomeMap.get(type); - for (S s1 : s) { - value += operationStatistic.count(s1); - } - return value; + return mapper.count(type); } @Override public long sum(Set types) { - Set xlated = new HashSet(); - for (D type : types) { - xlated.addAll(storeToTierOperationOutcomeMap.get(type)); - } - return operationStatistic.sum(xlated); + return mapper.sum(types); } @Override public long sum() { - return operationStatistic.sum(); + return mapper.sum(); } @Override public void addDerivedStatistic(final ChainedOperationObserver derived) { - operationStatistic.addDerivedStatistic(new ChainedOperationObserver() { - @Override - public void begin(long time) { - derived.begin(time); - } - - @Override - public void end(long time, S result) { - derived.end(time, (D) result); - } - - @Override - public void end(long time, S result, long... 
parameters) { - derived.end(time, (D) result, parameters); - } - }); + mapper.addDerivedStatistic(derived); } @Override public void removeDerivedStatistic(ChainedOperationObserver derived) { - operationStatistic.removeDerivedStatistic((ChainedOperationObserver) derived); + mapper.removeDerivedStatistic(derived); } @Override public void begin() { - throw new UnsupportedOperationException(); + mapper.begin(); } @Override public void end(D result) { - throw new UnsupportedOperationException(); + mapper.end(result); } @Override public void end(D result, long... parameters) { - throw new UnsupportedOperationException(); + mapper.end(result, parameters); } - private static OperationStatistic findOperationStat(Object rootNode, final String statName) { - Query q = queryBuilder().chain(self()) - .descendants().filter(context(identifier(subclassOf(OperationStatistic.class)))).build(); + @SuppressWarnings("unchecked") + private static > OperationStatistic findOperationStat(Object rootNode, final Class statisticType, final String statName) { + Query q = queryBuilder().descendants() + .filter(context(identifier(subclassOf(OperationStatistic.class)))) + .filter(context(attributes(Matchers.>allOf( + hasAttribute("name", statName), + hasAttribute("this", new Matcher() { + @Override + protected boolean matchesSafely(OperationStatistic object) { + return object.type().equals(statisticType); + } + }) + )))).build(); - Set operationStatisticNodes = q.execute(Collections.singleton(ContextManager.nodeFor(rootNode))); - Set result = queryBuilder() - .filter( - context(attributes(Matchers.>allOf( - hasAttribute("name", statName))))).build().execute(operationStatisticNodes); + + Set result = q.execute(Collections.singleton(ContextManager.nodeFor(rootNode))); if (result.size() != 1) { throw new RuntimeException("a single stat was expected; found " + result.size()); } TreeNode node = result.iterator().next(); - return (OperationStatistic) node.getContext().attributes().get("this"); - } - - public static Set set(X... 
xs) { - return new HashSet(Arrays.asList(xs)); + return (OperationStatistic) node.getContext().attributes().get("this"); } public static class TierOperationOutcomes { + public static final Map> GET_TRANSLATION; + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(StoreOperationOutcomes.GetOutcome.HIT)); + translation.put(GetOutcome.MISS, of(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); + GET_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> GET_AND_FAULT_TRANSLATION; + + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(GetAndFaultOutcome.HIT)); + translation.put(GetOutcome.MISS, of(GetAndFaultOutcome.MISS, GetAndFaultOutcome.TIMEOUT)); + GET_AND_FAULT_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> GET_AND_REMOVE_TRANSLATION; + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(GetAndRemoveOutcome.HIT_REMOVED)); + translation.put(GetOutcome.MISS, of(GetAndRemoveOutcome.MISS)); + GET_AND_REMOVE_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> GET_OR_COMPUTEIFABSENT_TRANSLATION; + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(GetOrComputeIfAbsentOutcome.HIT)); + translation.put(GetOutcome.MISS, of(GetOrComputeIfAbsentOutcome.FAULTED, GetOrComputeIfAbsentOutcome.FAULT_FAILED, + GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS, GetOrComputeIfAbsentOutcome.MISS)); + GET_OR_COMPUTEIFABSENT_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> EVICTION_TRANSLATION; + static { + Map> translation = new EnumMap>(EvictionOutcome.class); + translation.put(EvictionOutcome.SUCCESS, of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + translation.put(EvictionOutcome.FAILURE, of(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + EVICTION_TRANSLATION = unmodifiableMap(translation); + }; + public enum GetOutcome { HIT, MISS, diff --git a/core/src/test/java/org/ehcache/core/statistics/StatisticMapperTest.java b/core/src/test/java/org/ehcache/core/statistics/StatisticMapperTest.java new file mode 100644 index 0000000000..485080cac5 --- /dev/null +++ b/core/src/test/java/org/ehcache/core/statistics/StatisticMapperTest.java @@ -0,0 +1,256 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.core.statistics; + +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.terracotta.statistics.OperationStatistic; +import org.terracotta.statistics.observer.ChainedOperationObserver; + +import java.util.Collections; +import java.util.EnumMap; +import java.util.Map; +import java.util.Set; + +import static java.util.EnumSet.of; +import static org.ehcache.core.statistics.StatisticMapperTest.Source.C; +import static org.ehcache.core.statistics.StatisticMapperTest.Source.D; +import static org.ehcache.core.statistics.StatisticMapperTest.Source.E; +import static org.ehcache.core.statistics.StatisticMapperTest.Target.A; +import static org.ehcache.core.statistics.StatisticMapperTest.Target.B; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.hamcrest.core.StringContains.containsString; +import static org.junit.Assert.assertThat; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +public class StatisticMapperTest { + + @Test + public void testInvalidSourceStatisticSet() { + try { + new StatisticMapper(Collections.>singletonMap(A, of(C, D)), null); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("target outcomes [B]")); + } + } + + @Test + public void testInvalidTargetStatisticSet() { + try { + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D)); + new StatisticMapper(translation, null); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("source outcomes [E]")); + } + } + + @Test + public void testIllDefinedTranslation() { + try { + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C, D)); + translation.put(B, of(D, E)); + new StatisticMapper(translation, null); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapping is ill-defined")); + } + } + + @Test + public void testTargetTypeExtraction() { + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D, E)); + StatisticMapper mapper = new StatisticMapper(translation, null); + + assertThat(mapper.type(), equalTo(Target.class)); + } + + @Test + public void testStatisticTranslation() { + OperationStatistic statistic = mock(OperationStatistic.class); + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D, E)); + StatisticMapper mapper = new StatisticMapper(translation, statistic); + + mapper.statistic(B); + verify(statistic).statistic(of(D, E)); + + mapper.statistic(A); + verify(statistic).statistic(of(C)); + } + + @Test + public void testStatisticSetTranslation() { + OperationStatistic statistic = mock(OperationStatistic.class); + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D, E)); + StatisticMapper mapper = new StatisticMapper(translation, statistic); + + mapper.statistic(of(A, B)); + verify(statistic).statistic(of(C, D, E)); + } + + @Test + public void testCountTranslation() { + OperationStatistic statistic = mock(OperationStatistic.class); + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D, E)); + StatisticMapper mapper = new StatisticMapper(translation, statistic); + + mapper.count(B); + verify(statistic).sum(of(D, E)); + + mapper.count(A); + verify(statistic).sum(of(C)); + } + + @Test + 
public void testSumTranslation() { + OperationStatistic statistic = mock(OperationStatistic.class); + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D, E)); + StatisticMapper mapper = new StatisticMapper(translation, statistic); + + mapper.sum(of(A, B)); + verify(statistic).sum(of(C, D, E)); + } + + @Test + public void testFullSum() { + OperationStatistic statistic = mock(OperationStatistic.class); + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D, E)); + StatisticMapper mapper = new StatisticMapper(translation, statistic); + + mapper.sum(); + + verify(statistic).sum(); + } + + @Test + public void testDerivedStatisticBeginDelegation() { + ArgumentCaptor wrapperCapture = ArgumentCaptor.forClass(ChainedOperationObserver.class); + + OperationStatistic statistic = mock(OperationStatistic.class); + doNothing().when(statistic).addDerivedStatistic(wrapperCapture.capture()); + + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D, E)); + StatisticMapper mapper = new StatisticMapper(translation, statistic); + + ChainedOperationObserver derived = mock(ChainedOperationObserver.class); + + mapper.addDerivedStatistic(derived); + + ChainedOperationObserver wrapper = wrapperCapture.getValue(); + + wrapper.begin(42L); + verify(derived).begin(42L); + } + + @Test + public void testDerivedStatisticEndDelegation() { + ArgumentCaptor wrapperCapture = ArgumentCaptor.forClass(ChainedOperationObserver.class); + + OperationStatistic statistic = mock(OperationStatistic.class); + doNothing().when(statistic).addDerivedStatistic(wrapperCapture.capture()); + + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D, E)); + StatisticMapper mapper = new StatisticMapper(translation, statistic); + + ChainedOperationObserver derived = mock(ChainedOperationObserver.class); + + mapper.addDerivedStatistic(derived); + + ChainedOperationObserver wrapper = wrapperCapture.getValue(); + + wrapper.end(43L, E); + verify(derived).end(43L, B); + + wrapper.end(44L, C); + verify(derived).end(44L, A); + + wrapper.end(45L, D); + verify(derived).end(45L, B); + } + + @Test + public void testDerivedStatisticEndWithParametersDelegation() { + ArgumentCaptor wrapperCapture = ArgumentCaptor.forClass(ChainedOperationObserver.class); + + OperationStatistic statistic = mock(OperationStatistic.class); + doNothing().when(statistic).addDerivedStatistic(wrapperCapture.capture()); + + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D, E)); + StatisticMapper mapper = new StatisticMapper(translation, statistic); + + ChainedOperationObserver derived = mock(ChainedOperationObserver.class); + + mapper.addDerivedStatistic(derived); + + ChainedOperationObserver wrapper = wrapperCapture.getValue(); + + wrapper.end(43L, E, 1L, 2L); + verify(derived).end(43L, B, 1L, 2L); + + wrapper.end(44L, C, 2L, 1L); + verify(derived).end(44L, A, 2L, 1L); + + wrapper.end(45L, D, 3L, 4L); + verify(derived).end(45L, B, 3L, 4L); + } + + @Test + public void testDerivedStatisticRemovalDelegation() { + OperationStatistic statistic = mock(OperationStatistic.class); + + Map> translation = new EnumMap>(Target.class); + translation.put(A, of(C)); + translation.put(B, of(D, E)); + StatisticMapper mapper = new StatisticMapper(translation, statistic); + + ChainedOperationObserver derived = mock(ChainedOperationObserver.class); + + 
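+    // Removing the observer must delegate to the wrapped statistic so that the internal
+    // translating observer registered by addDerivedStatistic is unregistered as well.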
mapper.addDerivedStatistic(derived); + mapper.removeDerivedStatistic(derived); + + verify(statistic).removeDerivedStatistic(any(ChainedOperationObserver.class)); + } + + enum Target { + A, B + } + + enum Source { + C, D, E + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index 9f3968733e..50d184620f 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -19,13 +19,11 @@ import org.ehcache.config.SizedResourcePool; import org.ehcache.core.CacheConfigurationChangeListener; import org.ehcache.Status; -import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourceType; import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; import org.ehcache.core.statistics.StoreOperationOutcomes; -import org.ehcache.core.statistics.TierOperationStatistic; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.events.StoreEventDispatcher; @@ -54,6 +52,8 @@ import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.statistics.TierOperationStatistic; +import org.ehcache.core.statistics.TierOperationStatistic.TierOperationOutcomes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.offheapstore.disk.paging.MappedPageSource; @@ -77,7 +77,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; @@ -88,7 +87,6 @@ import static java.lang.Math.max; import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; -import static org.ehcache.core.statistics.TierOperationStatistic.set; import static org.terracotta.offheapstore.util.MemoryUnit.BYTES; /** @@ -335,17 +333,15 @@ public OffHeapDiskStore createStore(Configuration storeConfig OffHeapDiskStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); - }}, "get", ResourceType.Core.DISK.getTierHeight(), "get", STATISTICS_TAG); + TierOperationStatistic get = + new TierOperationStatistic( + store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.DISK.getTierHeight(), "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ - 
put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); + TierOperationStatistic evict = + new TierOperationStatistic( + store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -481,17 +477,15 @@ public AuthoritativeTier createAuthoritativeTier(Configuration authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, authoritativeTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT)); - }}, "get", ResourceType.Core.DISK.getTierHeight(), "getAndFault", STATISTICS_TAG); + TierOperationStatistic get = + new TierOperationStatistic( + authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", ResourceType.Core.DISK.getTierHeight(), "getAndFault", STATISTICS_TAG); StatisticsManager.associate(get).withParent(authoritativeTier); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, authoritativeTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); + TierOperationStatistic evict = + new TierOperationStatistic( + authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(authoritativeTier); tieredOps.add(evict); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index eecb66e345..613aa20bd7 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -21,7 +21,6 @@ import org.ehcache.core.CacheConfigurationChangeEvent; import org.ehcache.core.CacheConfigurationChangeListener; import org.ehcache.core.CacheConfigurationProperty; -import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; @@ -30,7 +29,6 @@ import org.ehcache.core.events.StoreEventSink; import org.ehcache.core.spi.store.StoreAccessException; import 
org.ehcache.core.spi.store.heap.LimitExceededException; -import org.ehcache.core.statistics.TierOperationStatistic; import org.ehcache.expiry.Duration; import org.ehcache.expiry.Expiry; import org.ehcache.core.spi.function.BiFunction; @@ -68,9 +66,10 @@ import org.ehcache.core.statistics.HigherCachingTierOperationOutcomes; import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.statistics.TierOperationStatistic; +import org.ehcache.core.statistics.TierOperationStatistic.TierOperationOutcomes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.terracotta.context.annotations.ContextAttribute; import org.terracotta.offheapstore.util.FindbugsSuppressWarnings; import org.terracotta.statistics.StatisticsManager; import org.terracotta.statistics.observer.OperationObserver; @@ -92,10 +91,10 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import static java.util.Collections.singletonMap; import static org.ehcache.config.Eviction.noAdvice; import static org.ehcache.core.exceptions.StorePassThroughException.handleRuntimeException; import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; -import static org.ehcache.core.statistics.TierOperationStatistic.set; import static org.terracotta.statistics.StatisticBuilder.operation; /** @@ -270,8 +269,7 @@ public OnHeapStore(final Configuration config, final TimeSource timeSource silentInvalidateAllWithHashObserver = operation(HigherCachingTierOperationOutcomes.SilentInvalidateAllWithHashOutcome.class).named("silentInvalidateAllWithHash").of(this).tag(STATISTICS_TAG).build(); Set tags = new HashSet(Arrays.asList(STATISTICS_TAG, "tier")); - Map properties = new HashMap(); - properties.put("discriminator", STATISTICS_TAG); + Map properties = singletonMap("discriminator", STATISTICS_TAG); StatisticsManager.createPassThroughStatistic(this, "mappings", tags, properties, new Callable() { @Override public Number call() throws Exception { @@ -1664,17 +1662,15 @@ public OnHeapStore createStore(final Configuration storeConfi OnHeapStore store = createStoreInternal(storeConfig, new ScopedStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, store, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); - }}, "get", ResourceType.Core.HEAP.getTierHeight(), "get", STATISTICS_TAG); + TierOperationStatistic get = + new TierOperationStatistic( + store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, 
set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); + TierOperationStatistic evict = + new TierOperationStatistic( + store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -1763,17 +1759,15 @@ public CachingTier createCachingTier(Configuration storeConfi OnHeapStore cachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class, cachingTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS)); - }}, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); + TierOperationStatistic get = + new TierOperationStatistic( + cachingTier, TierOperationOutcomes.GET_OR_COMPUTEIFABSENT_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); StatisticsManager.associate(get).withParent(cachingTier); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, cachingTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); + TierOperationStatistic evict + = new TierOperationStatistic( + cachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(cachingTier); tieredOps.add(evict); @@ -1802,17 +1796,15 @@ public HigherCachingTier createHigherCachingTier(Configuration higherCachingTier = createStoreInternal(storeConfig, new ScopedStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.class, higherCachingTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS, 
CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS)); - }}, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); + TierOperationStatistic get = + new TierOperationStatistic( + higherCachingTier, TierOperationOutcomes.GET_OR_COMPUTEIFABSENT_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); StatisticsManager.associate(get).withParent(higherCachingTier); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, higherCachingTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); + TierOperationStatistic evict = + new TierOperationStatistic( + higherCachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(higherCachingTier); tieredOps.add(evict); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java index 9b2a747d14..ed55b18381 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java @@ -26,7 +26,6 @@ import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes; import org.ehcache.core.statistics.LowerCachingTierOperationsOutcome; import org.ehcache.core.statistics.StoreOperationOutcomes; -import org.ehcache.core.statistics.TierOperationStatistic; import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.events.ThreadLocalStoreEventDispatcher; import org.ehcache.impl.internal.store.offheap.factories.EhcacheSegmentFactory; @@ -46,6 +45,8 @@ import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; +import org.ehcache.core.statistics.TierOperationStatistic; +import org.ehcache.core.statistics.TierOperationStatistic.TierOperationOutcomes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.offheapstore.paging.PageSource; @@ -60,14 +61,11 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import static org.ehcache.config.Eviction.noAdvice; -import static org.ehcache.core.statistics.TierOperationStatistic.set; -import org.ehcache.impl.internal.store.heap.OnHeapStore; import static org.ehcache.impl.internal.store.offheap.OffHeapStoreUtils.getBufferSource; /** @@ -154,17 +152,15 @@ public OffHeapStore createStore(Configuration storeConfig, Se OffHeapStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, StoreOperationOutcomes.GetOutcome.class, 
store, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(StoreOperationOutcomes.GetOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); - }}, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "get", STATISTICS_TAG); + TierOperationStatistic get = + new TierOperationStatistic( + store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, store, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); + TierOperationStatistic evict = + new TierOperationStatistic( + store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -248,17 +244,15 @@ public AuthoritativeTier createAuthoritativeTier(Configuration authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.class, authoritativeTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT)); - }}, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndFault", STATISTICS_TAG); + TierOperationStatistic get = + new TierOperationStatistic( + authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndFault", STATISTICS_TAG); StatisticsManager.associate(get).withParent(authoritativeTier); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, authoritativeTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); + TierOperationStatistic evict + = new TierOperationStatistic( + authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(authoritativeTier); tieredOps.add(evict); @@ -281,17 +275,15 @@ public 
LowerCachingTier createCachingTier(Configuration store OffHeapStore lowerCachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.GetOutcome.class, LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.class, lowerCachingTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT, set(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.HIT_REMOVED)); - put(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS, set(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS)); - }}, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndRemove", STATISTICS_TAG); + TierOperationStatistic get + = new TierOperationStatistic( + lowerCachingTier, TierOperationOutcomes.GET_AND_REMOVE_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndRemove", STATISTICS_TAG); StatisticsManager.associate(get).withParent(lowerCachingTier); tieredOps.add(get); - TierOperationStatistic evict = new TierOperationStatistic(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class, StoreOperationOutcomes.EvictionOutcome.class, lowerCachingTier, new HashMap>() {{ - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.SUCCESS, set(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - put(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.FAILURE, set(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - }}, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); + TierOperationStatistic evict = + new TierOperationStatistic( + lowerCachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(lowerCachingTier); tieredOps.add(evict); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java index 2beaa58573..adf31eb0f2 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java @@ -37,14 +37,12 @@ import org.ehcache.spi.service.ServiceDependencies; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.terracotta.context.annotations.ContextAttribute; import org.terracotta.statistics.StatisticsManager; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -52,8 +50,6 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicReference; -import static org.ehcache.core.statistics.TierOperationStatistic.set; - /** * A {@link Store} implementation supporting a tiered caching model. 
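The sketch below is not part of the patch; it is a minimal illustration of the contract the new StatisticMapper enforces: the translation map must cover every target outcome and account for every source outcome exactly once, and all queries are delegated to the wrapped OperationStatistic. The Tier and Store enums are hypothetical placeholders mirroring Target/Source in StatisticMapperTest, the type-parameter order <SOURCE, TARGET> is assumed from the field declarations, and a null wrapped statistic is passed because only the constructor-time validation is exercised here, as the tests above also do.

import org.ehcache.core.statistics.StatisticMapper;

import java.util.EnumMap;
import java.util.Map;
import java.util.Set;

import static java.util.EnumSet.of;

public class StatisticMapperSketch {

  // Hypothetical outcome enums, standing in for tier-level and store-level outcomes
  enum Tier { HIT, MISS }
  enum Store { HIT, MISS, TIMEOUT }

  public static void main(String[] args) {
    // A complete translation: every Tier outcome is mapped and every Store outcome appears exactly once
    Map<Tier, Set<Store>> translation = new EnumMap<Tier, Set<Store>>(Tier.class);
    translation.put(Tier.HIT, of(Store.HIT));
    translation.put(Tier.MISS, of(Store.MISS, Store.TIMEOUT));

    // null is acceptable here because only the constructor-time validation is exercised
    StatisticMapper<Store, Tier> mapper = new StatisticMapper<Store, Tier>(translation, null);
    System.out.println(mapper.type()); // prints the target outcome type, Tier

    // An incomplete translation is rejected up front
    Map<Tier, Set<Store>> incomplete = new EnumMap<Tier, Set<Store>>(Tier.class);
    incomplete.put(Tier.HIT, of(Store.HIT));
    try {
      new StatisticMapper<Store, Tier>(incomplete, null);
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage()); // Translation does not contain target outcomes [MISS]
    }
  }
}

The TierOperationOutcomes constants introduced above (GET_TRANSLATION, GET_AND_FAULT_TRANSLATION, GET_AND_REMOVE_TRANSLATION, GET_OR_COMPUTEIFABSENT_TRANSLATION, EVICTION_TRANSLATION) are exactly such maps, which is what lets each store factory build its tier-level statistics through the shared StatisticMapper machinery instead of the inline HashMap construction this patch removes.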
*/ From 145b611ffbb82c825e5d87d60b7ec8ab4523e3da Mon Sep 17 00:00:00 2001 From: Chris Dennis Date: Mon, 26 Sep 2016 17:31:34 -0400 Subject: [PATCH 043/218] Closes #1465 Adds copying newCacheConfigurationBuilder(CacheConfiguration) method --- .../builders/CacheConfigurationBuilder.java | 19 +++++++ .../CacheConfigurationBuilderTest.java | 49 ++++++++++++++----- 2 files changed, 57 insertions(+), 11 deletions(-) diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java b/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java index df7317580f..1f8df4a384 100644 --- a/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/CacheConfigurationBuilder.java @@ -96,6 +96,25 @@ public static CacheConfigurationBuilder newCacheConfigurationBuilde return new CacheConfigurationBuilder(keyType, valueType, resourcePoolsBuilder.build()); } + /** + * Creates a new instance ready to produce a {@link CacheConfiguration} functionally equivalent to the supplied configuration. + * + * @param configuration seed configuration + * @param the key type + * @param the value type + * @return a {@code CacheConfigurationBuilder} + */ + public static CacheConfigurationBuilder newCacheConfigurationBuilder(CacheConfiguration configuration) { + CacheConfigurationBuilder builder = newCacheConfigurationBuilder(configuration.getKeyType(), configuration.getValueType(), configuration.getResourcePools()) + .withClassLoader(configuration.getClassLoader()) + .withEvictionAdvisor(configuration.getEvictionAdvisor()) + .withExpiry(configuration.getExpiry()); + for (ServiceConfiguration serviceConfig : configuration.getServiceConfigurations()) { + builder = builder.add(serviceConfig); + } + return builder; + } + private CacheConfigurationBuilder(Class keyType, Class valueType, ResourcePools resourcePools) { this.keyType = keyType; this.valueType = valueType; diff --git a/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java b/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java index de6f839fdf..8a91b6799f 100644 --- a/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java @@ -16,14 +16,10 @@ package org.ehcache.config.builders; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourceType; -import org.ehcache.config.ResourceUnit; +import org.ehcache.config.*; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.spi.loaderwriter.BulkCacheWritingException; -import org.ehcache.spi.serialization.SerializerException; +import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.expiry.Duration; import org.ehcache.expiry.Expirations; import org.ehcache.expiry.Expiry; @@ -32,23 +28,27 @@ import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration; import org.ehcache.impl.internal.classes.ClassInstanceConfiguration; -import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.spi.copy.Copier; +import org.ehcache.spi.loaderwriter.BulkCacheWritingException; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.serialization.Serializer; +import 
org.ehcache.spi.serialization.SerializerException; +import org.ehcache.spi.service.ServiceConfiguration; import org.hamcrest.Matcher; import org.hamcrest.Matchers; +import org.hamcrest.collection.IsIterableContainingInAnyOrder; +import org.hamcrest.core.IsSame; import org.junit.Test; +import org.mockito.Mockito; import java.nio.ByteBuffer; import java.util.Map; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.sameInstance; -import static org.hamcrest.Matchers.notNullValue; -import static org.junit.Assert.assertThat; +import static org.hamcrest.Matchers.*; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThat; +import static org.mockito.Mockito.mock; public class CacheConfigurationBuilderTest { @@ -279,4 +279,31 @@ public void testSizeOf() { assertEquals(sizeOfEngineConfiguration.getMaxObjectGraphSize(), 1000); } + + @Test + public void testCopyingOfExistingConfiguration() { + Class keyClass = Integer.class; + Class valueClass = String.class; + ClassLoader loader = mock(ClassLoader.class); + EvictionAdvisor eviction = mock(EvictionAdvisor.class); + Expiry expiry = mock(Expiry.class); + ServiceConfiguration service = mock(ServiceConfiguration.class); + + CacheConfiguration configuration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class, heap(10)) + .withClassLoader(loader) + .withEvictionAdvisor(eviction) + .withExpiry(expiry) + .add(service) + .build(); + + CacheConfiguration copy = CacheConfigurationBuilder.newCacheConfigurationBuilder(configuration).build(); + + assertThat(copy.getKeyType(), equalTo(keyClass)); + assertThat(copy.getValueType(), equalTo(valueClass)); + assertThat(copy.getClassLoader(), equalTo(loader)); + + assertThat(copy.getEvictionAdvisor(), IsSame.>sameInstance(eviction)); + assertThat(copy.getExpiry(), IsSame.>sameInstance(expiry)); + assertThat(copy.getServiceConfigurations(), containsInAnyOrder(IsSame.>sameInstance(service))); + } } From dc7b69b67c99759ff5a10c404eea0b68c1566c5a Mon Sep 17 00:00:00 2001 From: Chris Dennis Date: Thu, 29 Sep 2016 17:47:35 -0400 Subject: [PATCH 044/218] Issue #1467 Prevents shadow plugin from populating the 'Class-Path' manifest key --- buildSrc/src/main/groovy/EhDistribute.groovy | 1 + buildSrc/src/main/groovy/EhOsgi.groovy | 2 +- buildSrc/src/main/groovy/EhPomMangle.groovy | 6 +++--- clustered/clustered-dist/build.gradle | 4 ++-- dist/build.gradle | 4 ++-- 5 files changed, 9 insertions(+), 8 deletions(-) diff --git a/buildSrc/src/main/groovy/EhDistribute.groovy b/buildSrc/src/main/groovy/EhDistribute.groovy index af43ca6361..a815108ba5 100644 --- a/buildSrc/src/main/groovy/EhDistribute.groovy +++ b/buildSrc/src/main/groovy/EhDistribute.groovy @@ -40,6 +40,7 @@ class EhDistribute implements Plugin { def OSGI_OVERRIDE_KEYS = ['Import-Package', 'Export-Package', 'Private-Package', 'Tool', 'Bnd-LastModified', 'Created-By', 'Require-Capability'] project.configurations { + shadowCompile shadowProvided } diff --git a/buildSrc/src/main/groovy/EhOsgi.groovy b/buildSrc/src/main/groovy/EhOsgi.groovy index 1704876074..521d58f400 100644 --- a/buildSrc/src/main/groovy/EhOsgi.groovy +++ b/buildSrc/src/main/groovy/EhOsgi.groovy @@ -54,7 +54,7 @@ class EhOsgi implements Plugin { if (project.hasProperty('shadowJar')) { classesDir = project.shadowJar.archivePath - classpath = project.files(project.configurations.shadow, project.configurations.shadowProvided) + classpath = 
project.files(project.configurations.shadowCompile, project.configurations.shadowProvided) } else { classesDir = new File(project.buildDir, 'classes/main') //can't figure out where to get this value classpath = project.sourceSets.main.compileClasspath diff --git a/buildSrc/src/main/groovy/EhPomMangle.groovy b/buildSrc/src/main/groovy/EhPomMangle.groovy index a20274e57e..44b160fa56 100644 --- a/buildSrc/src/main/groovy/EhPomMangle.groovy +++ b/buildSrc/src/main/groovy/EhPomMangle.groovy @@ -27,7 +27,7 @@ import scripts.Utils * Removes all implicit dependencies from the pom * and adds only what is specified in (from shadowJar) * - * project.configurations.shadow (as compile) + * project.configurations.shadowCompile (as compile) * project.configurations.shadowProvided (as provided) * * as well as (these do not affect shadow) @@ -49,7 +49,7 @@ class EhPomMangle implements Plugin { project.plugins.apply 'signing' project.configurations { - shadow + shadowCompile shadowProvided pomOnlyCompile pomOnlyProvided @@ -60,7 +60,7 @@ class EhPomMangle implements Plugin { pom.scopeMappings.mappings.remove(project.configurations.runtime) pom.scopeMappings.mappings.remove(project.configurations.testCompile) pom.scopeMappings.mappings.remove(project.configurations.testRuntime) - pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.shadow, Conf2ScopeMappingContainer.COMPILE) + pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.shadowCompile, Conf2ScopeMappingContainer.COMPILE) pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.shadowProvided, Conf2ScopeMappingContainer.PROVIDED) //Anything extra to add to pom that isn't in the shadowed jar or compilation diff --git a/clustered/clustered-dist/build.gradle b/clustered/clustered-dist/build.gradle index 623b66b7a1..4f1fada655 100644 --- a/clustered/clustered-dist/build.gradle +++ b/clustered/clustered-dist/build.gradle @@ -54,7 +54,7 @@ dependencies { kit "org.terracotta.internal:terracotta-kit:$terracottaCoreVersion@zip" changing true - shadow "org.slf4j:slf4j-api:$parent.slf4jVersion" + shadowCompile "org.slf4j:slf4j-api:$parent.slf4jVersion" pomOnlyCompile "org.ehcache:ehcache:$parent.baseVersion" } @@ -99,7 +99,7 @@ distributions { from project(':dist').javadocJar.archivePath.getPath() } into ('client/lib') { - from configurations.shadow + from configurations.shadowCompile } into ('') { from 'src/assemble' diff --git a/dist/build.gradle b/dist/build.gradle index f762212b14..cca9c9d950 100644 --- a/dist/build.gradle +++ b/dist/build.gradle @@ -28,5 +28,5 @@ dependencies { apply plugin: EhDistribute dependencies { - shadow "org.slf4j:slf4j-api:$parent.slf4jVersion" -} \ No newline at end of file + shadowCompile "org.slf4j:slf4j-api:$parent.slf4jVersion" +} From 68d4ddbebe2dabb7de4a4024b39adff5938e6053 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 30 Sep 2016 12:45:52 +0200 Subject: [PATCH 045/218] :memo: #1474 Update readme following 3.1.3 release --- README.adoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.adoc b/README.adoc index 0f77cfb325..ce1a681a5c 100644 --- a/README.adoc +++ b/README.adoc @@ -12,9 +12,9 @@ For samples, documentation, and usage information, please see http://ehcache.org == Current release -We released 3.1.2 on September 9th 2016. +We released 3.1.3 on September 30th 2016. 
-The https://github.com/ehcache/ehcache3/releases/tag/v3.1.2[release notes] contain the links to the artifacts and the documentation to help you get started. +The https://github.com/ehcache/ehcache3/releases/tag/v3.1.3[release notes] contain the links to the artifacts and the documentation to help you get started. You should consider upgrading to 3.1.x as it does all 3.0.x does and more with a fully compatible API. The only thing to note is that transactional support has been moved to a separate jar. From 172ed7541d3c50826dac83c1be81e66eda1ec516 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Fri, 23 Sep 2016 03:41:55 +0530 Subject: [PATCH 046/218] Closes #1209 Passive sync of entity lifecycle state --- .../client/ActivePassiveSyncTest.java | 143 ++++++++++++++++++ .../messages/EhcacheEntityMessage.java | 1 + .../server/ConcurrencyStrategies.java | 2 +- .../clustered/server/EhcacheActiveEntity.java | 27 +++- .../server/EhcachePassiveEntity.java | 16 +- .../server/EhcacheServerEntityService.java | 3 +- .../server/EhcacheStateServiceImpl.java | 18 ++- .../messages/EhcacheSyncMessageCodec.java | 56 +++++++ .../messages/EntityStateSyncMessage.java | 79 ++++++++++ .../server/state/EhcacheStateService.java | 13 +- .../messages/EhcacheSyncMessageCodecTest.java | 101 +++++++++++++ 11 files changed, 437 insertions(+), 22 deletions(-) create mode 100644 clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodec.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java create mode 100644 clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java new file mode 100644 index 0000000000..e5857fc2ef --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java @@ -0,0 +1,143 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.client; + +import org.ehcache.clustered.client.config.ClusteredResourcePool; +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.EhcacheClientEntity; +import org.ehcache.clustered.client.internal.EhcacheClientEntityService; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; +import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; +import org.ehcache.clustered.server.EhcacheServerEntityService; +import org.ehcache.impl.serialization.CompactJavaSerializer; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.terracotta.offheapresource.OffHeapResourcesConfiguration; +import org.terracotta.offheapresource.OffHeapResourcesProvider; +import org.terracotta.offheapresource.config.MemoryUnit; +import org.terracotta.passthrough.PassthroughClusterControl; +import org.terracotta.passthrough.PassthroughServer; +import org.terracotta.passthrough.PassthroughTestHelpers; + +import java.lang.reflect.Field; +import java.net.URI; + +import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; +import static org.ehcache.config.units.MemoryUnit.MB; + +public class ActivePassiveSyncTest { + + private PassthroughClusterControl clusterControl; + private static String STRIPENAME = "stripe"; + private static String STRIPE_URI = "passthrough://" + STRIPENAME; + + @Before + public void setUp() throws Exception { + this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, + new PassthroughTestHelpers.ServerInitializer() { + @Override + public void registerServicesForServer(PassthroughServer server) { + server.registerServerEntityService(new EhcacheServerEntityService()); + server.registerClientEntityService(new EhcacheClientEntityService()); + server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); + server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); + server.registerServiceProvider(new OffHeapResourcesProvider(), + new OffHeapResourcesConfiguration(getOffheapResourcesType("test", 32, MemoryUnit.MB))); + + UnitTestConnectionService.addServerToStripe(STRIPENAME, server); + } + } + ); + + clusterControl.waitForActive(); + clusterControl.waitForRunningPassivesInStandby(); + } + + @After + public void tearDown() throws Exception { + UnitTestConnectionService.removeStripe(STRIPENAME); + clusterControl.tearDown(); + } + + @Test + public void testTierManagerStateSync() throws Exception { + clusterControl.terminateOnePassive(); + + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) + .autoCreate() + .defaultServerResource("test") + .resourcePool("foo", 8L, MB) + .build(); + + ClusteringService service = new ClusteringServiceFactory().create(configuration); + service.start(null); + EhcacheClientEntity clientEntity = 
getEntity(service); + + clusterControl.startOneServer(); + clusterControl.waitForRunningPassivesInStandby(); + clusterControl.terminateActive(); + + clientEntity.validate(configuration.getServerConfiguration()); + service.stop(); + } + + @Test + public void testServerStoreStateSync() throws Exception { + clusterControl.terminateOnePassive(); + + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) + .autoCreate() + .defaultServerResource("test") + .build(); + + ClusteringService service = new ClusteringServiceFactory().create(configuration); + service.start(null); + EhcacheClientEntity clientEntity = getEntity(service); + clientEntity.createCache("foo", getServerStoreConfiguration("test")); + + clusterControl.startOneServer(); + clusterControl.waitForRunningPassivesInStandby(); + clusterControl.terminateActive(); + + clientEntity.validateCache("foo", getServerStoreConfiguration("test")); + + service.stop(); + } + + private static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { + Field entity = clusteringService.getClass().getDeclaredField("entity"); + entity.setAccessible(true); + return (EhcacheClientEntity)entity.get(clusteringService); + } + + private static ServerStoreConfiguration getServerStoreConfiguration(String resourceName) { + ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(resourceName, 8, MB); + return new ServerStoreConfiguration(resourcePool.getPoolAllocation(), + String.class.getName(), String.class.getName(), null, null, CompactJavaSerializer.class.getName(), CompactJavaSerializer.class + .getName(), Consistency.STRONG); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java index 2036351ee9..bcebc157f0 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java @@ -36,6 +36,7 @@ public enum Type { LIFECYCLE_OP((byte) 10), SERVER_STORE_OP((byte) 20), STATE_REPO_OP((byte) 30), + SYNC_OP((byte) 40), ; private final byte code; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java index 453d9c8848..3b246948d7 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java @@ -33,7 +33,7 @@ public static final ConcurrencyStrategy defaultConc return new DefaultConcurrencyStrategy(bucketCount); } - static class DefaultConcurrencyStrategy implements ConcurrencyStrategy { + public static class DefaultConcurrencyStrategy implements ConcurrencyStrategy { public static final int DEFAULT_KEY = 1; private final int bucketCount; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 5b0de6a98c..3732e6e358 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ 
b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -28,6 +28,7 @@ import java.util.concurrent.atomic.AtomicInteger; import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import org.ehcache.clustered.common.PoolAllocation; @@ -49,6 +50,7 @@ import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.common.internal.store.ServerStore; +import org.ehcache.clustered.server.messages.EntityStateSyncMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; import org.slf4j.Logger; @@ -96,7 +98,7 @@ class EhcacheActiveEntity implements ActiveServerEntity>(); private final ConcurrentHashMap clientIdMap = new ConcurrentHashMap<>(); - private final Set invalidIds = Collections.newSetFromMap(new ConcurrentHashMap<>()); + private final Set trackedClients = Collections.newSetFromMap(new ConcurrentHashMap<>()); private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); private final ServerStoreCompatibility storeCompatibility = new ServerStoreCompatibility(); private final EhcacheEntityResponseFactory responseFactory; @@ -228,7 +230,7 @@ public void disconnected(ClientDescriptor clientDescriptor) { } UUID clientId = clientIdMap.remove(clientDescriptor); if (clientId != null) { - invalidIds.remove(clientId); + trackedClients.remove(clientId); ehcacheStateService.getClientMessageTracker().remove(clientId); } } @@ -291,7 +293,18 @@ public void onEviction(long key) { @Override public void synchronizeKeyToPassive(PassiveSynchronizationChannel syncChannel, int concurrencyKey) { - throw new UnsupportedOperationException("Active/passive is not supported yet"); + if (concurrencyKey == ConcurrencyStrategies.DefaultConcurrencyStrategy.DEFAULT_KEY) { + ServerSideConfiguration configuration = + new ServerSideConfiguration(ehcacheStateService.getDefaultServerResource(), ehcacheStateService.getSharedResourcePools()); + + Map storeConfigs = new HashMap<>(); + for (String storeName : ehcacheStateService.getStores()) { + ServerStoreImpl store = ehcacheStateService.getStore(storeName); + storeConfigs.put(storeName, store.getStoreConfiguration()); + } + + syncChannel.synchronizeToPassive(new EntityStateSyncMessage(configuration, storeConfigs, trackedClients)); + } } @Override @@ -547,7 +560,7 @@ public void destroy() { private void configure(ClientDescriptor clientDescriptor, ConfigureStoreManager message) throws ClusterException { validateClientConnected(clientDescriptor); if (ehcacheStateService.getClientMessageTracker().isConfigureApplicable(message.getClientId(), message.getId())) { - ehcacheStateService.configure(message); + ehcacheStateService.configure(message.getConfiguration()); } this.clientStateMap.get(clientDescriptor).attach(); } @@ -562,17 +575,17 @@ private void configure(ClientDescriptor clientDescriptor, ConfigureStoreManager */ private void validate(ClientDescriptor clientDescriptor, ValidateStoreManager message) throws ClusterException { validateClientConnected(clientDescriptor); - if (invalidIds.contains(message.getClientId())) { + if (trackedClients.contains(message.getClientId())) { throw new InvalidClientIdException("Client ID : " + 
message.getClientId() + " is already being tracked by Active"); } addClientId(clientDescriptor, message.getClientId()); - ehcacheStateService.validate(message); + ehcacheStateService.validate(message.getConfiguration()); this.clientStateMap.get(clientDescriptor).attach(); } private void addClientId(ClientDescriptor clientDescriptor, UUID clientId) { clientIdMap.put(clientDescriptor, clientId); - invalidIds.add(clientId); + trackedClients.add(clientId); } /** diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 71d5f15292..a697760a46 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -31,6 +31,7 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.clustered.server.messages.EntityStateSyncMessage; import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; @@ -42,6 +43,7 @@ import org.terracotta.offheapresource.OffHeapResources; import java.util.Collections; +import java.util.Map; import java.util.Set; import java.util.UUID; @@ -72,6 +74,9 @@ public void invoke(EhcacheEntityMessage message) { case STATE_REPO_OP: ehcacheStateService.getStateRepositoryManager().invoke((StateRepositoryOpMessage)message); break; + case SYNC_OP: + invokeSyncOperation((EntityStateSyncMessage) message); + break; default: throw new IllegalMessageException("Unknown message : " + message); } @@ -128,6 +133,15 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu } } + private void invokeSyncOperation(EntityStateSyncMessage message) throws ClusterException { + ehcacheStateService.configure(message.getConfiguration()); + for (Map.Entry entry : message.getStoreConfigs().entrySet()) { + ehcacheStateService.createStore(entry.getKey(), entry.getValue()); + } + message.getTrackedClients().stream().forEach(id -> ehcacheStateService.getClientMessageTracker().add(id)); + ehcacheStateService.getClientMessageTracker().setEntityConfiguredStamp(message.getClientId(), message.getId()); + } + private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterException { switch (message.operation()) { case CONFIGURE: @@ -145,7 +159,7 @@ private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterEx } private void configure(ConfigureStoreManager message) throws ClusterException { - ehcacheStateService.configure(message); + ehcacheStateService.configure(message.getConfiguration()); ehcacheStateService.getClientMessageTracker().setEntityConfiguredStamp(message.getClientId(), message.getId()); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java index 1999aa9cd1..9fa365ef3b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java @@ -18,6 +18,7 @@ import 
org.ehcache.clustered.common.internal.messages.EhcacheCodec; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.server.messages.EhcacheSyncMessageCodec; import org.terracotta.entity.ConcurrencyStrategy; import org.terracotta.entity.EntityServerService; import org.terracotta.entity.MessageCodec; @@ -64,6 +65,6 @@ public MessageCodec getMessageCodec @Override public SyncMessageCodec getSyncMessageCodec() { - return null; + return new EhcacheSyncMessageCodec(); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index 687c4dd6eb..f0da1589ee 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -49,6 +49,7 @@ import java.util.Map; import java.util.Set; +import static java.util.stream.Collectors.toMap; import static org.terracotta.offheapstore.util.MemoryUnit.GIGABYTES; import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; @@ -109,18 +110,22 @@ Set getDedicatedResourcePoolIds() { return Collections.unmodifiableSet(dedicatedResourcePools.keySet()); } - String getDefaultServerResource() { + public String getDefaultServerResource() { return this.defaultServerResource; } - public void validate(ValidateStoreManager message) throws ClusterException { + @Override + public Map getSharedResourcePools() { + return sharedResourcePools.entrySet().stream().collect(toMap(Map.Entry::getKey, e -> e.getValue().getPool())); + } + + public void validate(ServerSideConfiguration configuration) throws ClusterException { if (!isConfigured()) { throw new LifecycleException("Clustered Tier Manager is not configured"); } - ServerSideConfiguration incomingConfig = message.getConfiguration(); - if (incomingConfig != null) { - checkConfigurationCompatibility(incomingConfig); + if (configuration != null) { + checkConfigurationCompatibility(configuration); } } @@ -169,10 +174,9 @@ private static Map resolveResourcePools(Se return Collections.unmodifiableMap(pools); } - public void configure(ConfigureStoreManager message) throws ClusterException { + public void configure(ServerSideConfiguration configuration) throws ClusterException { if (!isConfigured()) { LOGGER.info("Configuring server-side clustered tier manager"); - ServerSideConfiguration configuration = message.getConfiguration(); this.defaultServerResource = configuration.getDefaultServerResource(); if (this.defaultServerResource != null) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodec.java new file mode 100644 index 0000000000..12e51ff7f3 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodec.java @@ -0,0 +1,56 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.messages; + +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.store.Util; +import org.terracotta.entity.MessageCodecException; +import org.terracotta.entity.SyncMessageCodec; + +import java.nio.ByteBuffer; + +public class EhcacheSyncMessageCodec implements SyncMessageCodec { + + private static final byte OPCODE_SIZE = 1; + + @Override + public byte[] encode(final int concurrencyKey, final EhcacheEntityMessage message) throws MessageCodecException { + if (message instanceof EntityStateSyncMessage) { + byte[] encodedMsg = Util.marshall(message); + ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + encodedMsg.length); + buffer.put(message.getOpCode()); + buffer.put(encodedMsg); + return buffer.array(); + } else { + throw new IllegalArgumentException(this.getClass().getName() + " can not encode " + message + " which is not a " + EntityStateSyncMessage.class); + } + } + + @Override + public EhcacheEntityMessage decode(final int concurrencyKey, final byte[] payload) throws MessageCodecException { + ByteBuffer message = ByteBuffer.wrap(payload); + byte[] encodedMsg = new byte[message.capacity() - OPCODE_SIZE]; + byte opCode = message.get(); + if (opCode == EhcacheEntityMessage.Type.SYNC_OP.getCode()) { + message.get(encodedMsg, 0, encodedMsg.length); + EntityStateSyncMessage entityMessage = (EntityStateSyncMessage) Util.unmarshall(encodedMsg); + return entityMessage; + } else { + throw new IllegalArgumentException("EntityStateSyncMessage operation not defined for : " + opCode); + } + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java new file mode 100644 index 0000000000..63153232fb --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java @@ -0,0 +1,79 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server.messages; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; + +import java.io.Serializable; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +public class EntityStateSyncMessage extends EhcacheEntityMessage implements Serializable { + + private final ServerSideConfiguration configuration; + private final Map storeConfigs; + private final Set trackedClients; + + public EntityStateSyncMessage(final ServerSideConfiguration configuration, + final Map storeConfigs, + final Set trackedClients) { + this.configuration = configuration; + this.storeConfigs = storeConfigs; + this.trackedClients = trackedClients; + } + + public ServerSideConfiguration getConfiguration() { + return configuration; + } + + public Map getStoreConfigs() { + return storeConfigs; + } + + public Set getTrackedClients() { + return trackedClients; + } + + @Override + public Type getType() { + return Type.SYNC_OP; + } + + @Override + public byte getOpCode() { + return getType().getCode(); + } + + @Override + public void setId(final long id) { + throw new UnsupportedOperationException(); + } + + @Override + public long getId() { + throw new UnsupportedOperationException(); + } + + @Override + public UUID getClientId() { + throw new UnsupportedOperationException(); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index 20bf72228b..b7ecbe8a67 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -16,30 +16,33 @@ package org.ehcache.clustered.server.state; +import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.ClusterException; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; import org.ehcache.clustered.server.ServerStoreImpl; import org.ehcache.clustered.server.repo.StateRepositoryManager; import com.tc.classloader.CommonComponent; +import java.util.Map; import java.util.Set; -import java.util.UUID; @CommonComponent public interface EhcacheStateService { + String getDefaultServerResource(); + + Map getSharedResourcePools(); + ServerStoreImpl getStore(String name); Set getStores(); void destroy(); - void validate(ValidateStoreManager message) throws ClusterException; + void validate(ServerSideConfiguration configuration) throws ClusterException; - void configure(ConfigureStoreManager message) throws ClusterException; + void configure(ServerSideConfiguration configuration) throws ClusterException; ServerStoreImpl createStore(String name, ServerStoreConfiguration serverStoreConfiguration) throws ClusterException; diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java new file mode 100644 index 0000000000..809c719820 --- /dev/null +++ 
b/clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java @@ -0,0 +1,101 @@ +package org.ehcache.clustered.server.messages; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.junit.Test; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.*; + +public class EhcacheSyncMessageCodecTest { + + @Test + public void testEncodeDecode() throws Exception { + Map sharedPools = new HashMap<>(); + ServerSideConfiguration.Pool pool1 = new ServerSideConfiguration.Pool(1, "foo1"); + ServerSideConfiguration.Pool pool2 = new ServerSideConfiguration.Pool(2, "foo2"); + sharedPools.put("shared-pool-1", pool1); + sharedPools.put("shared-pool-2", pool2); + ServerSideConfiguration serverSideConfig = new ServerSideConfiguration("default-pool", sharedPools); + + PoolAllocation poolAllocation1 = new PoolAllocation.Dedicated("dedicated", 4); + ServerStoreConfiguration serverStoreConfiguration1 = new ServerStoreConfiguration(poolAllocation1, + "storedKeyType1", "storedValueType1", "actualKeyType1", "actualValueType1", + "keySerializerType1", "valueSerializerType1", Consistency.STRONG); + + PoolAllocation poolAllocation2 = new PoolAllocation.Shared("shared"); + ServerStoreConfiguration serverStoreConfiguration2 = new ServerStoreConfiguration(poolAllocation2, + "storedKeyType2", "storedValueType2", "actualKeyType2", "actualValueType2", + "keySerializerType2", "valueSerializerType2", Consistency.EVENTUAL); + + PoolAllocation poolAllocation3 = new PoolAllocation.Unknown(); + ServerStoreConfiguration serverStoreConfiguration3 = new ServerStoreConfiguration(poolAllocation3, + "storedKeyType3", "storedValueType3", "actualKeyType3", "actualValueType3", + "keySerializerType3", "valueSerializerType3", Consistency.STRONG); + + Map storeConfigs = new HashMap<>(); + storeConfigs.put("cache1", serverStoreConfiguration1); + storeConfigs.put("cache2", serverStoreConfiguration2); + storeConfigs.put("cache3", serverStoreConfiguration3); + + UUID clientId1 = UUID.randomUUID(); + UUID clientId2 = UUID.randomUUID(); + Set clientIds = new HashSet<>(); + clientIds.add(clientId1); + clientIds.add(clientId2); + + EntityStateSyncMessage message = new EntityStateSyncMessage(serverSideConfig, storeConfigs, clientIds); + EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(); + EntityStateSyncMessage decodedMessage = (EntityStateSyncMessage) codec.decode(0, codec.encode(0, message)); + + assertThat(decodedMessage.getConfiguration().getDefaultServerResource(), is("default-pool")); + assertThat(decodedMessage.getConfiguration().getResourcePools(), is(sharedPools)); + assertThat(decodedMessage.getTrackedClients(), is(clientIds)); + assertThat(decodedMessage.getStoreConfigs().keySet(), containsInAnyOrder("cache1", "cache2", "cache3")); + + ServerStoreConfiguration serverStoreConfiguration = decodedMessage.getStoreConfigs().get("cache1"); + assertThat(serverStoreConfiguration.getPoolAllocation(), instanceOf(PoolAllocation.Dedicated.class)); + PoolAllocation.Dedicated dedicatedPool = (PoolAllocation.Dedicated) 
serverStoreConfiguration.getPoolAllocation(); + assertThat(dedicatedPool.getResourceName(), is("dedicated")); + assertThat(dedicatedPool.getSize(), is(4L)); + assertThat(serverStoreConfiguration.getStoredKeyType(), is("storedKeyType1")); + assertThat(serverStoreConfiguration.getStoredValueType(), is("storedValueType1")); + assertThat(serverStoreConfiguration.getActualKeyType(), is("actualKeyType1")); + assertThat(serverStoreConfiguration.getActualValueType(), is("actualValueType1")); + assertThat(serverStoreConfiguration.getKeySerializerType(), is("keySerializerType1")); + assertThat(serverStoreConfiguration.getValueSerializerType(), is("valueSerializerType1")); + assertThat(serverStoreConfiguration.getConsistency(), is(Consistency.STRONG)); + + serverStoreConfiguration = decodedMessage.getStoreConfigs().get("cache2"); + assertThat(serverStoreConfiguration.getPoolAllocation(), instanceOf(PoolAllocation.Shared.class)); + PoolAllocation.Shared sharedPool = (PoolAllocation.Shared) serverStoreConfiguration.getPoolAllocation(); + assertThat(sharedPool.getResourcePoolName(), is("shared")); + assertThat(serverStoreConfiguration.getStoredKeyType(), is("storedKeyType2")); + assertThat(serverStoreConfiguration.getStoredValueType(), is("storedValueType2")); + assertThat(serverStoreConfiguration.getActualKeyType(), is("actualKeyType2")); + assertThat(serverStoreConfiguration.getActualValueType(), is("actualValueType2")); + assertThat(serverStoreConfiguration.getKeySerializerType(), is("keySerializerType2")); + assertThat(serverStoreConfiguration.getValueSerializerType(), is("valueSerializerType2")); + assertThat(serverStoreConfiguration.getConsistency(), is(Consistency.EVENTUAL)); + + serverStoreConfiguration = decodedMessage.getStoreConfigs().get("cache3"); + assertThat(serverStoreConfiguration.getPoolAllocation(), instanceOf(PoolAllocation.Unknown.class)); + assertThat(serverStoreConfiguration.getStoredKeyType(), is("storedKeyType3")); + assertThat(serverStoreConfiguration.getStoredValueType(), is("storedValueType3")); + assertThat(serverStoreConfiguration.getActualKeyType(), is("actualKeyType3")); + assertThat(serverStoreConfiguration.getActualValueType(), is("actualValueType3")); + assertThat(serverStoreConfiguration.getKeySerializerType(), is("keySerializerType3")); + assertThat(serverStoreConfiguration.getValueSerializerType(), is("valueSerializerType3")); + assertThat(serverStoreConfiguration.getConsistency(), is(Consistency.STRONG)); + } +} From 8855882306a0d022a8bf460a12361fecea5793ec Mon Sep 17 00:00:00 2001 From: Abhilash Date: Wed, 5 Oct 2016 21:09:12 +0530 Subject: [PATCH 047/218] Adding header to EhcacheSYncMessageCodecTest #1209 --- .../messages/EhcacheSyncMessageCodecTest.java | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java index 809c719820..9847ed3aa7 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java @@ -1,3 +1,18 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.ehcache.clustered.server.messages; import org.ehcache.clustered.common.Consistency; From e3f24ff387dcad81498549e7c48dcead843ac6cb Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Sun, 25 Sep 2016 00:35:21 -0400 Subject: [PATCH 048/218] Start maintainable services if needed when destroying a cache (#1192) --- .../java/org/ehcache/core/EhcacheManager.java | 53 +++-- .../org/ehcache/core/EhcacheManagerTest.java | 149 +++++++++----- .../DefaultDiskResourceService.java | 25 ++- .../builders/PersistentCacheManagerTest.java | 103 +++++++--- ...eManagerDestroyRemovesPersistenceTest.java | 30 +-- .../internal/util/FileExistenceMatchers.java | 183 ++++++++---------- .../util/FileExistenceMatchersTest.java | 81 ++++++++ .../DefaultLocalPersistenceServiceTest.java | 36 ++-- 8 files changed, 424 insertions(+), 236 deletions(-) create mode 100644 impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java diff --git a/core/src/main/java/org/ehcache/core/EhcacheManager.java b/core/src/main/java/org/ehcache/core/EhcacheManager.java index f7e67db449..cc75748065 100644 --- a/core/src/main/java/org/ehcache/core/EhcacheManager.java +++ b/core/src/main/java/org/ehcache/core/EhcacheManager.java @@ -17,6 +17,7 @@ package org.ehcache.core; import org.ehcache.Cache; +import org.ehcache.CachePersistenceException; import org.ehcache.PersistentCacheManager; import org.ehcache.Status; import org.ehcache.config.Builder; @@ -27,36 +28,35 @@ import org.ehcache.core.config.BaseCacheConfiguration; import org.ehcache.core.config.DefaultConfiguration; import org.ehcache.core.config.store.StoreEventSourceConfiguration; -import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.events.CacheEventDispatcherFactory; +import org.ehcache.core.events.CacheEventListenerConfiguration; +import org.ehcache.core.events.CacheEventListenerProvider; import org.ehcache.core.events.CacheManagerListener; -import org.ehcache.core.spi.LifeCycledAdapter; import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.InternalCacheManager; -import org.ehcache.core.spi.store.Store; +import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.internal.store.StoreSupport; -import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.internal.util.ClassLoading; -import org.ehcache.event.CacheEventListener; -import org.ehcache.core.events.CacheEventListenerConfiguration; -import org.ehcache.core.events.CacheEventListenerProvider; -import org.ehcache.CachePersistenceException; import org.ehcache.core.spi.LifeCycled; -import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.core.spi.LifeCycledAdapter; +import org.ehcache.core.spi.service.CacheManagerProviderService; +import org.ehcache.core.spi.store.InternalCacheManager; +import org.ehcache.core.spi.store.Store; +import org.ehcache.event.CacheEventListener; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; 
import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; import org.ehcache.spi.loaderwriter.WriteBehindProvider; +import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; import org.ehcache.spi.service.MaintainableService; -import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -147,6 +147,15 @@ private void resolveServices() { serviceLocator.loadDependenciesOf(ServiceDeps.class); } + /** + * Exposed for testing purpose + * + * @return the status transitioner keeping the current cache manager state + */ + StatusTransitioner getStatusTransitioner() { + return statusTransitioner; + } + @Override public Cache getCache(String alias, Class keyType, Class valueType) { statusTransitioner.checkAvailable(); @@ -660,21 +669,35 @@ public void destroyCache(final String alias) throws CachePersistenceException { StatusTransitioner.Transition maintenance = null; try { maintenance = statusTransitioner.maintenance(); - maintenance.succeeded(); } catch(IllegalStateException e) { // the cache manager is already started, no need to put it in maintenance - // however, we need to check that we are in maintenance. Note that right after the check, the is a window - // for someone to go in maintenance + // however, we need to check that some other thread ISN'T in maintenance + // Note that right after the check, there is a window for someone to go in maintenance statusTransitioner.checkAvailable(); } + if(maintenance != null) { + try { + startMaintainableServices(); + maintenance.succeeded(); + } catch (Throwable t) { + throw maintenance.failed(t); + } + } + try { removeAndCloseWithoutNotice(alias); destroyPersistenceSpace(alias); } finally { // if it was started, stop it if(maintenance != null) { - statusTransitioner.exitMaintenance().succeeded(); + StatusTransitioner.Transition st = statusTransitioner.exitMaintenance(); + try { + stopMaintainableServices(); + st.succeeded(); + } catch (Throwable t) { + throw st.failed(t); + } } } diff --git a/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java b/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java index c0683e74e2..e573d1138c 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java @@ -18,7 +18,9 @@ import org.ehcache.Cache; import org.ehcache.CacheManager; +import org.ehcache.CachePersistenceException; import org.ehcache.PersistentCacheManager; +import org.ehcache.StateTransitionException; import org.ehcache.Status; import org.ehcache.UserManagedCache; import org.ehcache.config.CacheConfiguration; @@ -30,28 +32,29 @@ import org.ehcache.core.config.ResourcePoolsHelper; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.events.CacheEventDispatcherFactory; +import org.ehcache.core.events.CacheEventListenerProvider; import org.ehcache.core.events.CacheManagerListener; -import org.ehcache.core.spi.store.Store; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.core.internal.util.ClassLoading; -import 
org.ehcache.core.events.CacheEventListenerProvider; -import org.ehcache.StateTransitionException; -import org.ehcache.spi.service.ServiceProvider; +import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.core.spi.store.Store; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; import org.ehcache.spi.loaderwriter.WriteBehindProvider; +import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; +import org.ehcache.spi.service.ServiceProvider; import org.hamcrest.CoreMatchers; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.ExpectedException; import org.mockito.Matchers; import org.mockito.Mockito; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -85,6 +88,23 @@ @SuppressWarnings({ "unchecked", "rawtypes" }) public class EhcacheManagerTest { + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + private static Map> newCacheMap() { + return new HashMap>(); + } + + private List minimunCacheManagerServices() { + return new ArrayList(Arrays.asList( + mock(Store.Provider.class), + mock(CacheLoaderWriterProvider.class), + mock(WriteBehindProvider.class), + mock(CacheEventDispatcherFactory.class), + mock(CacheEventListenerProvider.class), + mock(LocalPersistenceService.class))); + } + @Test public void testCanDestroyAndClose() throws Exception { CacheConfiguration cacheConfiguration = new BaseCacheConfiguration(Long.class, String.class, null, @@ -99,7 +119,7 @@ public void testCanDestroyAndClose() throws Exception { when(store.getConfigurationChangeListeners()).thenReturn(new ArrayList()); when(cacheEventNotificationListenerServiceProvider.createCacheEventDispatcher(store)).thenReturn(mock(CacheEventDispatcher.class)); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("aCache", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); PersistentCacheManager cacheManager = new EhcacheManager(config, Arrays.asList( @@ -121,7 +141,7 @@ public void testCanDestroyAndClose() throws Exception { @Test public void testConstructionThrowsWhenNotBeingToResolveService() { - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); final DefaultConfiguration config = new DefaultConfiguration(caches, null, new ServiceCreationConfiguration() { @Override public Class getServiceType() { @@ -138,7 +158,8 @@ public Class getServiceType() { @Test public void testCreationFailsOnDuplicateServiceCreationConfiguration() { - DefaultConfiguration config = new DefaultConfiguration(Collections.>emptyMap(), null, new ServiceCreationConfiguration() { + Map> caches = newCacheMap(); + DefaultConfiguration config = new DefaultConfiguration(caches, null, new ServiceCreationConfiguration() { @Override public Class getServiceType() { return NoSuchService.class; @@ -159,18 +180,13 @@ public Class getServiceType() { @Test public void testStopAllServicesWhenCacheInitializationFails() { - Store.Provider storeProvider = mock(Store.Provider.class); - - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("myCache", mock(CacheConfiguration.class)); DefaultConfiguration config = new DefaultConfiguration(caches, null); - CacheManager 
cacheManager = new EhcacheManager(config, Arrays.asList( - storeProvider, - mock(CacheLoaderWriterProvider.class), - mock(WriteBehindProvider.class), - mock(CacheEventDispatcherFactory.class), - mock(CacheEventListenerProvider.class), - mock(LocalPersistenceService.class))); + List services = minimunCacheManagerServices(); + EhcacheManager cacheManager = new EhcacheManager(config, services); + + Store.Provider storeProvider = (Store.Provider) services.get(0); // because I know it's the first of the list try { cacheManager.init(); @@ -182,7 +198,7 @@ public void testStopAllServicesWhenCacheInitializationFails() { @Test public void testNoClassLoaderSpecified() { - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("foo", new BaseCacheConfiguration(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools())); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -216,7 +232,7 @@ public void testClassLoaderSpecified() { assertNotSame(cl1, cl2); assertNotSame(cl1.getClass(), cl2.getClass()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("foo1", new BaseCacheConfiguration(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools())); caches.put("foo2", new BaseCacheConfiguration(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools())); caches.put("foo3", new BaseCacheConfiguration(Object.class, Object.class, null, cl2, null, ResourcePoolsHelper.createHeapOnlyPools())); @@ -245,7 +261,7 @@ public void testClassLoaderSpecified() { @Test public void testReturnsNullForNonExistCache() { - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, getServices(null, null)); cacheManager.init(); @@ -267,7 +283,7 @@ public void testThrowsWhenAddingExistingCache() { when(storeProvider .createStore(Matchers.anyObject(), Matchers.anyVararg())).thenReturn(mock); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("bar", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -297,7 +313,7 @@ public void testThrowsWhenNotInitialized() { .createStore(Matchers.anyObject(), Matchers.anyVararg())).thenReturn(mock); final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration(Integer.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("bar", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, services); @@ -335,7 +351,7 @@ public void testThrowsWhenRetrievingCacheWithWrongTypes() { .createStore(Matchers.anyObject(), Matchers.anyVararg())).thenReturn(mock); final CacheConfiguration cacheConfiguration = new BaseCacheConfiguration(Integer.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("bar", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, services); @@ -379,27 +395,26 @@ public void testLifeCyclesCacheLoaders() throws Exception { when(cacheLoaderWriterProvider.createCacheLoaderWriter("foo", fooConfig)).thenReturn(fooLoaderWriter); + Map> caches = newCacheMap(); 
+ caches.put("bar", barConfig); + caches.put("foo", fooConfig); - @SuppressWarnings("serial") - final Configuration cfg = new DefaultConfiguration( - new HashMap>() {{ - put("bar", barConfig); - put("foo", fooConfig); - }}, + Configuration cfg = new DefaultConfiguration( + caches, getClass().getClassLoader() ); - final Store.Provider storeProvider = mock(Store.Provider.class); + Store.Provider storeProvider = mock(Store.Provider.class); when(storeProvider.rank(anySet(), anyCollection())).thenReturn(1); - final Store mock = mock(Store.class); - final CacheEventDispatcherFactory cenlProvider = mock(CacheEventDispatcherFactory.class); - final CacheEventDispatcher cenlServiceMock = mock(CacheEventDispatcher.class); + Store mock = mock(Store.class); + CacheEventDispatcherFactory cenlProvider = mock(CacheEventDispatcherFactory.class); + CacheEventDispatcher cenlServiceMock = mock(CacheEventDispatcher.class); when(cenlProvider.createCacheEventDispatcher(mock)).thenReturn(cenlServiceMock); - final Collection services = getServices(cacheLoaderWriterProvider, decoratorLoaderWriterProvider, storeProvider, cenlProvider); + Collection services = getServices(cacheLoaderWriterProvider, decoratorLoaderWriterProvider, storeProvider, cenlProvider); when(storeProvider .createStore(Matchers.anyObject(), Matchers.anyVararg())).thenReturn(mock); - final EhcacheManager manager = new EhcacheManager(cfg, services); + EhcacheManager manager = new EhcacheManager(cfg, services); manager.init(); verify(cacheLoaderWriterProvider).createCacheLoaderWriter("bar", barConfig); @@ -423,7 +438,7 @@ public void testDoesNotifyAboutCache() { final Collection services = getServices(mock, cenlProvider); when(mock.createStore(Matchers.anyObject())).thenReturn(mock(Store.class)); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, services); final CacheManagerListener listener = mock(CacheManagerListener.class); @@ -450,7 +465,7 @@ public void testDoesNotNotifyAboutCacheOnInitOrClose() { final Collection services = getServices(mock, cenlProvider); when(mock.createStore(Matchers.anyObject())).thenReturn(mock(Store.class)); final String cacheAlias = "bar"; - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put(cacheAlias, cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, services); @@ -472,7 +487,7 @@ public void testClosesStartedCachesDownWhenInitThrows() { final Collection services = getServices(storeProvider, null); final RuntimeException thrown = new RuntimeException(); when(storeProvider.createStore(Matchers.anyObject())).thenReturn(mock(Store.class)); - Map> cacheMap = new HashMap>(); + Map> cacheMap = newCacheMap(); cacheMap.put("foo", cacheConfiguration); cacheMap.put("bar", cacheConfiguration); cacheMap.put("foobar", cacheConfiguration); @@ -526,7 +541,7 @@ public void testClosesAllCachesDownWhenCloseThrows() { final Collection services = getServices(storeProvider, cenlProvider); final RuntimeException thrown = new RuntimeException(); when(storeProvider.createStore(Matchers.anyObject())).thenReturn(mock(Store.class)); - Map> cacheMap = new HashMap>(); + Map> cacheMap = newCacheMap(); cacheMap.put("foo", cacheConfiguration); cacheMap.put("bar", cacheConfiguration); cacheMap.put("foobar", cacheConfiguration); @@ -564,7 +579,7 @@ protected void closeEhcache(final String alias, 
final InternalCache ehcach @Test public void testDoesNotifyAboutLifecycle() { - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, getServices(null, null)); final CacheManagerListener listener = mock(CacheManagerListener.class); @@ -636,7 +651,7 @@ public void releaseCacheEventDispatcher(CacheEventDispatcher eventD when(mockStore.getConfigurationChangeListeners()).thenReturn(configurationChangeListenerList); when(storeProvider.createStore(Matchers.anyObject())).thenReturn(mockStore); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("foo", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); EhcacheManager cacheManager = new EhcacheManager(config, services) { @@ -665,7 +680,7 @@ public void testChangesToManagerAreReflectedInConfig() { when(cacheEventNotificationListenerServiceProvider.createCacheEventDispatcher(store)).thenReturn(mock(CacheEventDispatcher.class)); CacheConfiguration cache1Configuration = new BaseCacheConfiguration(Long.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("cache1", cache1Configuration); DefaultConfiguration config = new DefaultConfiguration(caches, null); @@ -710,7 +725,7 @@ public void testCachesAddedAtRuntimeGetReInited() { when(cacheEventNotificationListenerServiceProvider.createCacheEventDispatcher(store)).thenReturn(mock(CacheEventDispatcher.class)); CacheConfiguration cache1Configuration = new BaseCacheConfiguration(Long.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("cache1", cache1Configuration); DefaultConfiguration config = new DefaultConfiguration(caches, null); CacheManager cacheManager = new EhcacheManager(config, Arrays.asList( @@ -744,7 +759,7 @@ public void testCloseWhenRuntimeCacheCreationFails() throws Exception { when(storeProvider.rank(anySet(), anyCollection())).thenReturn(1); doThrow(new Error("Test EhcacheManager close.")).when(storeProvider).createStore(any(Store.Configuration.class), Matchers.anyVararg()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); DefaultConfiguration config = new DefaultConfiguration(caches, null); final CacheManager cacheManager = new EhcacheManager(config, Arrays.asList( storeProvider, @@ -778,7 +793,7 @@ public void testCloseWhenCacheCreationFailsDuringInitialization() throws Excepti doThrow(new Error("Test EhcacheManager close.")).when(storeProvider).createStore(any(Store.Configuration.class), Matchers.anyVararg()); CacheConfiguration cacheConfiguration = new BaseCacheConfiguration(Long.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); - Map> caches = new HashMap>(); + Map> caches = newCacheMap(); caches.put("cache1", cacheConfiguration); DefaultConfiguration config = new DefaultConfiguration(caches, null); final CacheManager cacheManager = new EhcacheManager(config, Arrays.asList( @@ -813,6 +828,46 @@ public void run() { } + @Test + public void testDestroyCacheFailsIfAlreadyInMaintenanceMode() throws CachePersistenceException, InterruptedException { + Map> caches = newCacheMap(); + DefaultConfiguration config = new DefaultConfiguration(caches, null); + final EhcacheManager manager = new EhcacheManager(config, minimunCacheManagerServices()); + + Thread 
thread = new Thread(new Runnable() { + @Override + public void run() { + manager.getStatusTransitioner().maintenance().succeeded(); + } + }); + thread.start(); + thread.join(1000); + + expectedException.expect(IllegalStateException.class); + expectedException.expectMessage("State is MAINTENANCE, yet you don't own it!"); + + manager.destroyCache("test"); + } + + @Test + public void testDestroyCacheFailsAndStopIfStartingServicesFails() throws CachePersistenceException, InterruptedException { + Map> caches = newCacheMap(); + DefaultConfiguration config = new DefaultConfiguration(caches, null); + List services = minimunCacheManagerServices(); + MaintainableService service = mock(MaintainableService.class); + doThrow(new RuntimeException("failed")).when(service).startForMaintenance(Mockito.>anyObject()); + services.add(service); + + EhcacheManager manager = new EhcacheManager(config, services); + + expectedException.expect(StateTransitionException.class); + expectedException.expectMessage("failed"); + + manager.destroyCache("test"); + + assertThat(manager.getStatus(), equalTo(Status.UNINITIALIZED)); + } + private Collection getServices(Store.Provider storeProvider, CacheEventDispatcherFactory cenlProvider) { return getServices(mock(CacheLoaderWriterProvider.class), mock(WriteBehindProvider.class), storeProvider != null ? storeProvider : mock(Store.Provider.class), diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java index 596972b314..9ed4252b07 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java @@ -51,6 +51,10 @@ public DefaultDiskResourceService() { this.knownPersistenceSpaces = new ConcurrentHashMap(); } + private boolean isStarted() { + return persistenceService != null; + } + /** * {@inheritDoc} */ @@ -88,7 +92,7 @@ public boolean handlesResourceType(ResourceType resourceType) { */ @Override public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { - if (persistenceService == null) { + if (!isStarted()) { return null; } boolean persistent = config.getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(); @@ -152,17 +156,22 @@ private PersistenceSpace createSpace(String name, boolean persistent) throws Cac } } + private void checkStarted() { + if(!isStarted()) { + throw new IllegalStateException(getClass().getName() + " should be started to call destroy"); + } + } + /** * {@inheritDoc} */ @Override public void destroy(String name) throws CachePersistenceException { - if (persistenceService == null) { - return; - } + checkStarted(); + PersistenceSpace space = knownPersistenceSpaces.remove(name); SafeSpaceIdentifier identifier = (space == null) ? 
- persistenceService.createSafeSpaceIdentifier(PERSISTENCE_SPACE_OWNER, name) : space.identifier.persistentSpaceId; + persistenceService.createSafeSpaceIdentifier(PERSISTENCE_SPACE_OWNER, name) : space.identifier.persistentSpaceId; persistenceService.destroySafeSpace(identifier, true); } @@ -171,9 +180,7 @@ public void destroy(String name) throws CachePersistenceException { */ @Override public void destroyAll() { - if (persistenceService == null) { - return; - } + checkStarted(); persistenceService.destroyAll(PERSISTENCE_SPACE_OWNER); } @@ -271,4 +278,4 @@ public File getDirectory() { return directory; } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java b/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java index 9c25d5e16a..aa6326391e 100644 --- a/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/PersistentCacheManagerTest.java @@ -21,7 +21,6 @@ import org.ehcache.config.units.MemoryUnit; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.junit.Before; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -29,10 +28,13 @@ import java.io.File; import java.io.IOException; +import java.util.Arrays; import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; -import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistOwnerClosedExpected; -import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistsOwnerOpenExpected; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; @@ -63,6 +65,15 @@ public void setup() throws IOException { public void testInitializesLocalPersistenceService() throws IOException { builder.build(true); assertTrue(rootDirectory.isDirectory()); + assertThat(Arrays.asList(rootDirectory.list()), contains(".lock")); + } + + @Test + public void testInitializesLocalPersistenceServiceAndCreateCache() throws IOException { + buildCacheManagerWithCache(true); + + assertThat(rootDirectory, isLocked()); + assertThat(rootDirectory, containsCacheDirectory(TEST_CACHE_ALIAS)); } @Test @@ -81,49 +92,77 @@ public void testDestroyCache_UnexistingCacheDoesNothing() throws CachePersistenc @Test public void testDestroyCache_Initialized_DestroyExistingCache() throws CachePersistenceException { - PersistentCacheManager manager = builder - .withCache(TEST_CACHE_ALIAS, - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true))) - .build(true); - assertThat(rootDirectory, fileExistsOwnerOpenExpected(1, TEST_CACHE_ALIAS)); + PersistentCacheManager manager = buildCacheManagerWithCache(true); + manager.destroyCache(TEST_CACHE_ALIAS); - assertThat(rootDirectory, fileExistsOwnerOpenExpected(0, TEST_CACHE_ALIAS)); + + assertThat(rootDirectory, isLocked()); + assertThat(rootDirectory, not(containsCacheDirectory(TEST_CACHE_ALIAS))); } - @Ignore("Ignoring as currently no support for destroying cache on a closed cache manager") + @Test public void 
testDestroyCache_Uninitialized_DestroyExistingCache() throws CachePersistenceException { - PersistentCacheManager manager = builder - .withCache(TEST_CACHE_ALIAS, - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true))) - .build(true); - assertThat(rootDirectory, fileExistsOwnerOpenExpected(1, TEST_CACHE_ALIAS)); - manager.close(); // pass it to uninitialized - assertThat(rootDirectory, fileExistOwnerClosedExpected(1, TEST_CACHE_ALIAS)); + PersistentCacheManager manager = buildCacheManagerWithCache(true); + + manager.close(); manager.destroyCache(TEST_CACHE_ALIAS); - assertThat(rootDirectory, fileExistOwnerClosedExpected(0, TEST_CACHE_ALIAS)); + + assertThat(rootDirectory, not(isLocked())); + assertThat(rootDirectory, not(containsCacheDirectory(TEST_CACHE_ALIAS))); } - @Ignore("Ignoring as currently no support for destroying cache on a closed cache manager") @Test public void testDestroyCache_CacheManagerUninitialized() throws CachePersistenceException { + PersistentCacheManager manager = buildCacheManagerWithCache(false); + + manager.destroyCache(TEST_CACHE_ALIAS); + + assertThat(rootDirectory, not(isLocked())); + assertThat(rootDirectory, not(containsCacheDirectory(TEST_CACHE_ALIAS))); + } + + @Test + public void testClose_DiskCacheLockReleased() throws CachePersistenceException { + PersistentCacheManager manager = buildCacheManagerWithCache(true); + + // Should lock the file when the CacheManager is opened + assertThat(rootDirectory, isLocked()); + + manager.close(); // pass it to uninitialized + + // Should unlock the file when the CacheManager is closed + assertThat(rootDirectory, not(isLocked())); + } + + @Test + public void testCloseAndThenOpenOnTheSameFile() throws CachePersistenceException { + // Open a CacheManager that will create a cache, close it and put it out of scope { - PersistentCacheManager manager = builder - .withCache(TEST_CACHE_ALIAS, - CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true))) - .build(true); - assertThat(rootDirectory, fileExistsOwnerOpenExpected(1, TEST_CACHE_ALIAS)); - manager.close(); // pass it to uninitialized - assertThat(rootDirectory, fileExistOwnerClosedExpected(1, TEST_CACHE_ALIAS)); + PersistentCacheManager manager = buildCacheManagerWithCache(true); + manager.close(); } + // Create a new CacheManager that will have the same cache. 
The cache should be there but the cache manager unlocked since the CacheManager isn't started { PersistentCacheManager manager = builder.build(false); - assertThat(rootDirectory, fileExistOwnerClosedExpected(1, TEST_CACHE_ALIAS)); - manager.destroyCache(TEST_CACHE_ALIAS); - assertThat(rootDirectory, fileExistOwnerClosedExpected(0, TEST_CACHE_ALIAS)); + assertThat(rootDirectory, not(isLocked())); + assertThat(rootDirectory, containsCacheDirectory(TEST_CACHE_ALIAS)); } } + + public static class A { + + public A() throws IOException { + throw new IOException(".."); + } + + } + + private PersistentCacheManager buildCacheManagerWithCache(boolean init) { + return builder + .withCache(TEST_CACHE_ALIAS, + CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().disk(10, MemoryUnit.MB, true))) + .build(init); + } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java b/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java index c079366ec9..cfe907e3d8 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/persistence/CacheManagerDestroyRemovesPersistenceTest.java @@ -29,16 +29,18 @@ import java.net.URISyntaxException; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistNoOwner; -import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistsOwnerOpen; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; /** * */ public class CacheManagerDestroyRemovesPersistenceTest { + public static final String PERSISTENT_CACHE = "persistent-cache"; private PersistentCacheManager persistentCacheManager; @Test @@ -51,7 +53,7 @@ public void testDestroyRemovesPersistenceData () throws URISyntaxException, Cach persistentCacheManager.close(); persistentCacheManager.destroy(); - assertThat(file, fileExistNoOwner()); + assertThat(file, not(isLocked())); } @Test @@ -59,9 +61,9 @@ public void testDestroyCacheDestroysPersistenceContext() throws URISyntaxExcepti File file = new File(getStoragePath(), "testDestroy"); initCacheManager(file); - persistentCacheManager.destroyCache("persistent-cache"); + persistentCacheManager.destroyCache(PERSISTENT_CACHE); - assertThat(file, fileExistsOwnerOpen(0)); + assertThat(file, not(containsCacheDirectory(PERSISTENT_CACHE))); } @Test @@ -69,15 +71,15 @@ public void testCreateCacheWithSameAliasAfterDestroy() throws URISyntaxException File file = new File(getStoragePath(), "testDestroy"); initCacheManager(file); - persistentCacheManager.destroyCache("persistent-cache"); + persistentCacheManager.destroyCache(PERSISTENT_CACHE); - persistentCacheManager.createCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + persistentCacheManager.createCache(PERSISTENT_CACHE, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .disk(10L, MemoryUnit.MB, 
true)) .build()); - assertNotNull(persistentCacheManager.getCache("persistent-cache", Long.class, String.class)); + assertNotNull(persistentCacheManager.getCache(PERSISTENT_CACHE, Long.class, String.class)); persistentCacheManager.close(); } @@ -87,7 +89,7 @@ public void testDestroyCacheWithUnknownAlias() throws URISyntaxException, CacheP File file = new File(getStoragePath(), "testDestroyUnknownAlias"); initCacheManager(file); - Cache cache = persistentCacheManager.getCache("persistent-cache", Long.class, String.class); + Cache cache = persistentCacheManager.getCache(PERSISTENT_CACHE, Long.class, String.class); cache.put(1L, "One"); @@ -96,15 +98,15 @@ public void testDestroyCacheWithUnknownAlias() throws URISyntaxException, CacheP PersistentCacheManager anotherPersistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() .with(new CacheManagerPersistenceConfiguration(file)).build(true); - anotherPersistentCacheManager.destroyCache("persistent-cache"); + anotherPersistentCacheManager.destroyCache(PERSISTENT_CACHE); - assertThat(file, fileExistsOwnerOpen(0)); + assertThat(file, not(containsCacheDirectory(PERSISTENT_CACHE))); } private void initCacheManager(File file) throws URISyntaxException { persistentCacheManager = CacheManagerBuilder.newCacheManagerBuilder() .with(new CacheManagerPersistenceConfiguration(file)) - .withCache("persistent-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + .withCache(PERSISTENT_CACHE, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .disk(10L, MemoryUnit.MB, true)) @@ -114,7 +116,7 @@ private void initCacheManager(File file) throws URISyntaxException { private void putValuesInCacheAndCloseCacheManager() { Cache preConfigured = - persistentCacheManager.getCache("persistent-cache", Long.class, String.class); + persistentCacheManager.getCache(PERSISTENT_CACHE, Long.class, String.class); preConfigured.put(1L, "foo"); persistentCacheManager.close(); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java index 35537203fe..f2ee0124e2 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java +++ b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchers.java @@ -24,129 +24,106 @@ import java.io.FilenameFilter; /** - * Matcher(s) for file existence in the persistence directory.. + * Matchers for file locks and existence in the persistence directory. 
* * @author RKAV */ public class FileExistenceMatchers { - public static Matcher fileExistsOwnerOpen(final int numExpectedFiles) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(File item) { - return fileExistsOwnerOpenWithName(item, numExpectedFiles, null); - } - - @Override - public void describeTo(Description description) { - } - }; - } + private static class DirectoryIsLockedMatcher extends TypeSafeMatcher { + @Override + protected boolean matchesSafely(File dir) { + File[] files = dir.listFiles(new FilenameFilter() { + @Override + public boolean accept(File dir, String name) { + return name.equals(".lock"); + } + }); + return files != null && files.length == 1; + } - public static Matcher fileExistsOwnerOpenExpected(final int numExpectedFiles, final String expected) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(File item) { - return fileExistsOwnerOpenWithName(item, numExpectedFiles, expected); - } + @Override + public void describeMismatchSafely(File item, Description mismatchDescription) { + mismatchDescription.appendValue(item) + .appendText(" doesn't contain a .lock file"); + } - @Override - public void describeTo(Description description) { - } - }; + @Override + public void describeTo(Description description) { + description.appendText("a .lock file in the directory"); + } } - public static Matcher fileExistOwnerClosed(final int numExpectedFiles) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(File item) { - return fileExistsOwnerClosedWithName(item, numExpectedFiles, null); - } + private static class ContainsCacheDirectoryMatcher extends TypeSafeMatcher { - @Override - public void describeTo(Description description) { - } - }; - } + private String parentDirectory; + private String startWith; - public static Matcher fileExistOwnerClosedExpected(final int numExpectedFiles, final String expected) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(File item) { - return fileExistsOwnerClosedWithName(item, numExpectedFiles, expected); - } + public ContainsCacheDirectoryMatcher(String safeSpaceOwner, String cacheAlias) { + this.parentDirectory = safeSpaceOwner; + this.startWith = cacheAlias + "_"; + } - @Override - public void describeTo(Description description) { - } - }; - } + @Override + protected boolean matchesSafely(File item) { + // The directory layout is that there will be a directory named 'file' + // If the cache directory exists, it will contain a directory starting with 'cacheAlias_' - public static Matcher fileExistNoOwner() { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(File item) { - File[] files = item.listFiles(); - return files == null || files.length == 0; + File file = new File(item, parentDirectory); + if(!file.exists() || !file.isAbsolute()) { + return false; } - @Override - public void describeTo(Description description) { + File[] files = file.listFiles(new FilenameFilter() { + @Override + public boolean accept(File dir, String name) { + return name.startsWith(startWith); + } + }); - } - }; - } + return files != null && files.length == 1 && files[0].isDirectory(); + } - private static boolean fileExistsOwnerOpenWithName(final File item, final int numExpectedFiles, final String expected) { - boolean matches = false; - File[] files = item.listFiles(); - if (files == null) { - return false; + @Override + public void describeMismatchSafely(File item, Description mismatchDescription) { + 
mismatchDescription.appendValue(item)
+          .appendText(" doesn't contain a file starting with " + startWith);
     }
-    if (files.length == 2) {
-      int i = files[0].isDirectory() ? 0 : 1;
-      if (expected != null) {
-        files = files[i].listFiles(new FilenameFilter() {
-          @Override
-          public boolean accept(File dir, String name) {
-            return name.startsWith(expected);
-          }
-        });
-      } else {
-        files = files[i].isDirectory() ? files[i].listFiles() : null;
-      }
-      if (numExpectedFiles > 0) {
-        matches = files != null && files.length == numExpectedFiles;
-      } else {
-        matches = files == null || files.length == 0;
-      }
+
+    @Override
+    public void describeTo(Description description) {
+      description.appendText("contains a file starting with '" + startWith + "'");
     }
-    return matches;
   }

-  private static boolean fileExistsOwnerClosedWithName(final File item, final int numExpectedFiles, final String expected) {
-    boolean matches = false;
-    File[] files = item.listFiles();
-    if (files == null) {
-      return false;
-    }
-    if (files.length == 1) {
-      if (expected != null) {
-        files = files[0].listFiles(new FilenameFilter() {
-          @Override
-          public boolean accept(File dir, String name) {
-            return name.startsWith(expected);
-          }
-        });
-      } else {
-        files = files[0].isDirectory() ? files[0].listFiles() : null;
-      }
-      if (numExpectedFiles > 0) {
-        matches = files != null && files.length == numExpectedFiles;
-      } else {
-        matches = files == null || files.length == 0;
-      }
-    }
-    return matches;
+  /**
+   * Matcher checking if the persistence directory is locked by a cache manager
+   *
+   * @return the matcher
+   */
+  public static Matcher isLocked() {
+    return new DirectoryIsLockedMatcher();
+  }
+
+  /**
+   * Matcher checking if a cache directory starting with this name exists in the 'file' safe space
+   *
+   * @param cacheAlias cache alias that will be the prefix of the cache directory
+   * @return the matcher
+   */
+  public static Matcher containsCacheDirectory(String cacheAlias) {
+    return new ContainsCacheDirectoryMatcher("file", cacheAlias);
   }
+
+  /**
+   * Matcher checking if a cache directory starting with this name exists within the given safe space
+   *
+   * @param safeSpaceOwner name of the safe space owner.
It is also the name of the safe space root directory + * @param cacheAlias cache alias that will be the prefix of the cache directory + * @return the matcher + */ + public static Matcher containsCacheDirectory(String safeSpaceOwner, String cacheAlias) { + return new ContainsCacheDirectoryMatcher(safeSpaceOwner, cacheAlias); + } + } diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java new file mode 100644 index 0000000000..e539fdb4f7 --- /dev/null +++ b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java @@ -0,0 +1,81 @@ +package org.ehcache.impl.internal.util; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.File; +import java.io.IOException; + +import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; +import static org.hamcrest.CoreMatchers.not; +import static org.junit.Assert.assertThat; + +/** + * @author Henri Tremblay + */ +public class FileExistenceMatchersTest { + + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + @Test + public void directoryIsLocked() throws Exception { + File dir = folder.newFolder(); + + assertThat(dir, not(isLocked())); + } + + @Test + public void directoryIsNotLocked() throws Exception { + File dir = folder.newFolder(); + File lock = new File(dir, ".lock"); + lock.createNewFile(); + + assertThat(dir, isLocked()); + } + + @Test + public void containsCacheDirectory_noFileDir() throws IOException { + File dir = folder.newFolder(); + + assertThat(dir, not(containsCacheDirectory("test123"))); + } + + @Test + public void containsCacheDirectory_noCacheDir() throws IOException { + File dir = folder.newFolder(); + File file = new File(dir, "file"); + file.mkdir(); + + assertThat(dir, not(containsCacheDirectory("test123"))); + } + + @Test + public void containsCacheDirectory_moreThanOneCacheDir() throws IOException { + File dir = folder.newFolder(); + File file = new File(dir, "file"); + file.mkdir(); + new File(file, "test123_aaa").mkdir(); + new File(file, "test123_bbb").mkdir(); + + assertThat(dir, not(containsCacheDirectory("test123"))); + } + + @Test + public void containsCacheDirectory_existing() throws IOException { + File dir = folder.newFolder(); + new File(dir, "file/test123_aaa").mkdirs(); + + assertThat(dir, containsCacheDirectory("test123")); + } + + @Test + public void containsCacheDirectory_withSafeSpaceExisting() throws IOException { + File dir = folder.newFolder(); + new File(dir, "safespace/test123_aaa").mkdirs(); + + assertThat(dir, containsCacheDirectory("safespace", "test123")); + } +} diff --git a/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java b/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java index 4ff2ad14f3..4f07b1bae8 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java +++ b/impl/src/test/java/org/ehcache/impl/persistence/DefaultLocalPersistenceServiceTest.java @@ -17,28 +17,25 @@ package org.ehcache.impl.persistence; import org.ehcache.CachePersistenceException; +import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import 
org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; -import org.mockito.Mockito; import java.io.File; import java.io.IOException; -import static org.ehcache.core.spi.service.LocalPersistenceService.SafeSpaceIdentifier; -import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistOwnerClosed; -import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistsOwnerOpen; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.containsCacheDirectory; +import static org.ehcache.impl.internal.util.FileExistenceMatchers.isLocked; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.junit.Assert.fail; import static org.junit.Assume.assumeTrue; -import static org.mockito.Mockito.never; -import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistOwnerClosed; -import static org.ehcache.impl.internal.util.FileExistenceMatchers.fileExistsOwnerOpen; public class DefaultLocalPersistenceServiceTest { @@ -114,17 +111,24 @@ public void testPhysicalDestroy() throws IOException, CachePersistenceException final File f = folder.newFolder("testPhysicalDestroy"); final DefaultLocalPersistenceService service = new DefaultLocalPersistenceService(new DefaultPersistenceConfiguration(f)); service.start(null); + assertThat(service.getLockFile().exists(), is(true)); - SafeSpaceIdentifier id = service.createSafeSpaceIdentifier("test", "test"); + assertThat(f, isLocked()); + + LocalPersistenceService.SafeSpaceIdentifier id = service.createSafeSpaceIdentifier("test", "test"); service.createSafeSpace(id); - assertThat(f, fileExistsOwnerOpen(1)); + + assertThat(f, containsCacheDirectory("test", "test")); + // try to destroy the physical space without the logical id - SafeSpaceIdentifier newId = service.createSafeSpaceIdentifier("test", "test"); + LocalPersistenceService.SafeSpaceIdentifier newId = service.createSafeSpaceIdentifier("test", "test"); service.destroySafeSpace(newId, false); - assertThat(f, fileExistsOwnerOpen(0)); + + assertThat(f, not(containsCacheDirectory("test", "test"))); + service.stop(); - assertThat(f, fileExistOwnerClosed(0)); - assertThat(service.getLockFile().exists(), is(false)); + + assertThat(f, not(isLocked())); } @Test From 0af4d0a4d5db3f0b95fb5cd7382ecbaf576fc870 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Tue, 4 Oct 2016 18:17:41 -0400 Subject: [PATCH 049/218] Add assertJ --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 60e3f30ed8..fa3ad7953f 100644 --- a/build.gradle +++ b/build.gradle @@ -77,7 +77,7 @@ subprojects { } else { compileOnly 'com.google.code.findbugs:annotations:2.0.3' } - testCompile 'junit:junit:4.12', 'org.hamcrest:hamcrest-library:1.3' + testCompile 'junit:junit:4.12', 'org.assertj:assertj-core:1.7.1', 'org.hamcrest:hamcrest-library:1.3' testCompile('org.mockito:mockito-core:1.9.5') { exclude group:'org.hamcrest', module:'hamcrest-core' } From 9aa96de71dec72c380488e975b02c11b8e9c9377 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Tue, 4 Oct 2016 18:39:30 -0400 Subject: [PATCH 050/218] Go in cache scope maintenance mode when destroying a cache on a stopped cache manager (close #1192) --- .../spi/service/MaintainableService.java | 13 +- 
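The change below introduces a MaintainableService.MaintenanceScope argument so that destroying a single cache on a stopped cache manager only needs cache-scoped maintenance, while destroying the whole cache manager still requires cache-manager scope. A condensed sketch of the resulting behaviour, taken from the DefaultClusteringService hunk later in this patch (generic parameters elided, only the scope handling shown; not a complete implementation):

  @Override
  public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) {
    initClusterConnection();
    createEntityFactory();
    if (maintenanceScope == MaintenanceScope.CACHE_MANAGER) {
      // cache-manager-wide maintenance still demands cluster-wide leadership
      if (!entityFactory.acquireLeadership(entityIdentifier)) {
        entityFactory = null;
        closeConnection();
        throw new IllegalStateException("Couldn't acquire cluster-wide maintenance lease");
      }
    }
    // CACHE scope skips the leadership requirement, so a single cache can be destroyed
    // without exclusive access to the clustered tier manager
    inMaintenance = true;
  }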
.../service/DefaultClusteringService.java | 50 +++---- .../client/ClusteredCacheDestroyTest.java | 21 ++- .../service/DefaultClusteringServiceTest.java | 43 ++---- .../java/org/ehcache/core/EhcacheManager.java | 8 +- .../org/ehcache/core/EhcacheManagerTest.java | 4 +- .../DefaultDiskResourceService.java | 35 +++-- .../DefaultLocalPersistenceService.java | 2 +- .../persistence/TestDiskResourceService.java | 2 +- .../DefaultDiskResourceServiceTest.java | 122 ++++++++++++++++++ 10 files changed, 223 insertions(+), 77 deletions(-) create mode 100644 impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java diff --git a/api/src/main/java/org/ehcache/spi/service/MaintainableService.java b/api/src/main/java/org/ehcache/spi/service/MaintainableService.java index d5fc4adce7..61f21e5137 100644 --- a/api/src/main/java/org/ehcache/spi/service/MaintainableService.java +++ b/api/src/main/java/org/ehcache/spi/service/MaintainableService.java @@ -22,11 +22,20 @@ */ @PluralService public interface MaintainableService extends Service { + + enum MaintenanceScope { + /** Will impact the cache manager */ + CACHE_MANAGER, + /** Will impact one or many caches */ + CACHE + } + /** * Start this service for maintenance, based on its default configuration. - * * @param serviceProvider enables to depend on other maintainable services + * @param maintenanceScope the scope of the maintenance + * */ - void startForMaintenance(ServiceProvider serviceProvider); + void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 439ac809b7..dd5f89c9ff 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -163,6 +163,20 @@ public void start(final ServiceProvider serviceProvider) { } } + @Override + public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { + initClusterConnection(); + createEntityFactory(); + if(maintenanceScope == MaintenanceScope.CACHE_MANAGER) { + if (!entityFactory.acquireLeadership(entityIdentifier)) { + entityFactory = null; + closeConnection(); + throw new IllegalStateException("Couldn't acquire cluster-wide maintenance lease"); + } + } + inMaintenance = true; + } + private void createEntityFactory() { entityFactory = new EhcacheClientEntityFactory(clusterConnection, operationTimeouts); } @@ -204,18 +218,6 @@ private EhcacheClientEntity autoCreateEntity() throws EhcacheEntityValidationExc } } - @Override - public void startForMaintenance(ServiceProvider serviceProvider) { - initClusterConnection(); - createEntityFactory(); - if (!entityFactory.acquireLeadership(entityIdentifier)) { - entityFactory = null; - closeConnection(); - throw new IllegalStateException("Couldn't acquire cluster-wide maintenance lease"); - } - inMaintenance = true; - } - @Override public void stop() { LOGGER.info("stop called for clustered tiers on {}", this.clusterUri); @@ -302,22 +304,28 @@ public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier id } } + private void checkStarted() { + if(!isStarted()) { + throw new IllegalStateException(getClass().getName() + " should be started to call destroy"); + 
} + } + @Override public void destroy(String name) throws CachePersistenceException { - boolean wasStarted = isStarted(); - // If the cluster isn't started, start it first to be able to destroy the cache - if(!wasStarted) { - initClusterConnection(); - createEntityFactory(); + checkStarted(); + + // will happen when in maintenance mode + if(entity == null) { try { entity = entityFactory.retrieve(entityIdentifier, configuration.getServerConfiguration()); } catch (EntityNotFoundException e) { // No entity on the server, so no need to destroy anything } catch (TimeoutException e) { throw new CachePersistenceException("Could not connect to the clustered tier manager '" + entityIdentifier - + "'; retrieve operation timed out", e); + + "'; retrieve operation timed out", e); } } + try { entity.destroyCache(name); } catch (ClusteredTierDestructionException e) { @@ -325,15 +333,11 @@ public void destroy(String name) throws CachePersistenceException { } catch (TimeoutException e) { throw new CachePersistenceException("Could not destroy clustered tier '" + name + "' on " + clusterUri + "; destroy operation timed out" + clusterUri, e); - } finally { - if (!wasStarted) { - stop(); - } } } protected boolean isStarted() { - return entity != null; + return entityFactory != null; } @Override diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java index 8e6b78909f..c191427c81 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredCacheDestroyTest.java @@ -46,7 +46,6 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @@ -186,7 +185,17 @@ public void testDestroyCacheWithCacheManagerStopped() throws CachePersistenceExc } @Test - public void testDestroyCacheWithCacheManagerStopped_whenUsedExclusively() throws CachePersistenceException { + public void testDestroyCacheWithTwoCacheManagerOnSameCache_forbiddenWhenInUse() throws CachePersistenceException { + PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); + PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); + + expectedException.expect(CachePersistenceException.class); + expectedException.expectMessage("Cannot destroy clustered tier 'clustered-cache': in use by 1 other client(s) (on terracotta://example.com:9540)"); + persistentCacheManager1.destroyCache(CLUSTERED_CACHE); + } + + @Test + public void testDestroyCacheWithTwoCacheManagerOnSameCache_firstRemovesSecondDestroy() throws CachePersistenceException { PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); @@ -196,12 +205,12 @@ public void testDestroyCacheWithCacheManagerStopped_whenUsedExclusively() throws } @Test - public void testDestroyCacheWithCacheManagerStopped_forbiddenWhenInUse() throws CachePersistenceException { - PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(true); + public void testDestroyCacheWithTwoCacheManagerOnSameCache_secondDoesntHaveTheCacheButPreventExclusiveAccessToCluster() 
throws CachePersistenceException { + PersistentCacheManager persistentCacheManager1 = clusteredCacheManagerBuilder.build(false); PersistentCacheManager persistentCacheManager2 = clusteredCacheManagerBuilder.build(true); - expectedException.expect(CachePersistenceException.class); - expectedException.expectMessage("Cannot destroy clustered tier 'clustered-cache': in use by 1 other client(s) (on terracotta://example.com:9540)"); + persistentCacheManager2.removeCache(CLUSTERED_CACHE); + persistentCacheManager1.destroyCache(CLUSTERED_CACHE); } } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java index b1169a3c72..f5e58208f0 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java @@ -48,6 +48,7 @@ import org.ehcache.impl.internal.spi.serialization.DefaultSerializationProvider; import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.persistence.StateRepository; +import org.ehcache.spi.service.MaintainableService; import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; @@ -78,6 +79,7 @@ import static org.ehcache.config.ResourceType.Core.OFFHEAP; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; @@ -329,7 +331,7 @@ public void testStartForMaintenanceAutoStart() throws Exception { .build(); DefaultClusteringService service = new DefaultClusteringService(configuration); assertThat(service.isConnected(), is(false)); - service.startForMaintenance(null); + service.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); assertThat(service.isConnected(), is(true)); assertThat(UnitTestConnectionService.getConnectionProperties(clusterUri).size(), is(1)); @@ -354,7 +356,7 @@ public void testStartForMaintenanceOtherAutoCreate() throws Exception { DefaultClusteringService maintenanceService = new DefaultClusteringService(configuration); try { - maintenanceService.startForMaintenance(null); + maintenanceService.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); fail("Expecting IllegalStateException"); } catch (IllegalStateException e) { // Expected @@ -392,7 +394,7 @@ public void testStartForMaintenanceOtherCreated() throws Exception { assertThat(activeEntity.getConnectedClients().size(), is(0)); DefaultClusteringService maintenanceService = new DefaultClusteringService(configuration); - maintenanceService.startForMaintenance(null); + maintenanceService.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); assertThat(activeEntities.size(), is(1)); @@ -445,14 +447,14 @@ public void testStartForMaintenanceInterlock() throws Exception { .autoCreate() .build(); DefaultClusteringService maintenanceService1 = new DefaultClusteringService(configuration); - maintenanceService1.startForMaintenance(null); + maintenanceService1.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); List 
activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); assertThat(activeEntities.size(), is(0)); DefaultClusteringService maintenanceService2 = new DefaultClusteringService(configuration); try { - maintenanceService2.startForMaintenance(null); + maintenanceService2.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); fail("Expecting IllegalStateException"); } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString(" acquire cluster-wide ")); @@ -468,14 +470,14 @@ public void testStartForMaintenanceSequence() throws Exception { .autoCreate() .build(); DefaultClusteringService maintenanceService1 = new DefaultClusteringService(configuration); - maintenanceService1.startForMaintenance(null); + maintenanceService1.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); maintenanceService1.stop(); List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); assertThat(activeEntities.size(), is(0)); DefaultClusteringService maintenanceService2 = new DefaultClusteringService(configuration); - maintenanceService2.startForMaintenance(null); + maintenanceService2.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); maintenanceService2.stop(); activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); @@ -547,7 +549,7 @@ public void testBasicDestroyAll() throws Exception { assertThat(e.getMessage(), containsString("Maintenance mode required")); } - createService.startForMaintenance(null); + createService.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); createService.destroyAll(); @@ -1322,7 +1324,7 @@ public void testGetServerStoreProxyDedicatedDestroy() throws Exception { } @Test - public void testDestroyWhenStoppedWorks() throws Exception { + public void testDestroyCantBeCalledIfStopped() throws Exception { String cacheAlias = "cacheAlias"; String targetResource = "serverResource2"; ClusteringServiceConfiguration configuration = @@ -1331,28 +1333,11 @@ public void testDestroyWhenStoppedWorks() throws Exception { .defaultServerResource("defaultResource") .build(); DefaultClusteringService creationService = new DefaultClusteringService(configuration); - creationService.start(null); - - DefaultSerializationProvider serializationProvider = new DefaultSerializationProvider(null); - serializationProvider.start(providerContaining()); - Store.Configuration storeConfiguration = - getDedicatedStoreConfig(targetResource, serializationProvider, Long.class, String.class); - ServerStoreProxy serverStoreProxy = creationService.getServerStoreProxy( - getClusteredCacheIdentifier(creationService, cacheAlias), storeConfiguration, Consistency.EVENTUAL); - assertThat(serverStoreProxy.getCacheId(), is(cacheAlias)); + expectedException.expect(IllegalStateException.class); + expectedException.expectMessage(endsWith(" should be started to call destroy")); - creationService.stop(); creationService.destroy(cacheAlias); - - List activeEntities = observableEhcacheServerEntityService.getServedActiveEntities(); - ObservableEhcacheActiveEntity activeEntity = activeEntities.get(0); - - assertThat(activeEntity.getDedicatedResourcePoolIds(), is(Matchers.empty())); - assertThat(activeEntity.getStores(), is(Matchers.empty())); - assertThat(activeEntity.getInUseStores().keySet(), is(Matchers.empty())); - - assertThat("Service must be stopped after destroying the cache", creationService.isStarted(), is(false)); } @Test @@ -1403,7 +1388,7 @@ 
public void testFullDestroyAll() throws Exception { assertThat(e.getMessage(), containsString("Maintenance mode required")); } - createService.startForMaintenance(null); + createService.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); createService.destroyAll(); diff --git a/core/src/main/java/org/ehcache/core/EhcacheManager.java b/core/src/main/java/org/ehcache/core/EhcacheManager.java index cc75748065..141ec363de 100644 --- a/core/src/main/java/org/ehcache/core/EhcacheManager.java +++ b/core/src/main/java/org/ehcache/core/EhcacheManager.java @@ -678,7 +678,7 @@ public void destroyCache(final String alias) throws CachePersistenceException { if(maintenance != null) { try { - startMaintainableServices(); + startMaintainableServices(MaintainableService.MaintenanceScope.CACHE); maintenance.succeeded(); } catch (Throwable t) { throw maintenance.failed(t); @@ -715,7 +715,7 @@ private void destroyPersistenceSpace(String alias) throws CachePersistenceExcept public void destroy() throws CachePersistenceException { StatusTransitioner.Transition st = statusTransitioner.maintenance(); try { - startMaintainableServices(); + startMaintainableServices(MaintainableService.MaintenanceScope.CACHE_MANAGER); st.succeeded(); } catch (Throwable t) { throw st.failed(t); @@ -731,11 +731,11 @@ public void destroy() throws CachePersistenceException { LOGGER.info("All persistent data destroyed for {}", simpleName); } - private void startMaintainableServices() { + private void startMaintainableServices(MaintainableService.MaintenanceScope maintenanceScope) { ServiceProvider provider = getMaintainableServiceProvider(); Collection services = serviceLocator.getServicesOfType(MaintainableService.class); for (MaintainableService service : services) { - service.startForMaintenance(provider); + service.startForMaintenance(provider, maintenanceScope); } } diff --git a/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java b/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java index e573d1138c..b0c713e498 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java @@ -77,6 +77,7 @@ import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyCollection; import static org.mockito.Matchers.anySet; +import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -855,7 +856,8 @@ public void testDestroyCacheFailsAndStopIfStartingServicesFails() throws CachePe DefaultConfiguration config = new DefaultConfiguration(caches, null); List services = minimunCacheManagerServices(); MaintainableService service = mock(MaintainableService.class); - doThrow(new RuntimeException("failed")).when(service).startForMaintenance(Mockito.>anyObject()); + doThrow(new RuntimeException("failed")).when(service) + .startForMaintenance(Mockito.>anyObject(), eq(MaintainableService.MaintenanceScope.CACHE)); services.add(service); EhcacheManager manager = new EhcacheManager(config, services); diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java index 9ed4252b07..193d604a35 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java @@ -42,17 +42,18 @@ */ public class 
DefaultDiskResourceService implements DiskResourceService { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDiskResourceService.class); - private static final String PERSISTENCE_SPACE_OWNER = "file"; + static final String PERSISTENCE_SPACE_OWNER = "file"; private final ConcurrentMap knownPersistenceSpaces; private volatile LocalPersistenceService persistenceService; + private volatile boolean isStarted; public DefaultDiskResourceService() { this.knownPersistenceSpaces = new ConcurrentHashMap(); } private boolean isStarted() { - return persistenceService != null; + return isStarted; } /** @@ -61,14 +62,16 @@ private boolean isStarted() { @Override public void start(final ServiceProvider serviceProvider) { persistenceService = serviceProvider.getService(LocalPersistenceService.class); + isStarted = true; } /** * {@inheritDoc} */ @Override - public void startForMaintenance(ServiceProvider serviceProvider) { + public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { persistenceService = serviceProvider.getService(LocalPersistenceService.class); + isStarted = true; } /** @@ -76,6 +79,7 @@ public void startForMaintenance(ServiceProvider serviceProv */ @Override public void stop() { + isStarted = false; persistenceService = null; } @@ -92,7 +96,7 @@ public boolean handlesResourceType(ResourceType resourceType) { */ @Override public PersistenceSpaceIdentifier getPersistenceSpaceIdentifier(String name, CacheConfiguration config) throws CachePersistenceException { - if (!isStarted()) { + if (persistenceService == null) { return null; } boolean persistent = config.getResourcePools().getPoolForResource(ResourceType.Core.DISK).isPersistent(); @@ -117,7 +121,7 @@ public void releasePersistenceSpaceIdentifier(PersistenceSpaceIdentifier iden } } if (name == null) { - throw new CachePersistenceException("Unknown space " + identifier); + throw newCachePersistenceException(identifier); } PersistenceSpace persistenceSpace = knownPersistenceSpaces.remove(name); if (persistenceSpace != null) { @@ -169,6 +173,10 @@ private void checkStarted() { public void destroy(String name) throws CachePersistenceException { checkStarted(); + if(persistenceService == null) { + return; + } + PersistenceSpace space = knownPersistenceSpaces.remove(name); SafeSpaceIdentifier identifier = (space == null) ? 
persistenceService.createSafeSpaceIdentifier(PERSISTENCE_SPACE_OWNER, name) : space.identifier.persistentSpaceId; @@ -181,6 +189,11 @@ public void destroy(String name) throws CachePersistenceException { @Override public void destroyAll() { checkStarted(); + + if(persistenceService == null) { + return; + } + persistenceService.destroyAll(PERSISTENCE_SPACE_OWNER); } @@ -197,11 +210,14 @@ public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier id FileBasedStateRepository previous = persistenceSpace.stateRepositories.putIfAbsent(name, stateRepository); if (previous != null) { return previous; - } else { - return stateRepository; } + return stateRepository; } - throw new CachePersistenceException("Unknown space " + identifier); + throw newCachePersistenceException(identifier); + } + + private CachePersistenceException newCachePersistenceException(PersistenceSpaceIdentifier identifier) { + return new CachePersistenceException("Unknown space: " + identifier); } private PersistenceSpace getPersistenceSpace(PersistenceSpaceIdentifier identifier) { @@ -222,9 +238,8 @@ public FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpa if (containsSpace(identifier)) { return new DefaultFileBasedPersistenceContext( FileUtils.createSubDirectory(((DefaultPersistenceSpaceIdentifier)identifier).persistentSpaceId.getRoot(), name)); - } else { - throw new CachePersistenceException("Unknown space: " + identifier); } + throw newCachePersistenceException(identifier); } private boolean containsSpace(PersistenceSpaceIdentifier identifier) { diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java index 666293a92c..f9ed490873 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java @@ -164,7 +164,7 @@ public void destroyAll(String owner) { } @Override - public synchronized void startForMaintenance(ServiceProvider serviceProvider) { + public synchronized void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { internalStart(); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java b/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java index aa101e9b6d..fdc1d39c90 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java +++ b/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java @@ -124,7 +124,7 @@ public void start(ServiceProvider serviceProvider) { } @Override - public void startForMaintenance(ServiceProvider serviceProvider) { + public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { //ignore } diff --git a/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java b/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java new file mode 100644 index 0000000000..208f70d2e0 --- /dev/null +++ b/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java @@ -0,0 +1,122 @@ +package org.ehcache.impl.persistence; + +import org.ehcache.CachePersistenceException; +import org.ehcache.config.ResourceType; +import org.ehcache.core.spi.service.LocalPersistenceService; +import org.ehcache.spi.service.Service; +import 
org.ehcache.spi.service.ServiceProvider; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.runners.Enclosed; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * @author Henri Tremblay + */ +@RunWith(Enclosed.class) +public class DefaultDiskResourceServiceTest { + + public static abstract class AbstractDefaultDiskResourceServiceTest { + + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + protected DefaultDiskResourceService service = new DefaultDiskResourceService(); + protected ServiceProvider serviceProvider = mock(ServiceProvider.class); + + @Before + public void setup() { + service.start(serviceProvider); + } + + @After + public void tearDown() { + service.stop(); + } + + } + + public static class WithPersistenceService extends AbstractDefaultDiskResourceServiceTest { + + LocalPersistenceService persistenceService = mock(LocalPersistenceService.class); + + @Before + public void setup() { + when(serviceProvider.getService(LocalPersistenceService.class)).thenReturn(persistenceService); + super.setup(); + } + + @Test + public void testHandlesResourceType() { + assertThat(service.handlesResourceType(ResourceType.Core.DISK)).isTrue(); + } + + @Test + public void testDestroyAll() { + service.destroyAll(); + verify(persistenceService).destroyAll(DefaultDiskResourceService.PERSISTENCE_SPACE_OWNER); + } + + @Test + public void testDestroy() throws CachePersistenceException { + service.destroy("test"); // should do nothing + } + + // Some tests still missing here + } + + public static class WithoutPersistenceService extends AbstractDefaultDiskResourceServiceTest { + + @Test + public void testHandlesResourceType() { + assertThat(service.handlesResourceType(ResourceType.Core.DISK)).isFalse(); + } + + @Test + public void testDestroyAll() { + service.destroyAll(); // should do nothing + } + + @Test + public void testDestroy() throws CachePersistenceException { + service.destroy("test"); // should do nothing + } + + @Test + public void testCreatePersistenceContextWithin() throws CachePersistenceException { + expectedException.expect(CachePersistenceException.class); + expectedException.expectMessage("Unknown space: null"); + service.createPersistenceContextWithin(null, "test"); + } + + @Test + public void testGetPersistenceSpaceIdentifier() throws CachePersistenceException { + assertThat(service.getPersistenceSpaceIdentifier("test", null)).isNull(); + } + + + @Test + public void testGetStateRepositoryWithin() throws CachePersistenceException { + expectedException.expect(CachePersistenceException.class); + expectedException.expectMessage("Unknown space: null"); + assertThat(service.getStateRepositoryWithin(null, "test")).isNull(); + } + + @Test + public void testReleasePersistenceSpaceIdentifier() throws CachePersistenceException { + expectedException.expect(CachePersistenceException.class); + expectedException.expectMessage("Unknown space: null"); + assertThat(service.getStateRepositoryWithin(null, "test")).isNull(); + } + + } + +} From 0f0d124b1f77ae5fd0319ddbaac8d3df74fd0b31 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Thu, 6 Oct 2016 16:54:14 +0530 Subject: [PATCH 051/218] Checkstyle failure + follow up for #1192 --- .../ehcache/spi/service/MaintainableService.java | 3 
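The DefaultDiskResourceService changes above decouple "is started" from "has a LocalPersistenceService": the service now tracks its lifecycle with its own volatile flag, and the persistence-backed operations either return null or quietly do nothing when no LocalPersistenceService was resolved, which is exactly what the WithoutPersistenceService tests pin down. The following is a minimal, self-contained sketch of that guard pattern; GuardedDiskService and its members are illustrative stand-ins, not Ehcache types, and the exact exception thrown by checkStarted() in the real service is not shown in this hunk.

// Illustrative stand-in for the guard pattern above; NOT an Ehcache class.
public class GuardedDiskService {

  private volatile Object persistenceService; // stand-in for LocalPersistenceService, may stay null
  private volatile boolean isStarted;

  public void start(Object resolvedPersistenceService) {
    this.persistenceService = resolvedPersistenceService; // null when no disk persistence is available
    this.isStarted = true;                                // started even without a persistence service
  }

  public void stop() {
    this.isStarted = false;
    this.persistenceService = null;
  }

  public String getPersistenceSpaceIdentifier(String name) {
    if (persistenceService == null) {
      return null; // mirrors the early null return added above
    }
    return "space-for-" + name;
  }

  public void destroyAll() {
    if (!isStarted) {
      throw new IllegalStateException("Service not started"); // assumed equivalent of checkStarted()
    }
    if (persistenceService == null) {
      return; // nothing is on disk, so destroying everything is a no-op
    }
    // here the real service delegates to persistenceService.destroyAll(PERSISTENCE_SPACE_OWNER)
  }
}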
+++ .../internal/util/FileExistenceMatchersTest.java | 16 ++++++++++++++++ .../DefaultDiskResourceServiceTest.java | 15 +++++++++++++++ 3 files changed, 34 insertions(+) diff --git a/api/src/main/java/org/ehcache/spi/service/MaintainableService.java b/api/src/main/java/org/ehcache/spi/service/MaintainableService.java index 61f21e5137..6403678917 100644 --- a/api/src/main/java/org/ehcache/spi/service/MaintainableService.java +++ b/api/src/main/java/org/ehcache/spi/service/MaintainableService.java @@ -23,6 +23,9 @@ @PluralService public interface MaintainableService extends Service { + /** + * Defines Maintenance scope + */ enum MaintenanceScope { /** Will impact the cache manager */ CACHE_MANAGER, diff --git a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java index e539fdb4f7..67f03f6464 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/util/FileExistenceMatchersTest.java @@ -1,3 +1,19 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.ehcache.impl.internal.util; import org.junit.Rule; diff --git a/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java b/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java index 208f70d2e0..aca37fc4b0 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java +++ b/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java @@ -1,3 +1,18 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
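startForMaintenance now receives a MaintenanceScope (documented in the hunk just above); implementations that do not differentiate, such as DefaultLocalPersistenceService and TestDiskResourceService, simply ignore the extra argument. Below is a hedged sketch of how an implementation could react to the scope; only the CACHE_MANAGER constant is visible in this hunk, so no other constants are assumed, and ScopeAwareService is not an Ehcache class.

// Sketch only: MaintenanceScope is modelled with just the constant shown above.
public class ScopeAwareService {

  enum MaintenanceScope { CACHE_MANAGER } // stand-in for MaintainableService.MaintenanceScope

  public void startForMaintenance(Object serviceProvider, MaintenanceScope maintenanceScope) {
    if (maintenanceScope == MaintenanceScope.CACHE_MANAGER) {
      // acquire whatever is needed for cache-manager-wide maintenance, e.g. an exclusive lock
    }
    // a service that does not care about the scope can start exactly as before
  }
}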
+ */ package org.ehcache.impl.persistence; import org.ehcache.CachePersistenceException; From 2a31ee16dd3cc4ce79f8961a20ba0fed9a44b766 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Thu, 6 Oct 2016 16:54:48 +0530 Subject: [PATCH 052/218] Findbugs fix + follow up #1192 --- .../client/internal/service/DefaultClusteringService.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index dd5f89c9ff..b1860cc6e0 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -327,7 +327,9 @@ public void destroy(String name) throws CachePersistenceException { } try { - entity.destroyCache(name); + if (entity != null) { + entity.destroyCache(name); + } } catch (ClusteredTierDestructionException e) { throw new CachePersistenceException(e.getMessage() + " (on " + clusterUri + ")", e); } catch (TimeoutException e) { From c8d2696b7e38c1ea1f9ca317a6dafb8f5e1355c6 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Tue, 20 Sep 2016 15:27:26 +0530 Subject: [PATCH 053/218] Key value replication using IEntityMessanger #1211 --- build.gradle | 2 +- .../client/internal/EhcacheClientEntity.java | 2 +- .../client/ActivePassiveSyncTest.java | 17 +- .../ClusteringServiceConfigurationTest.java | 2 +- ...steringServiceConfigurationParserTest.java | 2 +- .../messages/ClientIDTrackerMessage.java | 126 ++++++++++++ .../messages/ClientIdTrackerMessageCodec.java | 91 +++++++++ .../internal/messages/EhcacheCodec.java | 11 +- .../messages/EhcacheEntityMessage.java | 1 + .../messages/ServerStoreOpMessage.java | 2 +- .../ClientIDTrackerMessageCodecTest.java | 67 +++++++ .../internal/messages/EhcacheCodecTest.java | 29 ++- ...onReadWriteLockPassiveIntegrationTest.java | 1 - ...asicClusteredCacheCRUDReplicationTest.java | 183 ++++++++++++++++++ .../BasicLifeCyclePassiveReplicationTest.java | 31 ++- .../test/resources/clusteredConfiguration.txt | 2 +- .../clustered/server/EhcacheActiveEntity.java | 38 +++- .../server/EhcachePassiveEntity.java | 42 +++- .../server/EhcacheActiveEntityTest.java | 3 + .../server/EhcachePassiveEntityTest.java | 20 +- 20 files changed, 640 insertions(+), 32 deletions(-) create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIdTrackerMessageCodec.java create mode 100644 clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodecTest.java create mode 100644 clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheCRUDReplicationTest.java rename clustered/integration-test/src/test/java/org/ehcache/clustered/{ => replication}/BasicLifeCyclePassiveReplicationTest.java (85%) diff --git a/build.gradle b/build.gradle index fa3ad7953f..eb3256b36c 100644 --- a/build.gradle +++ b/build.gradle @@ -32,7 +32,7 @@ ext { terracottaCoreVersion = '5.0.7-beta' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.7.beta' + terracottaPassthroughTestingVersion = '1.0.7.beta2' 
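The Findbugs follow-up above wraps entity.destroyCache(name) in a null check: destroy(name) can be reached while the client-side entity reference was never assigned, in which case there is nothing to call through to. Why the reference can legitimately be null is not spelled out in the commit message, so treat the comment below as an inference; DestroyGuardSketch is illustrative, not the real DefaultClusteringService.

// Abridged, illustrative version of the guarded call above.
class DestroyGuardSketch {
  private volatile Object entity; // stand-in for EhcacheClientEntity; may never be assigned

  void destroy(String name) {
    Object e = this.entity; // read once so the check and the use see the same value
    if (e != null) {
      // the real code calls entity.destroyCache(name) here; with a null reference the
      // method simply skips the server call instead of throwing a NullPointerException
    }
  }
}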
entityTestLibVersion = terracottaPassthroughTestingVersion galvanVersion = '1.0.7-beta' diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index 852936ce93..ebb8f81923 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -327,7 +327,7 @@ private static T waitFor(TimeoutDuration timeLimit, InvokeFuture future) */ public static final class Timeouts { - public static final TimeoutDuration DEFAULT_READ_OPERATION_TIMEOUT = TimeoutDuration.of(5, TimeUnit.SECONDS); + public static final TimeoutDuration DEFAULT_READ_OPERATION_TIMEOUT = TimeoutDuration.of(20, TimeUnit.SECONDS); private final TimeoutDuration readOperationTimeout; private final TimeoutDuration mutativeOperationTimeout; diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java index e5857fc2ef..79725ef33e 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java @@ -24,14 +24,18 @@ import org.ehcache.clustered.client.internal.EhcacheClientEntityService; import org.ehcache.clustered.client.internal.UnitTestConnectionService; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; +import org.ehcache.clustered.client.internal.service.ClusteredTierManagerValidationException; import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; import org.ehcache.clustered.client.service.ClusteringService; import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; import org.ehcache.clustered.server.EhcacheServerEntityService; import org.ehcache.impl.serialization.CompactJavaSerializer; +import org.hamcrest.Matchers; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.terracotta.offheapresource.OffHeapResourcesConfiguration; @@ -43,9 +47,14 @@ import java.lang.reflect.Field; import java.net.URI; +import java.util.concurrent.TimeoutException; import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; public class ActivePassiveSyncTest { @@ -100,7 +109,13 @@ public void testTierManagerStateSync() throws Exception { clusterControl.waitForRunningPassivesInStandby(); clusterControl.terminateActive(); - clientEntity.validate(configuration.getServerConfiguration()); + try { + clientEntity.validate(configuration.getServerConfiguration()); + fail("ClusteredTierManagerValidationException Expected."); + } catch (ClusteredTierManagerValidationException e) { + assertThat(e.getCause(), instanceOf(LifecycleException.class)); + assertThat(e.getCause().getMessage(), containsString("is already being 
tracked with Client Id")); + } service.stop(); } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java index 858e85f2ae..d6cfee67a2 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/config/ClusteringServiceConfigurationTest.java @@ -56,7 +56,7 @@ public void testGetReadOperationTimeout() throws Exception { @Test public void testDefaultReadOperationTimeout() throws Exception { - assertThat(new ClusteringServiceConfiguration(DEFAULT_URI).getReadOperationTimeout(), is(TimeoutDuration.of(5, TimeUnit.SECONDS))); + assertThat(new ClusteringServiceConfiguration(DEFAULT_URI).getReadOperationTimeout(), is(TimeoutDuration.of(20, TimeUnit.SECONDS))); } @Test(expected = NullPointerException.class) diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java index 000df77bce..8b72a046ca 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/xml/ClusteringServiceConfigurationParserTest.java @@ -170,7 +170,7 @@ public void testGetTimeoutNone() throws Exception { ServiceLocator.findSingletonAmongst(ClusteringServiceConfiguration.class, serviceCreationConfigurations); assertThat(clusteringServiceConfiguration, is(notNullValue())); - assertThat(clusteringServiceConfiguration.getReadOperationTimeout(), is(TimeoutDuration.of(5, TimeUnit.SECONDS))); + assertThat(clusteringServiceConfiguration.getReadOperationTimeout(), is(TimeoutDuration.of(20, TimeUnit.SECONDS))); } @Test diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java new file mode 100644 index 0000000000..495c97cc1f --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java @@ -0,0 +1,126 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.store.Chain; + +import java.util.UUID; + +/** + * This message is sent by the Active Entity to Passive Entity. 
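Several hunks above raise the default read operation timeout from 5 to 20 seconds (EhcacheClientEntity.Timeouts, ClusteringServiceConfigurationTest and ClusteringServiceConfigurationParserTest). The sketch below shows what configuration code can now assume; the cluster URI is hypothetical, the single-URI constructor is the one exercised by the tests above, and the import path of TimeoutDuration is an assumption because the patch only shows simple class names.

import java.net.URI;
import java.util.concurrent.TimeUnit;

import org.ehcache.clustered.client.config.ClusteringServiceConfiguration;
import org.ehcache.clustered.client.config.TimeoutDuration; // assumed package, see note above

public class ReadTimeoutDefaultSketch {
  public static void main(String[] args) {
    // Hypothetical cluster URI, only used to build a configuration object.
    ClusteringServiceConfiguration configuration =
        new ClusteringServiceConfiguration(URI.create("terracotta://example.com:9510/my-application"));

    // With this patch the implicit read timeout is 20 seconds instead of 5; code that
    // prefers the old, shorter limit now has to configure a timeout explicitly.
    System.out.println(configuration.getReadOperationTimeout()
        .equals(TimeoutDuration.of(20, TimeUnit.SECONDS))); // prints true after this change
  }
}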
+ */ +public class ClientIDTrackerMessage extends EhcacheEntityMessage { + + public enum ReplicationOp { + CHAIN_REPLICATION_OP((byte) 31), + CLIENTID_TRACK_OP((byte) 32) + ; + + private final byte replicationOpCode; + + ReplicationOp(byte replicationOpCode) { + this.replicationOpCode = replicationOpCode; + } + + public byte getReplicationOpCode() { + return replicationOpCode; + } + + + public static ReplicationOp getReplicationOp(byte replicationOpCode) { + switch (replicationOpCode) { + case 31: + return CHAIN_REPLICATION_OP; + case 32: + return CLIENTID_TRACK_OP; + default: + throw new IllegalArgumentException("Replication operation not defined for : " + replicationOpCode); + } + } + } + + private final UUID clientId; + private final long msgId; + + public ClientIDTrackerMessage(long msgId, UUID clientId) { + this.msgId = msgId; + this.clientId = clientId; + } + + @Override + public Type getType() { + return Type.REPLICATION_OP; + } + + @Override + public byte getOpCode() { + return operation().getReplicationOpCode(); + } + + @Override + public void setId(long id) { + throw new UnsupportedOperationException("This method is not supported on replication message"); + } + + public ReplicationOp operation() { + return ReplicationOp.CLIENTID_TRACK_OP; + } + + public long getId() { + return msgId; + } + + public UUID getClientId() { + return clientId; + } + + public static class ChainReplicationMessage extends ClientIDTrackerMessage implements ConcurrentEntityMessage { + + private final String cacheId; + private final long key; + private final Chain chain; + + public ChainReplicationMessage(String cacheId, long key, Chain chain, long msgId, UUID clientId) { + super(msgId, clientId); + this.cacheId = cacheId; + this.key = key; + this.chain = chain; + } + + public String getCacheId() { + return this.cacheId; + } + + public long getKey() { + return key; + } + + public Chain getChain() { + return chain; + } + + @Override + public ReplicationOp operation() { + return ReplicationOp.CHAIN_REPLICATION_OP; + } + + @Override + public int concurrencyKey() { + return (int) (this.cacheId.hashCode() + key); + } + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIdTrackerMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIdTrackerMessageCodec.java new file mode 100644 index 0000000000..325c43fa61 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIdTrackerMessageCodec.java @@ -0,0 +1,91 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
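ChainReplicationMessage above implements ConcurrentEntityMessage and derives its concurrency key as cacheId.hashCode() + key, so replicated appends for the same cache entry always land on the same concurrency key (and are therefore applied in order), while appends to different keys are free to be processed on different keys. A self-contained illustration of that rule follows; ConcurrencyKeySketch is not part of the patch.

// Reproduces only the concurrencyKey() arithmetic of ChainReplicationMessage above.
public class ConcurrencyKeySketch {

  static int concurrencyKey(String cacheId, long key) {
    return (int) (cacheId.hashCode() + key);
  }

  public static void main(String[] args) {
    // Two replications of the same cache entry share a key and stay ordered.
    System.out.println(concurrencyKey("clustered-cache", 42L) == concurrencyKey("clustered-cache", 42L)); // true
    // Different entries usually get different keys and may be replicated concurrently.
    System.out.println(concurrencyKey("clustered-cache", 42L) == concurrencyKey("clustered-cache", 43L)); // normally false
  }
}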
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; +import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ReplicationOp; +import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ChainReplicationMessage; +import org.ehcache.clustered.common.internal.store.Chain; + +import java.nio.ByteBuffer; +import java.util.UUID; + +class ClientIDTrackerMessageCodec { + + private static final byte OP_CODE_SIZE = 1; + private static final byte CACHE_ID_LEN_SIZE = 4; + private static final byte KEY_SIZE = 8; + private static final byte MESSAGE_ID_SIZE = 24; + + private ChainCodec chainCodec = new ChainCodec(); + + public byte[] encode(ClientIDTrackerMessage message) { + + ByteBuffer encodedMsg; + switch (message.operation()) { + case CLIENTID_TRACK_OP: + encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + MESSAGE_ID_SIZE); + encodedMsg.put(message.getOpCode()); + encodedMsg.put(ClusteredEhcacheIdentity.serialize(message.getClientId())); + encodedMsg.putLong(message.getId()); + return encodedMsg.array(); + case CHAIN_REPLICATION_OP: + ChainReplicationMessage chainReplicationMessage = (ChainReplicationMessage)message; + byte[] encodedChain = chainCodec.encode(chainReplicationMessage.getChain()); + int cacheIdLen = chainReplicationMessage.getCacheId().length(); + encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + MESSAGE_ID_SIZE + encodedChain.length + 2 * cacheIdLen); + encodedMsg.put(chainReplicationMessage.getOpCode()); + encodedMsg.put(ClusteredEhcacheIdentity.serialize(chainReplicationMessage.getClientId())); + encodedMsg.putLong(chainReplicationMessage.getId()); + encodedMsg.putInt(cacheIdLen); + CodecUtil.putStringAsCharArray(encodedMsg, chainReplicationMessage.getCacheId()); + encodedMsg.putLong(chainReplicationMessage.getKey()); + encodedMsg.put(encodedChain); + return encodedMsg.array(); + default: + throw new UnsupportedOperationException("This operation is not supported : " + message.operation()); + } + + } + + public EhcacheEntityMessage decode(byte[] payload) { + ByteBuffer byteBuffer = ByteBuffer.wrap(payload); + ReplicationOp replicationOp = ReplicationOp.getReplicationOp(byteBuffer.get()); + UUID clientId = getClientId(byteBuffer); + long msgId = byteBuffer.getLong(); + switch (replicationOp) { + case CHAIN_REPLICATION_OP: + int length = byteBuffer.getInt(); + String cacheId = CodecUtil.getStringFromBuffer(byteBuffer, length); + long key = byteBuffer.getLong(); + byte[] encodedChain = new byte[byteBuffer.remaining()]; + byteBuffer.get(encodedChain); + Chain chain = chainCodec.decode(encodedChain); + return new ChainReplicationMessage(cacheId, key, chain, msgId, clientId); + case CLIENTID_TRACK_OP: + return new ClientIDTrackerMessage(msgId, clientId); + default: + throw new UnsupportedOperationException("This operation code is not supported : " + replicationOp); + } + } + + private static UUID getClientId(ByteBuffer payload) { + long msb = payload.getLong(); + long lsb = payload.getLong(); + return new UUID(msb, lsb); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java index 9010232e48..067f90716d 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java +++ 
b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java @@ -20,29 +20,32 @@ import org.terracotta.entity.MessageCodecException; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.LIFECYCLE_OP; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.REPLICATION_OP; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.SERVER_STORE_OP; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.STATE_REPO_OP; public class EhcacheCodec implements MessageCodec { private static final MessageCodec SERVER_INSTANCE = - new EhcacheCodec(new ServerStoreOpCodec(), new LifeCycleMessageCodec(), new StateRepositoryOpCodec(), new ResponseCodec()); + new EhcacheCodec(new ServerStoreOpCodec(), new LifeCycleMessageCodec(), new StateRepositoryOpCodec(), new ResponseCodec(), new ClientIDTrackerMessageCodec()); private final ServerStoreOpCodec serverStoreOpCodec; private final LifeCycleMessageCodec lifeCycleMessageCodec; private final StateRepositoryOpCodec stateRepositoryOpCodec; private final ResponseCodec responseCodec; + private final ClientIDTrackerMessageCodec clientIDTrackerMessageCodec; public static MessageCodec messageCodec() { return SERVER_INSTANCE; } EhcacheCodec(ServerStoreOpCodec serverStoreOpCodec, LifeCycleMessageCodec lifeCycleMessageCodec, - StateRepositoryOpCodec stateRepositoryOpCodec, ResponseCodec responseCodec) { + StateRepositoryOpCodec stateRepositoryOpCodec, ResponseCodec responseCodec, ClientIDTrackerMessageCodec clientIDTrackerMessageCodec) { this.serverStoreOpCodec = serverStoreOpCodec; this.lifeCycleMessageCodec = lifeCycleMessageCodec; this.stateRepositoryOpCodec = stateRepositoryOpCodec; this.responseCodec = responseCodec; + this.clientIDTrackerMessageCodec = clientIDTrackerMessageCodec; } @Override @@ -54,6 +57,8 @@ public byte[] encodeMessage(EhcacheEntityMessage message) { return serverStoreOpCodec.encode((ServerStoreOpMessage) message); case STATE_REPO_OP: return stateRepositoryOpCodec.encode((StateRepositoryOpMessage) message); + case REPLICATION_OP: + return clientIDTrackerMessageCodec.encode((ClientIDTrackerMessage)message); default: throw new IllegalArgumentException("Undefined message type: " + message.getType()); } @@ -68,6 +73,8 @@ public EhcacheEntityMessage decodeMessage(byte[] payload) throws MessageCodecExc return serverStoreOpCodec.decode(payload); } else if (opCode <= STATE_REPO_OP.getCode()) { return stateRepositoryOpCodec.decode(payload); + } else if (opCode <= REPLICATION_OP.getCode()) { + return clientIDTrackerMessageCodec.decode(payload); } else { throw new UnsupportedOperationException("Undefined message code: " + opCode); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java index bcebc157f0..d1bc2d4708 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java @@ -37,6 +37,7 @@ public enum Type { SERVER_STORE_OP((byte) 20), STATE_REPO_OP((byte) 30), SYNC_OP((byte) 40), + REPLICATION_OP((byte) 40) ; private final byte code; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java 
b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java index c5b67baf61..2604fd284c 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java @@ -116,7 +116,7 @@ public String toString() { return getType() + "#" + operation(); } - static abstract class KeyBasedServerStoreOpMessage extends ServerStoreOpMessage { + public static abstract class KeyBasedServerStoreOpMessage extends ServerStoreOpMessage { private final long key; diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodecTest.java new file mode 100644 index 0000000000..339610af6d --- /dev/null +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodecTest.java @@ -0,0 +1,67 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ChainReplicationMessage; +import org.ehcache.clustered.common.internal.store.Chain; +import org.junit.Test; + +import java.util.UUID; + +import static org.ehcache.clustered.common.internal.store.Util.createPayload; +import static org.ehcache.clustered.common.internal.store.Util.getChain; +import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; + + +public class ClientIDTrackerMessageCodecTest { + + @Test + public void testClientIDTrackerMessageCodec() { + ClientIDTrackerMessage clientIDTrackerMessage = new ClientIDTrackerMessage(200L, UUID.randomUUID()); + + ClientIDTrackerMessageCodec clientIDTrackerMessageCodec = new ClientIDTrackerMessageCodec(); + + ClientIDTrackerMessage decodedMsg = (ClientIDTrackerMessage)clientIDTrackerMessageCodec.decode(clientIDTrackerMessageCodec + .encode(clientIDTrackerMessage)); + + assertThat(decodedMsg.getClientId(), is(clientIDTrackerMessage.getClientId())); + assertThat(decodedMsg.getId(), is(clientIDTrackerMessage.getId())); + + } + + @Test + public void testChainReplicationMessageCodec() { + Chain chain = getChain(false, createPayload(2L), createPayload(20L)); + ChainReplicationMessage chainReplicationMessage = new ChainReplicationMessage("test", 2L, chain, 200L, UUID.randomUUID()); + + ClientIDTrackerMessageCodec clientIDTrackerMessageCodec = new ClientIDTrackerMessageCodec(); + + ChainReplicationMessage decodedMsg = (ChainReplicationMessage)clientIDTrackerMessageCodec.decode(clientIDTrackerMessageCodec + .encode(chainReplicationMessage)); + + assertThat(decodedMsg.getCacheId(), 
is(chainReplicationMessage.getCacheId())); + assertThat(decodedMsg.getClientId(), is(chainReplicationMessage.getClientId())); + assertThat(decodedMsg.getId(), is(chainReplicationMessage.getId())); + assertThat(decodedMsg.getKey(), is(chainReplicationMessage.getKey())); + assertTrue(chainsEqual(decodedMsg.getChain(), chainReplicationMessage.getChain())); + + } + +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java index 24736ee310..a64e8bb06f 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java @@ -36,25 +36,37 @@ public void encodeMessage() throws Exception { ServerStoreOpCodec serverStoreOpCodec = mock(ServerStoreOpCodec.class); LifeCycleMessageCodec lifeCycleMessageCodec = mock(LifeCycleMessageCodec.class); StateRepositoryOpCodec stateRepositoryOpCodec = mock(StateRepositoryOpCodec.class); - EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null); + ClientIDTrackerMessageCodec clientIDTrackerMessageCodec = mock(ClientIDTrackerMessageCodec.class); + EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null, clientIDTrackerMessageCodec); LifecycleMessage.DestroyServerStore lifecycleMessage = new LifecycleMessage.DestroyServerStore("foo", CLIENT_ID); codec.encodeMessage(lifecycleMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, never()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, never()).encode(any(StateRepositoryOpMessage.class)); + verify(clientIDTrackerMessageCodec, never()).encode(any(ClientIDTrackerMessage.class)); ServerStoreOpMessage.ClearMessage serverStoreOpMessage = new ServerStoreOpMessage.ClearMessage("foo", CLIENT_ID); codec.encodeMessage(serverStoreOpMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, never()).encode(any(StateRepositoryOpMessage.class)); + verify(clientIDTrackerMessageCodec, never()).encode(any(ClientIDTrackerMessage.class)); StateRepositoryOpMessage.EntrySetMessage stateRepositoryOpMessage = new StateRepositoryOpMessage.EntrySetMessage("foo", "bar", CLIENT_ID); codec.encodeMessage(stateRepositoryOpMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, only()).encode(any(StateRepositoryOpMessage.class)); + verify(clientIDTrackerMessageCodec, never()).encode(any(ClientIDTrackerMessage.class)); + + ClientIDTrackerMessage clientIDTrackerMessage = new ClientIDTrackerMessage(20L, CLIENT_ID); + codec.encodeMessage(clientIDTrackerMessage); + verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); + verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); + verify(stateRepositoryOpCodec, only()).encode(any(StateRepositoryOpMessage.class)); + verify(clientIDTrackerMessageCodec, only()).encode(any(ClientIDTrackerMessage.class)); + } @Test @@ -62,7 +74,8 @@ public void decodeMessage() throws Exception { ServerStoreOpCodec serverStoreOpCodec = 
mock(ServerStoreOpCodec.class); LifeCycleMessageCodec lifeCycleMessageCodec = mock(LifeCycleMessageCodec.class); StateRepositoryOpCodec stateRepositoryOpCodec = mock(StateRepositoryOpCodec.class); - EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null); + ClientIDTrackerMessageCodec clientIDTrackerMessageCodec = mock(ClientIDTrackerMessageCodec.class); + EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null, clientIDTrackerMessageCodec); byte[] payload = new byte[1]; @@ -73,6 +86,7 @@ public void decodeMessage() throws Exception { verify(lifeCycleMessageCodec, times(10)).decode(payload); verify(serverStoreOpCodec, never()).decode(payload); verify(stateRepositoryOpCodec, never()).decode(payload); + verify(clientIDTrackerMessageCodec, never()).decode(payload); for (byte i = 11; i <= EhcacheEntityMessage.Type.SERVER_STORE_OP.getCode(); i++) { payload[0] = i; @@ -81,6 +95,7 @@ public void decodeMessage() throws Exception { verify(lifeCycleMessageCodec, times(10)).decode(payload); verify(serverStoreOpCodec, times(10)).decode(payload); verify(stateRepositoryOpCodec, never()).decode(payload); + verify(clientIDTrackerMessageCodec, never()).decode(payload); for (byte i = 21; i <= EhcacheEntityMessage.Type.STATE_REPO_OP.getCode(); i++) { payload[0] = i; @@ -89,6 +104,16 @@ public void decodeMessage() throws Exception { verify(lifeCycleMessageCodec, times(10)).decode(payload); verify(serverStoreOpCodec, times(10)).decode(payload); verify(stateRepositoryOpCodec, times(10)).decode(payload); + verify(clientIDTrackerMessageCodec, never()).decode(payload); + + for (byte i = 31; i <= EhcacheEntityMessage.Type.REPLICATION_OP.getCode(); i++) { + payload[0] = i; + codec.decodeMessage(payload); + } + verify(lifeCycleMessageCodec, times(10)).decode(payload); + verify(serverStoreOpCodec, times(10)).decode(payload); + verify(stateRepositoryOpCodec, times(10)).decode(payload); + verify(clientIDTrackerMessageCodec, times(10)).decode(payload); } } \ No newline at end of file diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java index 322494082b..836507bc06 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/lock/VoltronReadWriteLockPassiveIntegrationTest.java @@ -33,7 +33,6 @@ import static org.ehcache.clustered.lock.VoltronReadWriteLockIntegrationTest.async; import static org.junit.Assert.fail; -@Ignore public class VoltronReadWriteLockPassiveIntegrationTest { @ClassRule diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheCRUDReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheCRUDReplicationTest.java new file mode 100644 index 0000000000..93281289c4 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheCRUDReplicationTest.java @@ -0,0 +1,183 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
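EhcacheCodec routes an incoming payload purely on its first byte, and the EhcacheCodecTest changes above walk each op-code range to check that routing. A compact sketch of the dispatch follows; the boundaries 20, 30 and 40 come from the Type and ReplicationOp constants in this patch, while the value 10 for LIFECYCLE_OP is inferred from the test loops rather than shown directly.

// Sketch of the first-byte dispatch performed by EhcacheCodec.decodeMessage; not the real method.
public class OpCodeDispatchSketch {

  static String decoderFor(byte opCode) {
    if (opCode <= 10) {        // LIFECYCLE_OP range (upper bound inferred from EhcacheCodecTest)
      return "LifeCycleMessageCodec";
    } else if (opCode <= 20) { // SERVER_STORE_OP range
      return "ServerStoreOpCodec";
    } else if (opCode <= 30) { // STATE_REPO_OP range
      return "StateRepositoryOpCodec";
    } else if (opCode <= 40) { // REPLICATION_OP range added by this patch (31 and 32 are in use)
      return "ClientIDTrackerMessageCodec";
    }
    throw new UnsupportedOperationException("Undefined message code: " + opCode);
  }

  public static void main(String[] args) {
    System.out.println(decoderFor((byte) 31)); // CHAIN_REPLICATION_OP -> ClientIDTrackerMessageCodec
    System.out.println(decoderFor((byte) 32)); // CLIENTID_TRACK_OP    -> ClientIDTrackerMessageCodec
  }
}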
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.replication; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThat; + +public class BasicClusteredCacheCRUDReplicationTest { + + private static final String RESOURCE_CONFIG = + "" + + "" + + "16" + + "" + + "\n"; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @Before + public void startServers() throws Exception { + CLUSTER.getClusterControl().waitForActive(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + } + + @After + public void tearDown() throws Exception { + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().startAllServers(); + } + + @Test + public void testCRUD() throws Exception { + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm-replication")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); + + try { + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))).build(); + + Cache cache = cacheManager.createCache("clustered-cache", config); + cache.put(1L, "The one"); + cache.put(2L, "The two"); + cache.put(1L, "Another one"); + cache.put(3L, "The three"); + cache.put(4L, 
"The four"); + assertThat(cache.get(1L), equalTo("Another one")); + assertThat(cache.get(2L), equalTo("The two")); + assertThat(cache.get(3L), equalTo("The three")); + cache.remove(4L); + + CLUSTER.getClusterControl().terminateActive(); + + assertThat(cache.get(1L), equalTo("Another one")); + assertThat(cache.get(2L), equalTo("The two")); + assertThat(cache.get(3L), equalTo("The three")); + assertThat(cache.get(4L), nullValue()); + + } finally { + cacheManager.close(); + } + } + + @Test + public void testBulkOps() throws Exception { + final CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER.getConnectionURI().resolve("/bulk-cm-replication")).autoCreate()) + .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) + .add(new ClusteredStoreConfiguration(Consistency.STRONG))); + + final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); + try { + final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); + + Map entriesMap = new HashMap(); + entriesMap.put(1L, "one"); + entriesMap.put(2L, "two"); + entriesMap.put(3L, "three"); + entriesMap.put(4L, "four"); + entriesMap.put(5L, "five"); + entriesMap.put(6L, "six"); + cache1.putAll(entriesMap); + + CLUSTER.getClusterControl().terminateActive(); + + Set keySet = entriesMap.keySet(); + Map all = cache1.getAll(keySet); + assertThat(all.get(1L), is("one")); + assertThat(all.get(2L), is("two")); + assertThat(all.get(3L), is("three")); + assertThat(all.get(4L), is("four")); + assertThat(all.get(5L), is("five")); + assertThat(all.get(6L), is("six")); + + } finally { + cacheManager1.close(); + } + } + + @Test + public void testCAS() throws Exception { + final CacheManagerBuilder clusteredCacheManagerBuilder = + newCacheManagerBuilder() + .with(cluster(CLUSTER.getConnectionURI().resolve("/cas-cm-replication")).autoCreate()) + .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) + .add(new ClusteredStoreConfiguration(Consistency.STRONG))); + + final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); + try { + final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); + + assertThat(cache1.putIfAbsent(1L, "one"), nullValue()); + assertThat(cache1.putIfAbsent(2L, "two"), nullValue()); + assertThat(cache1.putIfAbsent(3L, "three"), nullValue()); + assertThat(cache1.replace(3L, "another one", "yet another one"), is(false)); + + CLUSTER.getClusterControl().terminateActive(); + + assertThat(cache1.putIfAbsent(1L, "another one"), is("one")); + assertThat(cache1.remove(2L, "not two"), is(false)); + assertThat(cache1.replace(3L, "three", "another three"), is(true)); + assertThat(cache1.replace(2L, "new two"), is("two")); + } finally { + cacheManager1.close(); + } + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicLifeCyclePassiveReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java similarity index 85% rename from 
clustered/integration-test/src/test/java/org/ehcache/clustered/BasicLifeCyclePassiveReplicationTest.java rename to clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java index 04cce2b581..27bcb4a0f2 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicLifeCyclePassiveReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.ehcache.clustered; +package org.ehcache.clustered.replication; import org.ehcache.clustered.client.config.ClusteredResourcePool; import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; @@ -24,12 +24,14 @@ import org.ehcache.clustered.client.internal.service.ClusteredTierCreationException; import org.ehcache.clustered.client.internal.service.ClusteredTierDestructionException; import org.ehcache.clustered.client.internal.service.ClusteredTierManagerConfigurationException; +import org.ehcache.clustered.client.internal.service.ClusteredTierManagerValidationException; import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; import org.ehcache.clustered.client.service.ClusteringService; import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreManagerException; +import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.impl.serialization.CompactJavaSerializer; import org.junit.After; import org.junit.Before; @@ -43,6 +45,7 @@ import java.util.Collections; import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; @@ -162,6 +165,32 @@ public void testConfigureReplication() throws Exception { service.stop(); } + @Test + public void testValidateReplication() throws Exception { + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI()) + .autoCreate() + .build(); + + ClusteringService service = new ClusteringServiceFactory().create(configuration); + + service.start(null); + + EhcacheClientEntity clientEntity = getEntity(service); + + CLUSTER.getClusterControl().terminateActive(); + + try { + clientEntity.validate(configuration.getServerConfiguration()); + fail("LifecycleException Expected."); + } catch (ClusteredTierManagerValidationException e) { + assertThat(e.getCause(), instanceOf(LifecycleException.class)); + assertThat(e.getCause().getMessage(), containsString("is already being tracked with Client Id")); + } + + service.stop(); + } + private static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { Field entity = clusteringService.getClass().getDeclaredField("entity"); diff --git a/clustered/integration-test/src/test/resources/clusteredConfiguration.txt b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt index b04ba4816d..40899b9409 100644 --- a/clustered/integration-test/src/test/resources/clusteredConfiguration.txt +++ b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt @@ -19,6 +19,6 @@ caches: 
services: - org.ehcache.clustered.client.config.ClusteringServiceConfiguration: clusterUri: terracotta://server-1:9510/my-server-entity-1 - readOperationTimeout: TimeoutDuration{5 SECONDS} + readOperationTimeout: TimeoutDuration{20 SECONDS} autoCreate: true - org.ehcache.management.registry.DefaultManagementRegistryConfiguration diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 3732e6e358..7036d8a319 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -47,8 +47,12 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.common.internal.messages.ReconnectData; import org.ehcache.clustered.common.internal.messages.ReconnectDataCodec; +import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage; +import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.KeyBasedServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.ServerStore; import org.ehcache.clustered.server.messages.EntityStateSyncMessage; import org.ehcache.clustered.server.state.EhcacheStateService; @@ -57,8 +61,10 @@ import org.slf4j.LoggerFactory; import org.terracotta.entity.ActiveServerEntity; +import org.terracotta.entity.BasicServiceConfiguration; import org.terracotta.entity.ClientCommunicator; import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.IEntityMessenger; import org.terracotta.entity.MessageCodecException; import org.terracotta.entity.PassiveSynchronizationChannel; import org.terracotta.entity.ServiceConfiguration; @@ -106,6 +112,7 @@ class EhcacheActiveEntity implements ActiveServerEntity getServiceType() { if (ehcacheStateService == null) { throw new AssertionError("Server failed to retrieve EhcacheStateService."); } + entityMessenger = services.getService(new BasicServiceConfiguration<>(IEntityMessenger.class)); + if (entityMessenger == null) { + throw new AssertionError("Server failed to retrieve IEntityMessenger service."); + } } /** @@ -250,6 +261,8 @@ public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEn return invokeServerStoreOperation(clientDescriptor, (ServerStoreOpMessage) message); case STATE_REPO_OP: return invokeStateRepositoryOperation(clientDescriptor, (StateRepositoryOpMessage) message); + case REPLICATION_OP: + return responseFactory.success(); default: throw new IllegalMessageException("Unknown message : " + message); } @@ -385,13 +398,16 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client } case APPEND: { ServerStoreOpMessage.AppendMessage appendMessage = (ServerStoreOpMessage.AppendMessage)message; - cacheStore.append(appendMessage.getKey(), appendMessage.getPayload()); + cacheStore.getAndAppend(appendMessage.getKey(), appendMessage.getPayload()); + sendMessageToSelfAndDeferRetirement(appendMessage, cacheStore.get(appendMessage.getKey())); invalidateHashForClient(clientDescriptor, appendMessage.getCacheId(), 
appendMessage.getKey()); return responseFactory.success(); } case GET_AND_APPEND: { ServerStoreOpMessage.GetAndAppendMessage getAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage)message; - EhcacheEntityResponse response = responseFactory.response(cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload())); + Chain result = cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); + sendMessageToSelfAndDeferRetirement(getAndAppendMessage, cacheStore.get(getAndAppendMessage.getKey())); + EhcacheEntityResponse response = responseFactory.response(result); invalidateHashForClient(clientDescriptor, getAndAppendMessage.getCacheId(), getAndAppendMessage.getKey()); return response; } @@ -419,6 +435,14 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client } } + private void sendMessageToSelfAndDeferRetirement(KeyBasedServerStoreOpMessage message, Chain result) { + try { + entityMessenger.messageSelfAndDeferRetirement(message, new ChainReplicationMessage(message.getCacheId(), message.getKey(), result, message.getId(), message.getClientId())); + } catch (MessageCodecException e) { + LOGGER.error("Codec Exception", e); + } + } + private EhcacheEntityResponse invokeStateRepositoryOperation(ClientDescriptor clientDescriptor, StateRepositoryOpMessage message) throws ClusterException { validateClusteredTierManagerConfigured(clientDescriptor); return ehcacheStateService.getStateRepositoryManager().invoke(message); @@ -576,7 +600,14 @@ private void configure(ClientDescriptor clientDescriptor, ConfigureStoreManager private void validate(ClientDescriptor clientDescriptor, ValidateStoreManager message) throws ClusterException { validateClientConnected(clientDescriptor); if (trackedClients.contains(message.getClientId())) { - throw new InvalidClientIdException("Client ID : " + message.getClientId() + " is already being tracked by Active"); + throw new InvalidClientIdException("Client ID : " + message.getClientId() + " is already being tracked by Active paired with Client : " + clientDescriptor); + } else if (clientIdMap.get(clientDescriptor) != null) { + throw new LifecycleException("Client : " + clientDescriptor + " is already being tracked with Client Id : " + clientIdMap.get(clientDescriptor)); + } + try { + entityMessenger.messageSelfAndDeferRetirement(message, new ClientIDTrackerMessage(message.getId(), message.getClientId())); + } catch (MessageCodecException e) { + LOGGER.error("Codec Exception", e); } addClientId(clientDescriptor, message.getClientId()); ehcacheStateService.validate(message.getConfiguration()); @@ -584,6 +615,7 @@ private void validate(ClientDescriptor clientDescriptor, ValidateStoreManager me } private void addClientId(ClientDescriptor clientDescriptor, UUID clientId) { + LOGGER.info("Adding Client {} with client ID : {} ", clientDescriptor, clientId); clientIdMap.put(clientDescriptor, clientId); trackedClients.add(clientId); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index a697760a46..737e84a7c6 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -29,6 +29,8 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; import 
org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; +import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage; +import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.server.messages.EntityStateSyncMessage; @@ -77,6 +79,9 @@ public void invoke(EhcacheEntityMessage message) { case SYNC_OP: invokeSyncOperation((EntityStateSyncMessage) message); break; + case REPLICATION_OP: + invokeRetirementMessages((ClientIDTrackerMessage)message); + break; default: throw new IllegalMessageException("Unknown message : " + message); } @@ -100,6 +105,27 @@ public void invoke(EhcacheEntityMessage message) { } } + private void invokeRetirementMessages(ClientIDTrackerMessage message) throws ClusterException { + + switch (message.operation()) { + case CHAIN_REPLICATION_OP: + ChainReplicationMessage retirementMessage = (ChainReplicationMessage)message; + ServerStoreImpl cacheStore = ehcacheStateService.getStore(retirementMessage.getCacheId()); + if (cacheStore == null) { + // An operation on a non-existent store should never get out of the client + throw new LifecycleException("Clustered tier does not exist : '" + retirementMessage.getCacheId() + "'"); + } + cacheStore.put(retirementMessage.getKey(), retirementMessage.getChain()); + ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); + break; + case CLIENTID_TRACK_OP: + ehcacheStateService.getClientMessageTracker().add(message.getClientId()); + break; + default: + throw new IllegalMessageException("Unknown Retirement Message : " + message); + } + } + private void invokeServerStoreOperation(ServerStoreOpMessage message) throws ClusterException { ServerStoreImpl cacheStore = ehcacheStateService.getStore(message.getCacheId()); if (cacheStore == null) { @@ -108,15 +134,9 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu } switch (message.operation()) { - //TODO: check if append and getandappend can be combined - case APPEND: { - ServerStoreOpMessage.AppendMessage appendMessage = (ServerStoreOpMessage.AppendMessage)message; - cacheStore.append(appendMessage.getKey(), appendMessage.getPayload()); - break; - } + case APPEND: case GET_AND_APPEND: { - ServerStoreOpMessage.GetAndAppendMessage getAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage)message; - cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); + ehcacheStateService.getClientMessageTracker().track(message.getId(), message.getClientId()); break; } case REPLACE: { @@ -139,7 +159,6 @@ private void invokeSyncOperation(EntityStateSyncMessage message) throws ClusterE ehcacheStateService.createStore(entry.getKey(), entry.getValue()); } message.getTrackedClients().stream().forEach(id -> ehcacheStateService.getClientMessageTracker().add(id)); - ehcacheStateService.getClientMessageTracker().setEntityConfiguredStamp(message.getClientId(), message.getId()); } private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterException { @@ -147,6 +166,9 @@ private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterEx case CONFIGURE: configure((ConfigureStoreManager) message); break; + case VALIDATE: + 
trackAndApplyMessage(message); + break; case CREATE_SERVER_STORE: createServerStore((CreateServerStore) message); break; @@ -166,7 +188,7 @@ private void configure(ConfigureStoreManager message) throws ClusterException { private void trackAndApplyMessage(LifecycleMessage message) { ClientMessageTracker clientMessageTracker = ehcacheStateService.getClientMessageTracker(); if (!clientMessageTracker.isAdded(message.getClientId())) { - clientMessageTracker.add(message.getClientId()); + throw new IllegalStateException("Untracked client id " + message.getClientId()); } clientMessageTracker.track(message.getId(), message.getClientId()); clientMessageTracker.applied(message.getId(), message.getClientId()); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 0b8da0e668..739ff2365d 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -41,6 +41,7 @@ import org.junit.Test; import org.terracotta.entity.ClientCommunicator; import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.IEntityMessenger; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; import org.terracotta.offheapresource.OffHeapResource; @@ -2789,6 +2790,8 @@ public Set getAllIdentifiers() { this.storeManagerService = new EhcacheStateServiceImpl(this, getIdentifiers(pools.keySet())); } return (T) (this.storeManagerService); + } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { + return (T) mock(IEntityMessenger.class); } throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index 4de2c86a54..8637564321 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -22,10 +22,12 @@ import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; +import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; +import org.terracotta.entity.IEntityMessenger; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; import org.terracotta.offheapresource.OffHeapResource; @@ -47,6 +49,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; public class EhcachePassiveEntityTest { @@ -241,7 +244,7 @@ public void testCreateDedicatedServerStore() throws Exception { .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); - + passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); 
passiveEntity.invoke(MESSAGE_FACTORY.createServerStore("cacheAlias", new ServerStoreConfigBuilder() .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) @@ -278,6 +281,7 @@ public void testCreateSharedServerStore() throws Exception { .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); + passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); passiveEntity.invoke( MESSAGE_FACTORY.createServerStore("cacheAlias", @@ -312,6 +316,7 @@ public void testDestroyServerStore() throws Exception { .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); + passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); passiveEntity.invoke( MESSAGE_FACTORY.createServerStore("dedicatedCache", @@ -372,6 +377,7 @@ public void testSharedPoolCacheNameCollision() throws Exception { .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); + passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); assertThat(registry.getStoreManagerService().getStores(), is(Matchers.empty())); passiveEntity.invoke( @@ -436,11 +442,11 @@ public void testDestroyWithStores() throws Exception { final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); - passiveEntity.invoke( - MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() - .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) - .build())); + passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build())); + passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); passiveEntity.invoke( MESSAGE_FACTORY.createServerStore("dedicatedCache", @@ -649,6 +655,8 @@ public Set getAllIdentifiers() { this.storeManagerService = new EhcacheStateServiceImpl(this, getIdentifiers(pools.keySet())); } return (T) (this.storeManagerService); + } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { + return (T) mock(IEntityMessenger.class); } throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); From 0333dea52b0ee81f72a7b9749a15ba73aa06587e Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 6 Oct 2016 14:16:35 +0200 Subject: [PATCH 054/218] Upgrade to gradle 3.1 Require Java 8 for the build Enforce most modules compile and test with Java 1.6 Simplify setup in modules requiring Java 8 Generalise javadoc flag since always run with Java 8 --- 107/build.gradle | 9 +-- build.gradle | 56 ++++++++++++------ .../src/main/groovy/MavenToolchain.groovy | 50 ---------------- clustered/client/build.gradle | 10 ++++ clustered/integration-test/build.gradle | 13 ++-- clustered/server/build.gradle | 19 +++++- gradle/wrapper/gradle-wrapper.jar | Bin 53556 -> 52818 bytes gradle/wrapper/gradle-wrapper.properties | 4 +- gradlew | 5 ++ gradlew.bat | 6 -- management/build.gradle | 8 +-- 11 files changed, 84 insertions(+), 96 deletions(-) delete mode 100644 buildSrc/src/main/groovy/MavenToolchain.groovy diff --git a/107/build.gradle b/107/build.gradle index aae8a5ad3f..9d51f9d5dd 100644 --- a/107/build.gradle +++ 
b/107/build.gradle @@ -14,6 +14,8 @@ * limitations under the License. */ +import org.gradle.internal.jvm.Jvm + apply plugin: EhDeploy configurations { @@ -41,12 +43,7 @@ javadoc { exclude '**/tck/**' } -def tckTestOnlyIfJava7 = { - JavaVersion.current().isJava7Compatible() -} - task unpackTckTests(type: Copy) { - onlyIf tckTestOnlyIfJava7 from { configurations.tckTestClasses.collect {zipTree(it)} } @@ -54,7 +51,7 @@ task unpackTckTests(type: Copy) { } task tckTest(type: Test, dependsOn: unpackTckTests) { - onlyIf tckTestOnlyIfJava7 + executable = Jvm.current().javaExecutable testClassesDir = sourceSets.tckTest.output.classesDir classpath += sourceSets.tckTest.runtimeClasspath diff --git a/build.gradle b/build.gradle index b850391e31..2bf9730307 100644 --- a/build.gradle +++ b/build.gradle @@ -36,12 +36,26 @@ ext { entityTestLibVersion = terracottaPassthroughTestingVersion galvanVersion = '1.0.6-beta2' + // Tools + findbugsVersion = '3.0.1' + utils = new Utils(baseVersion, logger) isReleaseVersion = !baseVersion.endsWith('SNAPSHOT') isCloudbees = System.getenv('JENKINS_URL')?.contains('cloudbees') logger.info("Is cloudbees? $isCloudbees") } +// Java 6 build setup +def java6Error = 'Set the poperty \'java6Home\' in your $HOME/.gradle/gradle.properties pointing to a Java 6 installation' +assert (JavaVersion.current().isJava8Compatible()) : 'The Ehcache 3 build requires Java 8 to run and a configured Java 6 installation\n' + java6Error +assert hasProperty('java6Home') : 'The Ehcache 3 build requires a configured Java 6 installation\n' + java6Error +def javaExecutablesPath = new File(java6Home, 'bin') +def javaExecutables = [:].withDefault { execName -> + def executable = new File(javaExecutablesPath, execName) + assert executable.exists() : "There is no ${execName} executable in ${javaExecutablesPath}" + executable +} + subprojects { apply plugin: 'java' apply plugin: 'eclipse' @@ -72,12 +86,11 @@ subprojects { } dependencies { - if (JavaVersion.current().compareTo(JavaVersion.VERSION_1_7) >= 0) { - compileOnly 'com.google.code.findbugs:annotations:3.0.0' - } else { - compileOnly 'com.google.code.findbugs:annotations:2.0.3' + compileOnly "com.google.code.findbugs:annotations:$parent.findbugsVersion" + testCompile 'junit:junit:4.12', 'org.hamcrest:hamcrest-library:1.3' + testCompile('org.mockito:mockito-core:1.9.5') { + exclude group:'org.hamcrest', module:'hamcrest-core' } - testCompile 'junit:junit:4.11', 'org.hamcrest:hamcrest-library:1.3', 'org.mockito:mockito-core:1.9.5' testRuntime "org.slf4j:slf4j-simple:$parent.slf4jVersion" } @@ -113,14 +126,6 @@ subprojects { exclude '**/internal/**' } - if (JavaVersion.current().isJava8Compatible()) { - allprojects { - tasks.withType(Javadoc) { - options.addStringOption('Xdoclint:none', '-quiet') - } - } - } - task javadocJar(type: Jar, dependsOn: javadoc) { from javadoc.destinationDir classifier = 'javadoc' @@ -136,17 +141,13 @@ subprojects { checkstyle { configFile = file("$rootDir/config/checkstyle.xml") configProperties = ['projectDir':projectDir, 'rootDir':rootDir] - toolVersion = '5.7' + toolVersion = '5.9' } findbugs { ignoreFailures = false sourceSets = [sourceSets.main] - if (JavaVersion.current().compareTo(JavaVersion.VERSION_1_7) >= 0) { - findbugs.toolVersion = '3.0.1' - } else { - findbugs.toolVersion = '2.0.3' - } + findbugs.toolVersion = parent.findbugsVersion } jacoco { @@ -159,6 +160,23 @@ subprojects { csv.enabled false } } + + tasks.withType(AbstractCompile) { + options.with { + fork = true + forkOptions.executable = 
javaExecutables.javac + } + } + tasks.withType(Test) { + executable = javaExecutables.java + } + tasks.withType(JavaExec) { + executable = javaExecutables.java + } + tasks.withType(Javadoc) { + options.addStringOption('Xdoclint:none', '-quiet') + } + } allprojects { diff --git a/buildSrc/src/main/groovy/MavenToolchain.groovy b/buildSrc/src/main/groovy/MavenToolchain.groovy deleted file mode 100644 index af5b930343..0000000000 --- a/buildSrc/src/main/groovy/MavenToolchain.groovy +++ /dev/null @@ -1,50 +0,0 @@ -import org.gradle.api.JavaVersion -import org.gradle.api.resources.MissingResourceException; -import org.gradle.internal.os.OperatingSystem; - -/** - * Emulates maven toolchains support by looking at the user's - * ~/.m2/toolchains.xml - * - * Throws if this file is not found - * - * Provides a closure to use to find the correct jvm's executable, eg: - * MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') - */ -class MavenToolchain { - - static def mavenToolchainDefinitions = { - String userHome = System.getProperty("user.home"); - File toolchain = new File(userHome, ".m2" + File.separator + "toolchains.xml") - if (toolchain.isFile()) { - def xmlSlurper = new XmlSlurper() - return new XmlSlurper().parse(toolchain) - } else { - throw new MissingResourceException("toolchain file not found at ${toolchain}" ); - } - } - - static def toolchains; - static { - def xml = mavenToolchainDefinitions() - if (xml == null) { - toolchains = [:] - } else { - toolchains = xml.toolchain.findAll({ it.type.text() == 'jdk' }).collectEntries{[JavaVersion.toVersion(it.provides.version.text()), it.configuration.jdkHome.text()]} - } - } - - private static def exe = OperatingSystem.current().isWindows() ? '.exe' : '' - - static def javaHome = { v -> - def jdk = toolchains.get(v); - if (jdk == null) { - - throw new MissingResourceException("JDK $v not available - check your toolchains.xml") - } else { - return jdk; - } - } - - static def javaExecutable = { v, exec -> MavenToolchain.javaHome(v) + ['', 'bin', exec].join(File.separator) + exe } -} diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 9e1c16d5d3..b3af80a2d4 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -14,6 +14,8 @@ * limitations under the License. */ +import org.gradle.internal.jvm.Jvm + apply plugin: EhDeploy dependencies { @@ -31,3 +33,11 @@ dependencies { testCompile "org.terracotta:entity-test-lib:$parent.entityTestLibVersion" testCompile "org.terracotta:passthrough-server:$parent.terracottaPassthroughTestingVersion" } + +compileTestJava { + options.forkOptions.executable = Jvm.current().javacExecutable +} + +test { + executable = Jvm.current().javaExecutable +} diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index cf833e2461..185e0b8e8d 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -14,6 +14,8 @@ * limitations under the License. 
*/ +import org.gradle.internal.jvm.Jvm + dependencies { testCompile project(':dist') testCompile project(':clustered:clustered-dist') @@ -29,19 +31,14 @@ task unzipKit(type: Copy) { into 'build/ehcache-kit' } -def java8 = { - JavaVersion.current().isJava8Compatible() -} - compileTestJava { - options.fork = true; - options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') + options.forkOptions.executable = Jvm.current().javacExecutable } test { dependsOn unzipKit - executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'java') - environment 'JAVA_HOME', MavenToolchain.javaHome(JavaVersion.VERSION_1_8) + executable = Jvm.current().javaExecutable + // If you want to see all mutations of the voltron monitoring tree, add to JAVA_OPTS: -Dorg.terracotta.management.service.monitoring.VoltronMonitoringService.DEBUG=true environment 'JAVA_OPTS', '-Dcom.tc.l2.lockmanager.greedy.locks.enabled=false' //If this directory does not exist, tests will fail with a cryptic assert failure systemProperty 'kitInstallationPath', "$unzipKit.destinationDir/${project(':clustered:clustered-dist').archivesBaseName}-$project.version-kit" diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index c7f9df290b..091474a5c5 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -14,8 +14,13 @@ * limitations under the License. */ +import org.gradle.internal.jvm.Jvm + apply plugin: EhDeploy +sourceCompatibility = 1.8 +targetCompatibility = 1.8 + configurations { provided } @@ -30,7 +35,7 @@ dependencies { } sourceSets { - main { + main { compileClasspath += configurations.provided } test { @@ -38,3 +43,15 @@ sourceSets { runtimeClasspath += configurations.provided } } + +compileJava { + options.forkOptions.executable = Jvm.current().javacExecutable +} + +compileTestJava { + options.forkOptions.executable = Jvm.current().javacExecutable +} + +test { + executable = Jvm.current().javaExecutable +} diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index ca78035ef0501d802d4fc55381ef2d5c3ce0ec6e..deedc7fa5e6310eac3148a7dd0b1f069b07364cb 100644 GIT binary patch delta 10610 zcmaKS1ymGW-2T#_Ae|!J-5nCrEh*iNfOM^N=h7+Ny`+S6w{(YufFOu8{8#XO-;eJ* z=RfD{nY+(C&#&grotZlqoC7nN4)a1u76$ei0Dy=H!1q9vj(tIi@@EepL|P*V002~D zU#npGgHIfx0RIt@{t$~vU?27))Q|fM=7&9q9%}8OQHu)g2>l2J!fro8@8NPEp;{b7 z4ERTk;loH*C>3bAJalknHAqh2~YqB*wn&gk@1t*)&y+Jdv zOXG^Cp7%xdtMj_DpF*fEMUF;1Q@Kwwj)v13?(fF-@1gBq+()bnM2N*#^eKIjh=`-@ zLc!0}P>w&8*V3euVeJMQ^ebrP;pSACx)>>~54yYwN~5#uu_x4KwHX5L99Po_5p_`? z58w#};=Z{8hmG~$%;PC+eqi&`>#@bw57`K3r8ri>(#QD5JamlB`b#ag4X0<;^iBe6 zHDW`X)$8Rd`VO4eDA}9X`TPzo4(+)rCWq4=!;PfutE!CWdeg;ioX8fP_Qs!zEIKuDPfuk$lTb$; zEtTe5v5V6JI3i^bxSeXq{N4JE*v<0`(`7ExS2jUQD4?S2LRlBm&Ci2NHD`|VRqP*{ zKeP50aTQxmO;m6B%x)Z7bl9JtR;SvS?$ZLzz_TNwJu`1yosJn>U$lhUuiJQ6V#6_x zfSrm+nhW@QBbdY`PC53&U%3=_v)BOh2Z?pyCe>=o47e`0fd{SrJlZCD<#Rps&9x$- zgjbVdj4faRFSjXs!d3&hRJbk&>|d8MQPq0Y@~-@##cS@fF6+e#RrAF<2e@a(0@Z!r zzs@u(zqknH$$U;V!^I`b9a+pcTSXYX9rLX^HwJ}LcGPe9I{gfxpRsF%VZ%fMKjIdSf3z)m2b;XNXKPJ>U zsu9!P5z!gPNWV{4;WQB&m3|hI9Y1kW)W@ghGAcDInk)}4Yh80xX}2-e2jp$4STGCJ zDjJd6IV8G&mqsLat{%Ihxc*EHDiQb6SaRm#evQJpWPIX3+8eH6$o_I5tv(EB;iikh z5~e$oX#%`6^2<+P$q?`HSfN`cOYYWImdc7WFq^f*+iyWfZ66sL=o;wQwh~zRI^Ek! 
[binary patch data for gradle/wrapper/gradle-wrapper.jar not shown]
zA?UP+hWvk$qpJgmc*cm{=aiY>jyxYI{mc>2fB0kx5ckUv$UIFYqsEI{_2^cVAn?hd}q>sE7|}n z>h*Z^M8OT^@tOW{JIz{mya4QWJ~%1zJyVkE^N&^cfxnSX82~^f`GHc3C_xJYPstl} z0_NF=f>whCAD8cA^#3#SKU4Z99IRC8PtcyCKQst{x`+hxC_xBAe-)Ar0ie>eK!#0p zAfq9|CmayWi^=`#5a|#At47pHKu2VnM=W5v-~sdXQkvB zkh9{SX%Cj-{X2eWBLFCxYVbW`P(=m$KNiI%@pccuvGWAyUBPFHf@(m%BQj5}j)R4V z8bF65l4O74!;EX;2M>0$1)SYDo(ZLcsz#|m%ng`-3_mWHXBF!i_D2giMna&Dktg8u V+VyW_t#4=txRSBMp?&$o{6A(>Qtki% diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 88ef034727..4a444e5b24 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ -#Wed Jun 15 22:53:20 CEST 2016 +#Mon Sep 19 15:49:17 PDT 2016 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-2.14-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-3.1-all.zip diff --git a/gradlew b/gradlew index 27309d9231..9aa616c273 100755 --- a/gradlew +++ b/gradlew @@ -161,4 +161,9 @@ function splitJvmOpts() { eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [[ "$(uname)" == "Darwin" ]] && [[ "$HOME" == "$PWD" ]]; then + cd "$(dirname "$0")" +fi + exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" diff --git a/gradlew.bat b/gradlew.bat index 832fdb6079..f9553162f1 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -49,7 +49,6 @@ goto fail @rem Get command-line arguments, handling Windows variants if not "%OS%" == "Windows_NT" goto win9xME_args -if "%@eval[2+2]" == "4" goto 4NT_args :win9xME_args @rem Slurp the command line arguments. @@ -60,11 +59,6 @@ set _SKIP=2 if "x%~1" == "x" goto execute set CMD_LINE_ARGS=%* -goto execute - -:4NT_args -@rem Get arguments from the 4NT Shell from JP Software -set CMD_LINE_ARGS=%$ :execute @rem Setup the command line diff --git a/management/build.gradle b/management/build.gradle index aaf037ec6f..b1188ed2c0 100644 --- a/management/build.gradle +++ b/management/build.gradle @@ -14,6 +14,8 @@ * limitations under the License. */ +import org.gradle.internal.jvm.Jvm + apply plugin: EhDeploy dependencies { @@ -40,11 +42,9 @@ dependencies { compileTestJava { sourceCompatibility = 1.8 targetCompatibility = 1.8 - options.fork = true; - options.forkOptions.executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'javac') + options.forkOptions.executable = Jvm.current().javacExecutable } test { - executable = MavenToolchain.javaExecutable(JavaVersion.VERSION_1_8, 'java') - environment 'JAVA_HOME', MavenToolchain.javaHome(JavaVersion.VERSION_1_8) + executable = Jvm.current().javaExecutable } From bf58bf16eead30ac92cabfb47f80f10e2176ecb8 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 6 Oct 2016 14:17:25 +0200 Subject: [PATCH 055/218] Options to run tests with a more recent Java version Adds a property testVM which can have the values 6, 7 or 8. Using the value 7 requires a java7Home property. This does not impact the tests that mandate Java 8. 
--- build.gradle | 29 ++++++++++++++----- buildSrc/src/main/groovy/scripts/Utils.groovy | 14 +++++++++ 2 files changed, 35 insertions(+), 8 deletions(-) diff --git a/build.gradle b/build.gradle index 2bf9730307..bfb692af2d 100644 --- a/build.gradle +++ b/build.gradle @@ -14,6 +14,7 @@ * limitations under the License. */ import scripts.* +import org.gradle.internal.jvm.Jvm ext { baseVersion = '3.1.2-SNAPSHOT' @@ -49,11 +50,23 @@ ext { def java6Error = 'Set the poperty \'java6Home\' in your $HOME/.gradle/gradle.properties pointing to a Java 6 installation' assert (JavaVersion.current().isJava8Compatible()) : 'The Ehcache 3 build requires Java 8 to run and a configured Java 6 installation\n' + java6Error assert hasProperty('java6Home') : 'The Ehcache 3 build requires a configured Java 6 installation\n' + java6Error -def javaExecutablesPath = new File(java6Home, 'bin') -def javaExecutables = [:].withDefault { execName -> - def executable = new File(javaExecutablesPath, execName) - assert executable.exists() : "There is no ${execName} executable in ${javaExecutablesPath}" - executable +def java6HomeLocation = new File(java6Home) +def testJavaHomeLocation = java6HomeLocation + +if (hasProperty('testVM')) { + switch (testVM) { + case '6': + break + case '7': + assert hasProperty('java7Home') : 'Set the poperty \'java7Home\' in your $HOME/.gradle/gradle.properties pointing to a Java 7 installation' + testJavaHomeLocation = new File(java7Home) + break + case '8': + testJavaHomeLocation = Jvm.current().javaHome + break + default: + throw new AssertionError("Unrecognized 'testVM' value $testVM - Accepted values are 7 or 8") + } } subprojects { @@ -164,14 +177,14 @@ subprojects { tasks.withType(AbstractCompile) { options.with { fork = true - forkOptions.executable = javaExecutables.javac + forkOptions.executable = utils.executables(java6HomeLocation).javac } } tasks.withType(Test) { - executable = javaExecutables.java + executable = utils.executables(testJavaHomeLocation).java } tasks.withType(JavaExec) { - executable = javaExecutables.java + executable = utils.executables(testJavaHomeLocation).java } tasks.withType(Javadoc) { options.addStringOption('Xdoclint:none', '-quiet') diff --git a/buildSrc/src/main/groovy/scripts/Utils.groovy b/buildSrc/src/main/groovy/scripts/Utils.groovy index 1388470e90..9a128b22d7 100644 --- a/buildSrc/src/main/groovy/scripts/Utils.groovy +++ b/buildSrc/src/main/groovy/scripts/Utils.groovy @@ -20,6 +20,7 @@ class Utils { String version String revision + Map> executablesPath = [:] Utils(version, logger) { this.version = version @@ -85,4 +86,17 @@ class Utils { } } } + + def executables(path) { + def execMap = executablesPath.get(path) + if (execMap == null) { + execMap = [:].withDefault { execName -> + def executable = new File(path, 'bin' + File.separator + execName) + assert executable.exists(): "There is no ${execName} executable in ${path}" + executable + } + executablesPath.put(path, execMap) + } + execMap + } } From 6e42478a75f9370b76d128323e811bbfa63b67c4 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 6 Oct 2016 14:46:01 +0200 Subject: [PATCH 056/218] Ability to switch deploy location for releases Fixed some config issues in transactions module: EhDeploy was wrongly applied. 
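
As an illustration of the new knob (URL and credentials below are placeholders), a release aimed at an internal Nexus repository would override the default from gradle.properties, either there or via -P on the command line:

    deployUrl = https://nexus.example.com/content/repositories/releases
    tcDeployUser = someUser
    tcDeployPassword = someSecret

Because that URL contains 'nexus', the build wires deployUser/deployPwd to tcDeployUser/tcDeployPassword; any other deployUrl, including the default Sonatype OSS staging URL, falls back to sonatypeUser/sonatypePwd. This only matters for release versions, since snapshot builds still deploy to the Sonatype snapshot repository.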
--- build.gradle | 24 +++++++++++++++++++-- buildSrc/src/main/groovy/EhDeploy.groovy | 4 ++-- buildSrc/src/main/groovy/EhPomMangle.groovy | 4 ++-- gradle.properties | 4 +++- transactions/build.gradle | 3 +-- 5 files changed, 30 insertions(+), 9 deletions(-) diff --git a/build.gradle b/build.gradle index bfb692af2d..0744c92b81 100644 --- a/build.gradle +++ b/build.gradle @@ -16,8 +16,17 @@ import scripts.* import org.gradle.internal.jvm.Jvm +if (hasProperty('overrideVersion')) { + ext { + baseVersion = overrideVersion + } +} else { + ext { + baseVersion = '3.1.2-SNAPSHOT' + } +} + ext { - baseVersion = '3.1.2-SNAPSHOT' // Third parties offheapVersion = '2.2.2' @@ -43,7 +52,18 @@ ext { utils = new Utils(baseVersion, logger) isReleaseVersion = !baseVersion.endsWith('SNAPSHOT') isCloudbees = System.getenv('JENKINS_URL')?.contains('cloudbees') - logger.info("Is cloudbees? $isCloudbees") +} + +if (deployUrl.contains('nexus')) { + ext { + deployUser = tcDeployUser + deployPwd = tcDeployPassword + } +} else { + ext { + deployUser = sonatypeUser + deployPwd = sonatypePwd + } } // Java 6 build setup diff --git a/buildSrc/src/main/groovy/EhDeploy.groovy b/buildSrc/src/main/groovy/EhDeploy.groovy index 282c40b8e1..7f4e7ebf73 100644 --- a/buildSrc/src/main/groovy/EhDeploy.groovy +++ b/buildSrc/src/main/groovy/EhDeploy.groovy @@ -43,8 +43,8 @@ class EhDeploy implements Plugin { beforeDeployment { MavenDeployment deployment -> project.signing.signPom(deployment)} if (project.isReleaseVersion) { - repository(id: 'sonatype-nexus-staging', url: 'https://oss.sonatype.org/service/local/staging/deploy/maven2/') { - authentication(userName: project.sonatypeUser, password: project.sonatypePwd) + repository(url: project.deployUrl) { + authentication(userName: project.deployUser, password: project.deployPwd) } } else { repository(id: 'sonatype-nexus-snapshot', url: 'https://oss.sonatype.org/content/repositories/snapshots') { diff --git a/buildSrc/src/main/groovy/EhPomMangle.groovy b/buildSrc/src/main/groovy/EhPomMangle.groovy index a20274e57e..302e10bf2b 100644 --- a/buildSrc/src/main/groovy/EhPomMangle.groovy +++ b/buildSrc/src/main/groovy/EhPomMangle.groovy @@ -81,8 +81,8 @@ class EhPomMangle implements Plugin { beforeDeployment { MavenDeployment deployment -> project.signing.signPom(deployment)} if (project.isReleaseVersion) { - repository(id: 'sonatype-nexus-staging', url: 'https://oss.sonatype.org/service/local/staging/deploy/maven2/') { - authentication(userName: project.sonatypeUser, password: project.sonatypePwd) + repository(url: project.deployUrl) { + authentication(userName: project.deployUser, password: project.deployPwd) } } else { repository(id: 'sonatype-nexus-snapshot', url: 'https://oss.sonatype.org/content/repositories/snapshots') { diff --git a/gradle.properties b/gradle.properties index 71b12fe25b..95d91e9e83 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,5 +1,7 @@ sonatypeUser = OVERRIDE_ME sonatypePwd = OVERRIDE_ME +deployUrl = 'https://oss.sonatype.org/service/local/staging/deploy/maven2/' + # Enable the daemon by adding org.gradle.daemon in USER_HOME/.gradle/gradle.properties -org.gradle.parallel=true \ No newline at end of file +org.gradle.parallel=true diff --git a/transactions/build.gradle b/transactions/build.gradle index c61ea6d5d0..9f658db346 100644 --- a/transactions/build.gradle +++ b/transactions/build.gradle @@ -17,7 +17,6 @@ group = 'org.ehcache' apply plugin: EhOsgi -apply plugin: EhDeploy apply plugin: EhPomMangle dependencies { @@ -31,4 +30,4 @@ dependencies { 
dependencies { pomOnlyCompile "org.ehcache:ehcache:$parent.baseVersion" pomOnlyProvided 'javax.transaction:jta:1.1', 'org.codehaus.btm:btm:2.1.4' -} \ No newline at end of file +} From e259e48cd84c27adf870ffacccd8ca7541fd2fde Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Wed, 5 Oct 2016 16:20:07 -0400 Subject: [PATCH 057/218] Use org.junit AssumptionViolatedException since the org.junit.internal one is now deprecated --- .../exceptions/BaseClusteredEhcacheExceptionTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java index ecf25303ef..8c5b68f8e1 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/exceptions/BaseClusteredEhcacheExceptionTest.java @@ -18,7 +18,7 @@ import org.hamcrest.Matchers; import org.junit.Test; -import org.junit.internal.AssumptionViolatedException; +import org.junit.AssumptionViolatedException; import java.lang.reflect.Constructor; @@ -150,4 +150,4 @@ private void checkWithClientStack(T baseException) { assertThat(copyException.getMessage(), is(baseException.getMessage())); assertThat(copyException.getCause(), Matchers.is(baseException)); } -} \ No newline at end of file +} From 8171b9f3ccd58fdfc572f529054b755b812df34b Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Thu, 6 Oct 2016 09:32:04 -0400 Subject: [PATCH 058/218] startForMaintenance should take a more flexible serviceProvider --- .../ehcache/spi/service/MaintainableService.java | 2 +- .../service/DefaultClusteringService.java | 2 +- .../persistence/DefaultDiskResourceService.java | 2 +- .../DefaultLocalPersistenceService.java | 2 +- .../persistence/TestDiskResourceService.java | 15 +++++++-------- 5 files changed, 11 insertions(+), 12 deletions(-) diff --git a/api/src/main/java/org/ehcache/spi/service/MaintainableService.java b/api/src/main/java/org/ehcache/spi/service/MaintainableService.java index 6403678917..a9d3e76adf 100644 --- a/api/src/main/java/org/ehcache/spi/service/MaintainableService.java +++ b/api/src/main/java/org/ehcache/spi/service/MaintainableService.java @@ -39,6 +39,6 @@ enum MaintenanceScope { * @param maintenanceScope the scope of the maintenance * */ - void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope); + void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index b1860cc6e0..18f0f83320 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -164,7 +164,7 @@ public void start(final ServiceProvider serviceProvider) { } @Override - public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { + public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { initClusterConnection(); createEntityFactory(); 
if(maintenanceScope == MaintenanceScope.CACHE_MANAGER) { diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java index 193d604a35..e13d2ae7f4 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java @@ -69,7 +69,7 @@ public void start(final ServiceProvider serviceProvider) { * {@inheritDoc} */ @Override - public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { + public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { persistenceService = serviceProvider.getService(LocalPersistenceService.class); isStarted = true; } diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java index f9ed490873..b1b6e506c5 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java @@ -164,7 +164,7 @@ public void destroyAll(String owner) { } @Override - public synchronized void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { + public synchronized void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { internalStart(); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java b/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java index fdc1d39c90..63e9d18b5f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java +++ b/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java @@ -16,26 +16,25 @@ package org.ehcache.impl.internal.persistence; -import java.io.File; - +import org.ehcache.CachePersistenceException; import org.ehcache.config.CacheConfiguration; - import org.ehcache.config.ResourceType; import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.spi.service.FileBasedPersistenceContext; +import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; -import org.ehcache.CachePersistenceException; import org.ehcache.impl.persistence.DefaultDiskResourceService; import org.ehcache.impl.persistence.DefaultLocalPersistenceService; import org.ehcache.spi.persistence.StateRepository; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceProvider; import org.junit.rules.ExternalResource; import org.junit.rules.TemporaryFolder; import org.mockito.Mockito; +import java.io.File; + import static org.mockito.Mockito.mock; /** @@ -124,7 +123,7 @@ public void start(ServiceProvider serviceProvider) { } @Override - public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { + public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { //ignore } From 04f448ec3ba9005eaebc6447df523277086e4749 
Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Thu, 6 Oct 2016 10:37:38 -0400 Subject: [PATCH 059/218] Move startWithMaintenance near start --- .../DefaultDiskResourceService.java | 55 ++++++++---------- .../DefaultLocalPersistenceService.java | 58 +++++++++---------- 2 files changed, 52 insertions(+), 61 deletions(-) diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java index e13d2ae7f4..f7b9d73a9d 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java @@ -44,14 +44,10 @@ public class DefaultDiskResourceService implements DiskResourceService { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDiskResourceService.class); static final String PERSISTENCE_SPACE_OWNER = "file"; - private final ConcurrentMap knownPersistenceSpaces; + private final ConcurrentMap knownPersistenceSpaces = new ConcurrentHashMap(); private volatile LocalPersistenceService persistenceService; private volatile boolean isStarted; - public DefaultDiskResourceService() { - this.knownPersistenceSpaces = new ConcurrentHashMap(); - } - private boolean isStarted() { return isStarted; } @@ -61,8 +57,7 @@ private boolean isStarted() { */ @Override public void start(final ServiceProvider serviceProvider) { - persistenceService = serviceProvider.getService(LocalPersistenceService.class); - isStarted = true; + innerStart(serviceProvider); } /** @@ -70,6 +65,10 @@ public void start(final ServiceProvider serviceProvider) { */ @Override public void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { + innerStart(serviceProvider); + } + + private void innerStart(ServiceProvider serviceProvider) { persistenceService = serviceProvider.getService(LocalPersistenceService.class); isStarted = true; } @@ -155,9 +154,8 @@ private PersistenceSpace createSpace(String name, boolean persistent) throws Cac } } return persistenceSpace; - } else { - return null; } + return null; } private void checkStarted() { @@ -203,20 +201,22 @@ public void destroyAll() { @Override public StateRepository getStateRepositoryWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { + PersistenceSpace persistenceSpace = getPersistenceSpace(identifier); - if (persistenceSpace != null) { - FileBasedStateRepository stateRepository = new FileBasedStateRepository( - FileUtils.createSubDirectory(persistenceSpace.identifier.persistentSpaceId.getRoot(), name)); - FileBasedStateRepository previous = persistenceSpace.stateRepositories.putIfAbsent(name, stateRepository); - if (previous != null) { - return previous; - } - return stateRepository; + if(persistenceSpace == null) { + throw newCachePersistenceException(identifier); + } + + FileBasedStateRepository stateRepository = new FileBasedStateRepository( + FileUtils.createSubDirectory(persistenceSpace.identifier.persistentSpaceId.getRoot(), name)); + FileBasedStateRepository previous = persistenceSpace.stateRepositories.putIfAbsent(name, stateRepository); + if (previous != null) { + return previous; } - throw newCachePersistenceException(identifier); + return stateRepository; } - private CachePersistenceException newCachePersistenceException(PersistenceSpaceIdentifier identifier) { + private CachePersistenceException newCachePersistenceException(PersistenceSpaceIdentifier identifier) throws 
CachePersistenceException { return new CachePersistenceException("Unknown space: " + identifier); } @@ -235,20 +235,11 @@ private PersistenceSpace getPersistenceSpace(PersistenceSpaceIdentifier ident @Override public FileBasedPersistenceContext createPersistenceContextWithin(PersistenceSpaceIdentifier identifier, String name) throws CachePersistenceException { - if (containsSpace(identifier)) { - return new DefaultFileBasedPersistenceContext( - FileUtils.createSubDirectory(((DefaultPersistenceSpaceIdentifier)identifier).persistentSpaceId.getRoot(), name)); - } - throw newCachePersistenceException(identifier); - } - - private boolean containsSpace(PersistenceSpaceIdentifier identifier) { - for (PersistenceSpace persistenceSpace : knownPersistenceSpaces.values()) { - if (persistenceSpace.identifier.equals(identifier)) { - return true; - } + if(getPersistenceSpace(identifier) == null) { + throw newCachePersistenceException(identifier); } - return false; + return new DefaultFileBasedPersistenceContext( + FileUtils.createSubDirectory(((DefaultPersistenceSpaceIdentifier)identifier).persistentSpaceId.getRoot(), name)); } private static class PersistenceSpace { diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java index b1b6e506c5..4a406f2068 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultLocalPersistenceService.java @@ -74,6 +74,35 @@ public synchronized void start(final ServiceProvider serviceProvider) { internalStart(); } + @Override + public synchronized void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { + internalStart(); + } + + private void internalStart() { + if (!started) { + createLocationIfRequiredAndVerify(rootDirectory); + try { + rw = new RandomAccessFile(lockFile, "rw"); + } catch (FileNotFoundException e) { + // should not happen normally since we checked that everything is fine right above + throw new RuntimeException(e); + } + try { + lock = rw.getChannel().lock(); + } catch (Exception e) { + try { + rw.close(); + } catch (IOException e1) { + // ignore silently + } + throw new RuntimeException("Couldn't lock rootDir: " + rootDirectory.getAbsolutePath(), e); + } + started = true; + LOGGER.debug("RootDirectory Locked"); + } + } + /** * {@inheritDoc} */ @@ -163,35 +192,6 @@ public void destroyAll(String owner) { } } - @Override - public synchronized void startForMaintenance(ServiceProvider serviceProvider, MaintenanceScope maintenanceScope) { - internalStart(); - } - - private void internalStart() { - if (!started) { - createLocationIfRequiredAndVerify(rootDirectory); - try { - rw = new RandomAccessFile(lockFile, "rw"); - } catch (FileNotFoundException e) { - // should not happen normally since we checked that everything is fine right above - throw new RuntimeException(e); - } - try { - lock = rw.getChannel().lock(); - } catch (Exception e) { - try { - rw.close(); - } catch (IOException e1) { - // ignore silently - } - throw new RuntimeException("Couldn't lock rootDir: " + rootDirectory.getAbsolutePath(), e); - } - started = true; - LOGGER.debug("RootDirectory Locked"); - } - } - private void destroy(SafeSpace ss, boolean verbose) { if (verbose) { LOGGER.debug("Destroying file based persistence context for {}", ss.identifier); From 3db30de6ea96a53e1c8efbc70c990cc96d22cf31 Mon Sep 17 00:00:00 2001 From: 
Louis Jacomet Date: Fri, 7 Oct 2016 14:22:50 +0200 Subject: [PATCH 060/218] Support new build on Windows Groovify my java --- build.gradle | 12 ++---------- buildSrc/src/main/groovy/scripts/Utils.groovy | 7 +++++-- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/build.gradle b/build.gradle index 0744c92b81..e989c2b333 100644 --- a/build.gradle +++ b/build.gradle @@ -16,18 +16,10 @@ import scripts.* import org.gradle.internal.jvm.Jvm -if (hasProperty('overrideVersion')) { - ext { - baseVersion = overrideVersion - } -} else { - ext { - baseVersion = '3.1.2-SNAPSHOT' - } -} - ext { + baseVersion = findProperty('overrideVersion') ?: '3.1.2-SNAPSHOT' + // Third parties offheapVersion = '2.2.2' statisticVersion = '1.1.0' diff --git a/buildSrc/src/main/groovy/scripts/Utils.groovy b/buildSrc/src/main/groovy/scripts/Utils.groovy index 9a128b22d7..40a88267d3 100644 --- a/buildSrc/src/main/groovy/scripts/Utils.groovy +++ b/buildSrc/src/main/groovy/scripts/Utils.groovy @@ -14,7 +14,9 @@ * limitations under the License. */ -package scripts; +package scripts + +import org.gradle.internal.os.OperatingSystem class Utils { @@ -91,7 +93,8 @@ class Utils { def execMap = executablesPath.get(path) if (execMap == null) { execMap = [:].withDefault { execName -> - def executable = new File(path, 'bin' + File.separator + execName) + def extension = OperatingSystem.current().isWindows() ? ".exe" : "" + def executable = new File(path, 'bin' + File.separator + execName + extension) assert executable.exists(): "There is no ${execName} executable in ${path}" executable } From 898efe281786d632bea45dcbe2823f477c01a912 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Fri, 7 Oct 2016 12:50:55 -0400 Subject: [PATCH 061/218] Remove generic warnings --- .../store/StoreRemoveKeyValueTest.java | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java index 7d16535134..0d30c1599e 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyValueTest.java @@ -25,6 +25,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; /** * Test the {@link Store#remove(Object, Object)} contract of the @@ -41,7 +42,7 @@ public StoreRemoveKeyValueTest(final StoreFactory factory) { } protected Store kvStore; - protected Store kvStore2; + protected Store kvStore2; @After public void tearDown() { @@ -68,8 +69,8 @@ public void removeEntryForKeyIfMappedToValue() K equalKey = factory.createKey(1L); V equalValue = factory.createValue(1L); - assertThat(key.equals(equalKey), is(true)); - assertThat(value.equals(equalValue), is(true)); + assertThat(key, is(equalKey)); + assertThat(value, is(equalValue)); try { kvStore.remove(equalKey, equalValue); @@ -110,7 +111,7 @@ public void doNothingForWrongValue() V notEqualValue = factory.createValue(2L); - assertThat(value.equals(notEqualValue), is(false)); + assertThat(value, not(notEqualValue)); try { assertThat(kvStore.remove(key, notEqualValue), is(RemoveStatus.KEY_PRESENT)); @@ -192,7 +193,7 @@ public void nullValueThrowsException() } @SPITest - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings("unchecked" ) public void wrongKeyTypeThrowsException() throws IllegalAccessException, 
InstantiationException, LegalSPITesterException { kvStore2 = factory.newStore(); @@ -201,9 +202,9 @@ public void wrongKeyTypeThrowsException() try { if (this.factory.getKeyType() == String.class) { - kvStore2.remove(1.0f, value); + kvStore2.remove((K) (Object) 1.0f, value); } else { - kvStore2.remove("key", value); + kvStore2.remove((K) (Object) "key", value); } throw new AssertionError("Expected ClassCastException because the key is of the wrong type"); } catch (ClassCastException e) { @@ -214,7 +215,7 @@ public void wrongKeyTypeThrowsException() } @SPITest - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings("unchecked") public void wrongValueTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore2 = factory.newStore(); @@ -223,9 +224,9 @@ public void wrongValueTypeThrowsException() try { if (this.factory.getValueType() == String.class) { - kvStore2.remove(key, 1.0f); + kvStore2.remove(key, (V) (Object) 1.0f); } else { - kvStore2.remove(key, "value"); + kvStore2.remove(key, (V) (Object) "value"); } throw new AssertionError("Expected ClassCastException because the value is of the wrong type"); } catch (ClassCastException e) { From c7240b2e4b0b381d52c7b921c053a61400aa73cb Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Fri, 7 Oct 2016 19:10:48 +0530 Subject: [PATCH 062/218] Issue #1209 Improved test coverage --- .../clustered/sync/PassiveSyncTest.java | 93 +++++++++++++++++++ .../messages/EntityStateSyncMessage.java | 3 + .../server/EhcacheActiveEntityTest.java | 52 +++++++++++ 3 files changed, 148 insertions(+) create mode 100644 clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java new file mode 100644 index 0000000000..d2e21f8d6e --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java @@ -0,0 +1,93 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.sync; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertThat; + +public class PassiveSyncTest { + private static final String RESOURCE_CONFIG = + "" + + "" + + "16" + + "" + + "\n"; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @Before + public void startServers() throws Exception { + CLUSTER.getClusterControl().waitForActive(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + } + + @After + public void tearDown() throws Exception { + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().startAllServers(); + } + + @Test + public void testSync() throws Exception { + CLUSTER.getClusterControl().terminateOnePassive(); + + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(false); + cacheManager.init(); + + try { + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))).build(); + + Cache cache = cacheManager.createCache("clustered-cache", config); + + CLUSTER.getClusterControl().startOneServer(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateActive(); + + cache.put(1L, "The one"); //If this doesn't throw it means that the state replication worked + } finally { + cacheManager.close(); + } + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java index 63153232fb..67537a8e6d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java @@ -20,11 +20,14 @@ import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import com.tc.classloader.CommonComponent; + import java.io.Serializable; import java.util.Map; import java.util.Set; import java.util.UUID; +@CommonComponent public class EntityStateSyncMessage extends EhcacheEntityMessage implements 
Serializable { private final ServerSideConfiguration configuration; diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 739ff2365d..908475bd1d 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -35,13 +35,16 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; +import org.ehcache.clustered.server.messages.EntityStateSyncMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; +import org.mockito.ArgumentCaptor; import org.terracotta.entity.ClientCommunicator; import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.IEntityMessenger; +import org.terracotta.entity.PassiveSynchronizationChannel; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; import org.terracotta.offheapresource.OffHeapResource; @@ -64,6 +67,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -71,6 +75,8 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class EhcacheActiveEntityTest { @@ -2584,6 +2590,52 @@ public void testCreateServerStoreWithUnknownPool() throws Exception { } } + @Test + public void testSyncToPassive() throws Exception { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("myCache", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, 1); + + ArgumentCaptor captor = ArgumentCaptor.forClass(EntityStateSyncMessage.class); + verify(syncChannel).synchronizeToPassive(captor.capture()); + + EntityStateSyncMessage capturedSyncMessage = captor.getValue(); + ServerSideConfiguration configuration = 
capturedSyncMessage.getConfiguration(); + assertThat(configuration.getDefaultServerResource(), is("serverResource1")); + assertThat(configuration.getResourcePools().keySet(), containsInAnyOrder("primary", "secondary")); + + Map storeConfigs = capturedSyncMessage.getStoreConfigs(); + assertThat(storeConfigs.keySet(), containsInAnyOrder("myCache")); + assertThat(storeConfigs.get("myCache").getPoolAllocation(), instanceOf(PoolAllocation.Shared.class)); + assertThat(capturedSyncMessage.getTrackedClients(), containsInAnyOrder(CLIENT_ID)); + + } + private void assertSuccess(EhcacheEntityResponse response) throws Exception { if (!response.equals(EhcacheEntityResponse.Success.INSTANCE)) { throw ((Failure) response).getCause(); From 2f8116d962d6a0081c9b5ad717a06fb747761f9d Mon Sep 17 00:00:00 2001 From: Abhilash Date: Tue, 11 Oct 2016 13:17:07 +0530 Subject: [PATCH 063/218] Wrong byte value for replication op #1211 --- .../messages/ClientIDTrackerMessage.java | 8 ++--- ....java => ClientIDTrackerMessageCodec.java} | 0 .../internal/messages/EhcacheCodec.java | 3 +- .../messages/EhcacheEntityMessage.java | 2 +- .../internal/messages/EhcacheCodecTest.java | 2 +- .../messages/EhcacheEntityMessageTest.java | 35 +++++++++++++++++++ 6 files changed, 43 insertions(+), 7 deletions(-) rename clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/{ClientIdTrackerMessageCodec.java => ClientIDTrackerMessageCodec.java} (100%) create mode 100644 clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessageTest.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java index 495c97cc1f..b1cb45b4a7 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java @@ -26,8 +26,8 @@ public class ClientIDTrackerMessage extends EhcacheEntityMessage { public enum ReplicationOp { - CHAIN_REPLICATION_OP((byte) 31), - CLIENTID_TRACK_OP((byte) 32) + CHAIN_REPLICATION_OP((byte) 41), + CLIENTID_TRACK_OP((byte) 42) ; private final byte replicationOpCode; @@ -43,9 +43,9 @@ public byte getReplicationOpCode() { public static ReplicationOp getReplicationOp(byte replicationOpCode) { switch (replicationOpCode) { - case 31: + case 41: return CHAIN_REPLICATION_OP; - case 32: + case 42: return CLIENTID_TRACK_OP; default: throw new IllegalArgumentException("Replication operation not defined for : " + replicationOpCode); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIdTrackerMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodec.java similarity index 100% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIdTrackerMessageCodec.java rename to clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodec.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java index 067f90716d..c5d52fd90e 100644 --- 
a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java @@ -23,6 +23,7 @@ import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.REPLICATION_OP; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.SERVER_STORE_OP; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.STATE_REPO_OP; +import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.SYNC_OP; public class EhcacheCodec implements MessageCodec { @@ -73,7 +74,7 @@ public EhcacheEntityMessage decodeMessage(byte[] payload) throws MessageCodecExc return serverStoreOpCodec.decode(payload); } else if (opCode <= STATE_REPO_OP.getCode()) { return stateRepositoryOpCodec.decode(payload); - } else if (opCode <= REPLICATION_OP.getCode()) { + } else if (opCode > SYNC_OP.getCode() && opCode <= REPLICATION_OP.getCode()) { return clientIDTrackerMessageCodec.decode(payload); } else { throw new UnsupportedOperationException("Undefined message code: " + opCode); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java index d1bc2d4708..756d7b64c3 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java @@ -37,7 +37,7 @@ public enum Type { SERVER_STORE_OP((byte) 20), STATE_REPO_OP((byte) 30), SYNC_OP((byte) 40), - REPLICATION_OP((byte) 40) + REPLICATION_OP((byte) 50) ; private final byte code; diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java index a64e8bb06f..926a5db284 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java @@ -106,7 +106,7 @@ public void decodeMessage() throws Exception { verify(stateRepositoryOpCodec, times(10)).decode(payload); verify(clientIDTrackerMessageCodec, never()).decode(payload); - for (byte i = 31; i <= EhcacheEntityMessage.Type.REPLICATION_OP.getCode(); i++) { + for (byte i = 41; i <= EhcacheEntityMessage.Type.REPLICATION_OP.getCode(); i++) { payload[0] = i; codec.decodeMessage(payload); } diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessageTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessageTest.java new file mode 100644 index 0000000000..4ca522e6f0 --- /dev/null +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessageTest.java @@ -0,0 +1,35 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.junit.Test; + +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +public class EhcacheEntityMessageTest { + + @Test + public void testEhcacheEntityMessageTypes() { + assertThat(EhcacheEntityMessage.Type.LIFECYCLE_OP.getCode(), is((byte) 10)); + assertThat(EhcacheEntityMessage.Type.SERVER_STORE_OP.getCode(), is((byte) 20)); + assertThat(EhcacheEntityMessage.Type.STATE_REPO_OP.getCode(), is((byte) 30)); + assertThat(EhcacheEntityMessage.Type.SYNC_OP.getCode(), is((byte) 40)); + assertThat(EhcacheEntityMessage.Type.REPLICATION_OP.getCode(), is((byte) 50)); + } + +} From cfff7548ef0f8e50277911458ce01cd7896ddb17 Mon Sep 17 00:00:00 2001 From: Chris Dennis Date: Fri, 7 Oct 2016 09:04:32 -0400 Subject: [PATCH 064/218] Issue #1509 ServiceLocator dependency system rework --- .../store/ClusteredStoreProviderTest.java | 32 +- .../java/org/ehcache/core/EhcacheManager.java | 36 +- .../core/internal/service/ServiceLocator.java | 747 ++++++++++-------- .../core/spi/service/ServiceFactory.java | 16 +- .../org/ehcache/core/EhcacheManagerTest.java | 2 +- .../service/ServiceLocatorPluralTest.java | 111 +-- .../internal/service/ServiceLocatorTest.java | 54 +- .../core/internal/store/StoreSupportTest.java | 27 +- .../builders/UserManagedCacheBuilder.java | 21 +- .../builders/UserManagedCacheBuilderTest.java | 2 +- .../ehcache/core/spi/ServiceProviderTest.java | 24 +- .../DefaultTimeSourceServiceTest.java | 13 +- .../disk/OffHeapDiskStoreProviderTest.java | 4 +- .../store/disk/OffHeapDiskStoreSPITest.java | 3 +- .../store/disk/OffHeapDiskStoreTest.java | 14 +- .../ByteSizedOnHeapStoreByRefSPITest.java | 3 +- .../ByteSizedOnHeapStoreByValueSPITest.java | 3 +- .../store/heap/OnHeapStoreByRefSPITest.java | 3 +- .../store/heap/OnHeapStoreByValueSPITest.java | 3 +- .../OnHeapStoreCachingTierByRefSPITest.java | 3 +- .../OnHeapStoreCachingTierByValueSPITest.java | 5 +- .../OnHeapStoreCachingTierByRefSPITest.java | 3 +- .../OnHeapStoreCachingTierByValueSPITest.java | 5 +- .../store/offheap/OffHeapStoreSPITest.java | 3 +- .../tiering/CompoundCachingTierSPITest.java | 3 +- .../TieredStoreFlushWhileShutdownTest.java | 13 +- .../store/tiering/TieredStoreSPITest.java | 9 +- .../store/tiering/TieredStoreTest.java | 3 +- .../tiering/TieredStoreWith3TiersSPITest.java | 6 +- .../integration/EhcacheBulkMethodsITest.java | 3 +- .../transactions/xa/internal/XAStoreTest.java | 15 +- 31 files changed, 642 insertions(+), 547 deletions(-) diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java index f63a5a5a45..327487a4d4 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java @@ -46,6 +46,8 @@ import java.util.HashSet; import java.util.List; import 
java.util.Map; + +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.*; @@ -59,12 +61,12 @@ public class ClusteredStoreProviderTest { @Test public void testRank() throws Exception { ClusteredStore.Provider provider = new ClusteredStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator( - new TieredStore.Provider(), - new OnHeapStore.Provider(), - new OffHeapStore.Provider(), - new OffHeapDiskStore.Provider(), - mock(ClusteringService.class)); + ServiceLocator serviceLocator = dependencySet() + .with(new TieredStore.Provider()) + .with(new OnHeapStore.Provider()) + .with(new OffHeapStore.Provider()) + .with(new OffHeapDiskStore.Provider()) + .with(mock(ClusteringService.class)).build(); provider.start(serviceLocator); assertRank(provider, 1, ClusteredResourceType.Types.DEDICATED); @@ -77,13 +79,13 @@ public void testRank() throws Exception { @Test public void testRankTiered() throws Exception { TieredStore.Provider provider = new TieredStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator( - provider, - new ClusteredStore.Provider(), - new OnHeapStore.Provider(), - new OffHeapStore.Provider(), - new OffHeapDiskStore.Provider(), - mock(ClusteringService.class)); + ServiceLocator serviceLocator = dependencySet() + .with(provider) + .with(new ClusteredStore.Provider()) + .with(new OnHeapStore.Provider()) + .with(new OffHeapStore.Provider()) + .with(new OffHeapDiskStore.Provider()) + .with(mock(ClusteringService.class)).build(); serviceLocator.startAllServices(); assertRank(provider, 0, ClusteredResourceType.Types.DEDICATED, ResourceType.Core.DISK); @@ -115,7 +117,7 @@ public void testRankTiered() throws Exception { @Test public void testAuthoritativeRank() throws Exception { ClusteredStore.Provider provider = new ClusteredStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(mock(ClusteringService.class)); + ServiceLocator serviceLocator = dependencySet().with(mock(ClusteringService.class)).build(); provider.start(serviceLocator); assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, Collections.EMPTY_LIST), is(1)); @@ -212,4 +214,4 @@ public int getTierHeight() { return 10; } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/ehcache/core/EhcacheManager.java b/core/src/main/java/org/ehcache/core/EhcacheManager.java index 141ec363de..5062b065b8 100644 --- a/core/src/main/java/org/ehcache/core/EhcacheManager.java +++ b/core/src/main/java/org/ehcache/core/EhcacheManager.java @@ -73,6 +73,8 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; + /** * Implementation class for the {@link org.ehcache.CacheManager} and {@link PersistentCacheManager} *

@@ -82,17 +84,6 @@ */ public class EhcacheManager implements PersistentCacheManager, InternalCacheManager { - @ServiceDependencies({ Store.Provider.class, - CacheLoaderWriterProvider.class, - WriteBehindProvider.class, - CacheEventDispatcherFactory.class, - CacheEventListenerProvider.class }) - private static class ServiceDeps { - private ServiceDeps() { - throw new UnsupportedOperationException("This is an annotation placeholder, not to be instantiated"); - } - } - private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheManager.class); private final DefaultConfiguration configuration; @@ -119,10 +110,9 @@ public EhcacheManager(Configuration config, Collection services, boolea this.simpleName = (simpleName.isEmpty() ? this.getClass().getName() : simpleName); this.configuration = new DefaultConfiguration(config); this.cacheManagerClassLoader = config.getClassLoader() != null ? config.getClassLoader() : ClassLoading.getDefaultClassLoader(); - this.serviceLocator = new ServiceLocator(services.toArray(new Service[services.size()])); this.useLoaderInAtomics = useLoaderInAtomics; validateServicesConfigs(); - resolveServices(); + this.serviceLocator = resolveServices(services); } private void validateServicesConfigs() { @@ -134,17 +124,21 @@ private void validateServicesConfigs() { } } - private void resolveServices() { - if (serviceLocator.getService(CacheManagerProviderService.class) == null) { - this.serviceLocator.addService(new DefaultCacheManagerProviderService(this)); + private ServiceLocator resolveServices(Collection services) { + ServiceLocator.DependencySet builder = dependencySet() + .with(Store.Provider.class) + .with(CacheLoaderWriterProvider.class) + .with(WriteBehindProvider.class) + .with(CacheEventDispatcherFactory.class) + .with(CacheEventListenerProvider.class) + .with(services); + if (!builder.contains(CacheManagerProviderService.class)) { + builder = builder.with(new DefaultCacheManagerProviderService(this)); } for (ServiceCreationConfiguration serviceConfig : configuration.getServiceCreationConfigurations()) { - Service service = serviceLocator.getOrCreateServiceFor(serviceConfig); - if (service == null) { - throw new IllegalArgumentException("Couldn't resolve Service " + serviceConfig.getServiceType().getName()); - } + builder = builder.with(serviceConfig); } - serviceLocator.loadDependenciesOf(ServiceDeps.class); + return builder.build(); } /** diff --git a/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java b/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java index b388420467..d03cbe61f6 100644 --- a/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java +++ b/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java @@ -16,6 +16,7 @@ package org.ehcache.core.internal.service; +import org.ehcache.config.Builder; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; @@ -27,175 +28,371 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Deque; +import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; +import java.util.Iterator; import java.util.LinkedHashSet; +import java.util.LinkedList; import java.util.List; +import java.util.Map; import java.util.ServiceLoader; import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; 
-import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import static java.util.Collections.*; + /** * Provides discovery and tracking services for {@link Service} implementations. */ public final class ServiceLocator implements ServiceProvider { private static final Logger LOGGER = LoggerFactory.getLogger(ServiceLocator.class); - private final ConcurrentMap, Set> services = - new ConcurrentHashMap, Set>(); - - @SuppressWarnings("rawtypes") - private final ServiceLoader serviceFactory = ClassLoading.libraryServiceLoaderFor(ServiceFactory.class); + private final ServiceMap services; private final ReadWriteLock runningLock = new ReentrantReadWriteLock(); private final AtomicBoolean running = new AtomicBoolean(false); - public ServiceLocator(Service... services) { - for (Service service : services) { - addService(service); + public static DependencySet dependencySet() { + return new DependencySet(); + } + + private ServiceLocator(ServiceMap services) { + this.services = services; + } + + @Override + public T getService(Class serviceType) { + if (serviceType.isAnnotationPresent(PluralService.class)) { + throw new IllegalArgumentException(serviceType.getName() + " is marked as a PluralService"); } + final Collection registeredServices = getServicesOfType(serviceType); + if (registeredServices.size() > 1) { + throw new AssertionError("The non-PluralService type" + serviceType.getName() + + " has more than one service registered"); + } + return (registeredServices.isEmpty() ? null : registeredServices.iterator().next()); + } + + @Override + public Collection getServicesOfType(Class serviceType) { + return services.get(serviceType); } - /** - * For the {@link Service} class specified, attempt to instantiate the service using the - * {@link ServiceFactory} infrastructure. If a configuration is provided, only the first - * discovered factory is used to instantiate one copy of the service; if no configuration - * is provided, use each discovered factory for the service type to attempt to create a - * service from that factory. - * - * @param serviceClass the {@code Service} type to create - * @param config the service configuration to use; may be null - * @param the type of the {@code Service} - * - * @return the collection of created services; may be empty - * - * @throws IllegalStateException if the configured service is already registered or the configured service - * implements a {@code Service} subtype that is not marked with the {@link PluralService} annotation - * but is already registered - */ - private Collection discoverServices(Class serviceClass, ServiceCreationConfiguration config) { - final List addedServices = new ArrayList(); - for (ServiceFactory factory : ServiceLocator. 
getServiceFactories(serviceFactory)) { - final Class factoryServiceType = factory.getServiceType(); - if (serviceClass.isAssignableFrom(factoryServiceType)) { - if (services.containsKey(factoryServiceType)) { - // Can have only one service registered under a concrete type - continue; + public boolean knowsServiceFor(ServiceConfiguration serviceConfig) { + return services.contains(serviceConfig.getServiceType()); + } + + public void startAllServices() throws Exception { + Deque started = new LinkedList(); + final Lock lock = runningLock.writeLock(); + lock.lock(); + try { + if (!running.compareAndSet(false, true)) { + throw new IllegalStateException("Already started!"); + } + + /* + * This ensures that we start services in dependency order + */ + LinkedList unstarted = new LinkedList(services.all()); + int totalServices = unstarted.size(); + long start = System.currentTimeMillis(); + LOGGER.debug("Starting {} Services...", totalServices); + while (!unstarted.isEmpty()) { + boolean startedSomething = false; + for (Iterator it = unstarted.iterator(); it.hasNext(); ) { + Service s = it.next(); + if (hasUnstartedDependencies(s, unstarted)) { + LOGGER.trace("Delaying starting {}", s); + } else { + LOGGER.trace("Starting {}", s); + s.start(this); + started.push(s); + it.remove(); + startedSomething = true; + } } - T service = factory.create(config); - addService(service); - addedServices.add(service); - if (config != null) { - // Each configuration should be manifested in exactly one service; look no further - return addedServices; + if (startedSomething) { + LOGGER.trace("Cycle complete: " + unstarted.size() + " Services remaining"); + } else { + throw new IllegalStateException("Cyclic dependency in Service set: " + unstarted); } } + LOGGER.debug("All Services successfully started, {} Services in {}ms", totalServices, System.currentTimeMillis() - start); + } catch (Exception e) { + while(!started.isEmpty()) { + Service toBeStopped = started.pop(); + try { + toBeStopped.stop(); + } catch (Exception e1) { + LOGGER.error("Stopping Service failed due to ", e1); + } + } + throw e; + } finally { + lock.unlock(); } - return addedServices; } - @SuppressWarnings("unchecked") - private static Iterable> getServiceFactories(@SuppressWarnings("rawtypes") ServiceLoader serviceFactory) { - List> list = new ArrayList>(); - for (ServiceFactory factory : serviceFactory) { - list.add((ServiceFactory)factory); + public void stopAllServices() throws Exception { + Exception firstException = null; + Lock lock = runningLock.writeLock(); + lock.lock(); + try { + if(!running.compareAndSet(true, false)) { + throw new IllegalStateException("Already stopped!"); + } + + /* + * This ensures that we stop services in dependency order + */ + Collection running = new LinkedList(services.all()); + int totalServices = running.size(); + long start = System.currentTimeMillis(); + LOGGER.debug("Stopping {} Services...", totalServices); + while (!running.isEmpty()) { + boolean stoppedSomething = false; + for (Iterator it = running.iterator(); it.hasNext(); ) { + Service s = it.next(); + if (hasRunningDependencies(s, running)) { + LOGGER.trace("Delaying stopping {}", s); + } else { + LOGGER.trace("Stopping {}", s); + try { + s.stop(); + } catch (Exception e) { + if (firstException == null) { + firstException = e; + } else { + LOGGER.error("Stopping Service failed due to ", e); + } + } + it.remove(); + stoppedSomething = true; + } + } + if (stoppedSomething) { + LOGGER.trace("Cycle complete: " + running.size() + " Services remaining"); + 
} else { + throw new AssertionError("Cyclic dependency in Service set: " + running); + } + } + LOGGER.debug("All Services successfully stopped, {} Services in {}ms", totalServices, System.currentTimeMillis() - start); + } finally { + lock.unlock(); + } + if(firstException != null) { + throw firstException; } - return list; } - /** - * Registers the {@code Service} provided with this {@code ServiceLocator}. If the service is - * already registered, the registration fails. The service specified is also registered under - * each {@code Service} subtype it implements. Duplicate registration of implemented {@code Service} - * subtypes causes registration failure unless that subtype is marked with the {@link PluralService} - * annotation. - * - * @param service the concrete {@code Service} to register - * - * @throws IllegalStateException if the configured service is already registered or {@code service} - * implements a {@code Service} subtype that is not marked with the {@link PluralService} annotation - * but is already registered - */ - public void addService(final Service service) { - final Lock lock = runningLock.readLock(); - lock.lock(); - try { - Set> serviceClazzes = new HashSet>(); + private boolean hasUnstartedDependencies(Service service, Iterable unstarted) { + for (Class dep : identifyTransitiveDependenciesOf(service.getClass())) { + for (Service s : unstarted) { + if (dep.isInstance(s)) { + return true; + } + } + } + return false; + } - for (Class i : getAllInterfaces(service.getClass())) { - if (Service.class != i && Service.class.isAssignableFrom(i)) { + private boolean hasRunningDependencies(Service service, Iterable running) { + for (Class dep : identifyTransitiveDependenciesOf(service.getClass())) { + for (Service s : running) { + if (dep.isInstance(s)) { + return true; + } + } + } + return false; + } - @SuppressWarnings("unchecked") - Class serviceClass = (Class) i; + public static class DependencySet implements Builder { - serviceClazzes.add(serviceClass); + @SuppressWarnings("rawtypes") + private final ServiceLoader serviceLoader = ClassLoading.libraryServiceLoaderFor(ServiceFactory.class); + + private final ServiceMap provided = new ServiceMap(); + private final Set> requested = new HashSet>(); + + public DependencySet with(Service service) { + provided.add(service); + return this; + } + + public DependencySet with(Iterable services) { + for (Service s : services) { + with(s); + } + return this; + } + + public DependencySet with(ServiceCreationConfiguration config) { + Class serviceType = config.getServiceType(); + + //TODO : This stanza is due to the way we use configure the JSR-107 service + if (provided.contains(serviceType) && !serviceType.isAnnotationPresent(PluralService.class)) { + return this; + } + + Iterable> serviceFactories = ServiceLocator.getServiceFactories(serviceLoader); + boolean success = false; + for (ServiceFactory factory : serviceFactories) { + final Class factoryServiceType = factory.getServiceType(); + if (serviceType.isAssignableFrom(factoryServiceType)) { + with(((ServiceFactory) factory).create(config)); + success = true; } } + if (success) { + return this; + } else { + throw new IllegalStateException("No factories exist for " + serviceType); + } + } - if (services.putIfAbsent(service.getClass(), Collections.singleton(service)) != null) { - throw new IllegalStateException("Registration of duplicate service " + service.getClass()); + public DependencySet with(Class clazz) { + requested.add(clazz); + return this; + } + + public boolean 
contains(Class serviceClass) { + return provided.contains(serviceClass); + } + + public T providerOf(Class serviceClass) { + if (serviceClass.isAnnotationPresent(PluralService.class)) { + throw new IllegalArgumentException("Cannot retrieve single provider for plural service"); + } else { + Collection providers = providersOf(serviceClass); + switch (providers.size()) { + case 0: + return null; + case 1: + return providers.iterator().next(); + default: + throw new AssertionError(); + } } + } - /* - * Register the concrete service under all Service subtypes it implements. If - * the Service subtype is annotated with @PluralService, permit multiple registrations; - * otherwise, fail the registration, - */ - for (Class serviceClazz : serviceClazzes) { - if (serviceClazz.isAnnotationPresent(PluralService.class)) { - // Permit multiple registrations - Set registeredServices = services.get(serviceClazz); - if (registeredServices == null) { - registeredServices = new LinkedHashSet(); - services.put(serviceClazz, registeredServices); - } - registeredServices.add(service); + public Collection providersOf(Class serviceClass) { + return provided.get(serviceClass); + } - } else { - // Only a single registration permitted - if (services.putIfAbsent(serviceClazz, Collections.singleton(service)) != null) { - final StringBuilder message = new StringBuilder("Duplicate service implementation(s) found for ") - .append(service.getClass()); - for (Class serviceClass : serviceClazzes) { - if (!serviceClass.isAnnotationPresent(PluralService.class)) { - final Service declaredService = services.get(serviceClass).iterator().next(); - if (declaredService != null) { - message - .append("\n\t\t- ") - .append(serviceClass) - .append(" already has ") - .append(declaredService.getClass()); - } + @Override + public ServiceLocator build() { + try { + ServiceMap resolvedServices = new ServiceMap(); + + for (Service service : provided.all()) { + resolvedServices = lookupDependenciesOf(resolvedServices, service.getClass()).add(service); + } + + for (Class request : requested) { + if (request.isAnnotationPresent(PluralService.class)) { + try { + resolvedServices = lookupService(resolvedServices, request); + } catch (DependencyException e) { + if (!resolvedServices.contains(request)) { + throw e; } } - throw new IllegalStateException(message.toString()); + } else if (!resolvedServices.contains(request)) { + resolvedServices = lookupService(resolvedServices, request); } } + + return new ServiceLocator(resolvedServices); + } catch (DependencyException e) { + throw new IllegalStateException(e); } + } - if (running.get()) { - loadDependenciesOf(service.getClass()); - service.start(this); + ServiceMap lookupDependenciesOf(ServiceMap resolved, Class requested) throws DependencyException { + for (Class dependency : identifyImmediateDependenciesOf(requested)) { + resolved = lookupService(resolved, dependency); } - } finally { - lock.unlock(); + return resolved; + } + + private ServiceMap lookupService(ServiceMap resolved, Class requested) throws DependencyException { + //Have we already resolved this dependency? 
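+      // If so (and the service is not plural), reuse it; otherwise fall back to the explicitly
+      // provided instances, and finally to ServiceFactory discovery, resolving each candidate
+      // factory's own dependencies before instantiating it.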
+ if (resolved.contains(requested) && !requested.isAnnotationPresent(PluralService.class)) { + return resolved; + } + //Attempt resolution from the provided services + resolved = new ServiceMap(resolved).addAll(provided.get(requested)); + if (resolved.contains(requested) && !requested.isAnnotationPresent(PluralService.class)) { + return resolved; + } + Collection> serviceFactories = discoverServices(resolved, requested); + if (serviceFactories.size() > 1 && !requested.isAnnotationPresent(PluralService.class)) { + throw new DependencyException("Multiple factories for non-plural service"); + } + for(ServiceFactory factory : serviceFactories) { + if (!resolved.contains(factory.getServiceType())) { + try { + resolved = lookupDependenciesOf(resolved, factory.getServiceType()); + } catch (DependencyException e) { + continue; + } + + T service = factory.create(null); + + //we copy the service map so that if upstream dependency resolution fails we don't pollute the real resolved set + resolved = new ServiceMap(resolved).add(service); + } + } + if (resolved.contains(requested)) { + return resolved; + } else { + throw new DependencyException("Failed to find provider with satisfied dependency set for " + requested + " [candidates " + serviceFactories + "]"); + } + } + + /** + * For the {@link Service} class specified, attempt to instantiate the service using the + * {@link ServiceFactory} infrastructure. + * + * @param serviceClass the {@code Service} type to create + * @param the type of the {@code Service} + * + * @return the collection of created services; may be empty + * + * @throws IllegalStateException if the configured service is already registered or the configured service + * implements a {@code Service} subtype that is not marked with the {@link PluralService} annotation + * but is already registered + */ + private Collection> discoverServices(ServiceMap resolved, Class serviceClass) { + Collection> serviceFactories = new ArrayList>(); + for (ServiceFactory factory : ServiceLocator.getServiceFactories(serviceLoader)) { + final Class factoryServiceType = factory.getServiceType(); + if (serviceClass.isAssignableFrom(factoryServiceType) && !factory.getClass().isAnnotationPresent(ServiceFactory.RequiresConfiguration.class)) { + if (provided.contains(factoryServiceType) || resolved.contains(factoryServiceType)) { + // Can have only one service registered under a concrete type + continue; + } + serviceFactories.add((ServiceFactory) factory); + } + } + return serviceFactories; } } - private Collection> getAllInterfaces(final Class clazz) { + private static Collection> getAllInterfaces(final Class clazz) { ArrayList> interfaces = new ArrayList>(); - for(Class c = clazz; c != null; c = c.getSuperclass()) { + for (Class c = clazz; c != null; c = c.getSuperclass()) { for (Class i : c.getInterfaces()) { interfaces.add(i); interfaces.addAll(getAllInterfaces(i)); @@ -204,60 +401,55 @@ private Collection> getAllInterfaces(final Class clazz) { return interfaces; } - /** - * Obtains the service supporting the configuration provided. If a registered service - * is not available, an attempt to create the service using the {@link ServiceFactory} - * discovery process will be made. 
- * - * @param config the configuration for the service - * @param the expected service type - * @return the service instance for {@code T} type, or {@code null} if it couldn't be located or instantiated - * - * @throws IllegalArgumentException if {@link ServiceCreationConfiguration#getServiceType() config.getServiceType()} - * is marked with the {@link org.ehcache.spi.service.PluralService PluralService} annotation - */ - public T getOrCreateServiceFor(ServiceCreationConfiguration config) { - return getServiceInternal(config.getServiceType(), config, true); - } + private static Set> identifyImmediateDependenciesOf(final Class clazz) { + if (clazz == null) { + return emptySet(); + } - /** - * Obtains the identified service. If a registered service is not available, an attempt - * to create the service using the {@link ServiceFactory} discovery process will be made. - * - * @param serviceType the {@code class} of the service being looked up - * @param the expected service type - * @return the service instance for {@code T} type, or {@code null} if a {@code Service} of type - * {@code serviceType} is not available - * - * @throws IllegalArgumentException if {@code serviceType} is marked with the - * {@link org.ehcache.spi.service.PluralService PluralService} annotation; - * use {@link #getServicesOfType(Class)} for plural services - */ - @Override - public T getService(Class serviceType) { - return getServiceInternal(serviceType, null, false); - } + Set> dependencies = new HashSet>(); + final ServiceDependencies annotation = clazz.getAnnotation(ServiceDependencies.class); + if (annotation != null) { + for (final Class dependency : annotation.value()) { + if (Service.class.isAssignableFrom(dependency)) { + dependencies.add((Class) dependency); + } else { + throw new IllegalStateException("Service dependency declared by " + clazz.getName() + + " is not a Service: " + dependency.getName()); + } + } + } - private T getServiceInternal( - final Class serviceType, final ServiceCreationConfiguration config, final boolean shouldCreate) { - if (serviceType.isAnnotationPresent(PluralService.class)) { - throw new IllegalArgumentException(serviceType.getName() + " is marked as a PluralService"); + for (Class interfaceClazz : clazz.getInterfaces()) { + if (Service.class.isAssignableFrom(interfaceClazz)) { + dependencies.addAll(identifyImmediateDependenciesOf(Service.class.getClass().cast(interfaceClazz))); + } } - final Collection registeredServices = findServices(serviceType, config, shouldCreate); - if (registeredServices.size() > 1) { - throw new AssertionError("The non-PluralService type" + serviceType.getName() - + " has more than one service registered"); + + dependencies.addAll(identifyImmediateDependenciesOf(clazz.getSuperclass())); + + return dependencies; + } + + private static Set> identifyTransitiveDependenciesOf(final Class clazz) { + Set> transitive = new HashSet>(); + + Set> dependencies = identifyImmediateDependenciesOf(clazz); + transitive.addAll(dependencies); + + for (Class klazz : dependencies) { + transitive.addAll(identifyTransitiveDependenciesOf(klazz)); } - return (registeredServices.isEmpty() ? 
null : registeredServices.iterator().next()); + + return transitive; } - private Collection findServices( - Class serviceType, ServiceCreationConfiguration config, boolean shouldCreate) { - final Collection registeredServices = getServicesOfTypeInternal(serviceType); - if (shouldCreate && (registeredServices.isEmpty() || serviceType.isAnnotationPresent(PluralService.class))) { - registeredServices.addAll(discoverServices(serviceType, config)); + @SuppressWarnings("unchecked") + private static Iterable> getServiceFactories(@SuppressWarnings("rawtypes") ServiceLoader serviceFactory) { + List> list = new ArrayList>(); + for (ServiceFactory factory : serviceFactory) { + list.add((ServiceFactory)factory); } - return registeredServices; + return list; } public static Collection findAmongst(Class clazz, Collection instances) { @@ -289,186 +481,111 @@ public static T findSingletonAmongst(Class clazz, Object ... instances) { } } - public void startAllServices() throws Exception { - Deque started = new ArrayDeque(); - final Lock lock = runningLock.writeLock(); - lock.lock(); - try { - resolveMissingDependencies(); + private static class DependencyException extends Exception { + public DependencyException(String s) { + super(s); + } + } - if (!running.compareAndSet(false, true)) { - throw new IllegalStateException("Already started!"); - } + private static class ServiceMap { - for (Set registeredServices : services.values()) { - for (Service service : registeredServices) { - if (!started.contains(service)) { - service.start(this); - started.push(service); - } - } - } - LOGGER.debug("All Services successfully started."); - } catch (Exception e) { - while(!started.isEmpty()) { - Service toBeStopped = started.pop(); - try { - toBeStopped.stop(); - } catch (Exception e1) { - LOGGER.error("Stopping Service failed due to ", e1); - } + private final Map, Set> services; + + public ServiceMap(ServiceMap resolved) { + this.services = new HashMap, Set>(); + for (Map.Entry, Set> e : resolved.services.entrySet()) { + Set copy = newSetFromMap(new IdentityHashMap()); + copy.addAll(e.getValue()); + this.services.put(e.getKey(), copy); } - throw e; - } finally { - lock.unlock(); } - } - private void resolveMissingDependencies() { - for (Set registeredServices : services.values()) { - for (Service service : registeredServices) { - loadDependenciesOf(service.getClass()); - } + public ServiceMap() { + this.services = new HashMap, Set>(); } - } - public void stopAllServices() throws Exception { - Exception firstException = null; - Lock lock = runningLock.writeLock(); - lock.lock(); - try { - if(!running.compareAndSet(true, false)) { - throw new IllegalStateException("Already stopped!"); + public Set get(Class serviceType) { + Set s = services.get(serviceType); + if (s == null) { + return emptySet(); + } else { + return (Set) unmodifiableSet(s); } - Set stoppedServices = Collections.newSetFromMap(new IdentityHashMap()); - for (Set registeredServices : services.values()) { - for (Service service : registeredServices) { - if (stoppedServices.contains(service)) { - continue; - } - try { - service.stop(); - } catch (Exception e) { - if (firstException == null) { - firstException = e; - } else { - LOGGER.error("Stopping Service failed due to ", e); - } - } - stoppedServices.add(service); - } - } - } finally { - lock.unlock(); } - if(firstException != null) { - throw firstException; - } - } - /** - * Ensures the dependencies, as declared using the {@link ServiceDependencies} annotation, - * of the specified class are registered 
in this {@code ServiceLocator}. If a dependency - * is not registered when this method is invoked, an attempt to load it will be made using - * the {@link ServiceFactory} infrastructure. - * - * @param clazz the class for which dependency availability is checked - */ - public void loadDependenciesOf(Class clazz) { - final Collection> transitiveDependencies = identifyTransitiveDependenciesOf(clazz); - for (Class aClass : transitiveDependencies) { - if (findServices(aClass, null, true).isEmpty()) { - throw new IllegalStateException("Unable to resolve dependent service: " + aClass.getName()); + public ServiceMap addAll(Iterable services) { + for (Service s : services) { + add(s); } + return this; } - } - /** - * Identifies, transitively, all dependencies declared for the designated class through - * {@link ServiceDependencies} annotations. This method intentionally accepts - * {@code ServiceDependencies} annotations on non-{@code Service} implementations to - * permit classes like cache manager implementations to declare dependencies on - * services. All types referred to by the {@code ServiceDependencies} annotation - * must be subtypes of {@link Service}. - * - * @param clazz the top-level class instance for which the dependencies are to be determined - * - * @return the collection of declared dependencies - * - * @see #identifyTransitiveDependenciesOf(Class, Set) - */ - // Package-private for unit tests - Collection> identifyTransitiveDependenciesOf(final Class clazz) { - return identifyTransitiveDependenciesOf(clazz, new LinkedHashSet>()); - } + public ServiceMap add(Service service) { + Set> serviceClazzes = new HashSet>(); - /** - * Identifies the transitive dependencies of the designated class as declared through - * {@link ServiceDependencies} annotations. - * - * @param clazz the class to check for declared dependencies - * @param dependencies the current set of declared dependencies; this set will be added updated - * - * @return the set {@code dependencies} - * - * @see #identifyTransitiveDependenciesOf(Class) - */ - @SuppressWarnings("unchecked") - private Collection> identifyTransitiveDependenciesOf(final Class clazz, final Set> dependencies) { - if (clazz == null || clazz == Object.class) { - return dependencies; - } + serviceClazzes.add(service.getClass()); + for (Class i : getAllInterfaces(service.getClass())) { + if (Service.class != i && Service.class.isAssignableFrom(i)) { - final ServiceDependencies annotation = clazz.getAnnotation(ServiceDependencies.class); - if (annotation != null) { - for (final Class dependency : annotation.value()) { - if (!dependencies.contains(dependency)) { - if (!Service.class.isAssignableFrom(dependency)) { - throw new IllegalStateException("Service dependency declared by " + clazz.getName() + - " is not a Service: " + dependency.getName()); + @SuppressWarnings("unchecked") + Class serviceClass = (Class) i; + + serviceClazzes.add(serviceClass); + } + } + + /* + * Register the concrete service under all Service subtypes it implements. 
If + * the Service subtype is annotated with @PluralService, permit multiple registrations; + * otherwise, fail the registration, + */ + for (Class serviceClazz : serviceClazzes) { + if (serviceClazz.isAnnotationPresent(PluralService.class)) { + // Permit multiple registrations + Set registeredServices = services.get(serviceClazz); + if (registeredServices == null) { + registeredServices = new LinkedHashSet(); + services.put(serviceClazz, registeredServices); + } + registeredServices.add(service); + } else { + // Only a single registration permitted + Set registeredServices = services.get(serviceClazz); + if (registeredServices == null || registeredServices.isEmpty()) { + services.put(serviceClazz, singleton(service)); + } else if (!registeredServices.contains(service)) { + final StringBuilder message = new StringBuilder("Duplicate service implementation(s) found for ") + .append(service.getClass()); + for (Class serviceClass : serviceClazzes) { + if (!serviceClass.isAnnotationPresent(PluralService.class)) { + Set s = this.services.get(serviceClass); + final Service declaredService = s == null ? null : s.iterator().next(); + if (declaredService != null) { + message + .append("\n\t\t- ") + .append(serviceClass) + .append(" already has ") + .append(declaredService.getClass()); + } + } + } + throw new IllegalStateException(message.toString()); } - dependencies.add((Class) dependency); - identifyTransitiveDependenciesOf(dependency, dependencies); } } + return this; } - for (Class interfaceClazz : clazz.getInterfaces()) { - if (Service.class != interfaceClazz && Service.class.isAssignableFrom(interfaceClazz)) { - identifyTransitiveDependenciesOf(interfaceClazz, dependencies); + public Set all() { + Set all = newSetFromMap(new IdentityHashMap()); + for (Set s : services.values()) { + all.addAll(s); } + return unmodifiableSet(all); } - identifyTransitiveDependenciesOf(clazz.getSuperclass(), dependencies); - - return dependencies; - } - - public boolean knowsServiceFor(ServiceConfiguration serviceConfig) { - return !getServicesOfType(serviceConfig.getServiceType()).isEmpty(); - } - - @Override - public Collection getServicesOfType(Class serviceType) { - return getServicesOfTypeInternal(serviceType); - } - - /** - * Gets the collection of services implementing the type specified. 
- * - * @param serviceType the subtype of {@code Service} to return - * @param the {@code Service} subtype - * - * @return a collection, possibly empty, of the registered services implementing {@code serviceType} - */ - private Collection getServicesOfTypeInternal(final Class serviceType) { - HashSet result = new LinkedHashSet(); - final Set registeredServices = this.services.get(serviceType); - if (registeredServices != null) { - for (Service service : registeredServices) { - result.add(serviceType.cast(service)); - } + public boolean contains(Class request) { + return services.containsKey(request); } - return result; } } diff --git a/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java b/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java index c0ea1b2a31..df1275a7fb 100644 --- a/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java +++ b/core/src/main/java/org/ehcache/core/spi/service/ServiceFactory.java @@ -20,6 +20,13 @@ import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceCreationConfiguration; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import static java.lang.annotation.RetentionPolicy.RUNTIME; + /** * A factory abstraction that can create {@link Service} instances. */ @@ -41,5 +48,12 @@ public interface ServiceFactory { * * @return the class of the produced service. */ - Class getServiceType(); + Class getServiceType(); + + + @Retention(RUNTIME) + @Target(ElementType.TYPE) + @interface RequiresConfiguration { + + } } diff --git a/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java b/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java index b0c713e498..4ee9ca9ef3 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheManagerTest.java @@ -152,7 +152,7 @@ public Class getServiceType() { try { new EhcacheManager(config); fail("Should have thrown..."); - } catch (IllegalArgumentException e) { + } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString(NoSuchService.class.getName())); } } diff --git a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java b/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java index 408297a008..b1541e1534 100644 --- a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java +++ b/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorPluralTest.java @@ -27,11 +27,12 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.isOneOf; +import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @@ -46,27 +47,27 @@ public class ServiceLocatorPluralTest { */ @Test public void testMultipleInstanceRegistration() throws Exception { - final ServiceLocator serviceLocator = new ServiceLocator(); + final ServiceLocator.DependencySet serviceLocator = dependencySet(); final ConcreteService firstSingleton = new ConcreteService(); final ConcreteService secondSingleton = new 
ConcreteService(); - serviceLocator.addService(firstSingleton); + serviceLocator.with(firstSingleton); - assertThat(serviceLocator.getServicesOfType(ConcreteService.class), contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AdditionalService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AggregateService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(FooService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(BarService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(FoundationService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AugmentedService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(ConcreteService.class), contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AdditionalService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AggregateService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(FooService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(BarService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(FoundationService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AugmentedService.class), Matchers.contains(firstSingleton)); try { - serviceLocator.addService(secondSingleton); + serviceLocator.with(secondSingleton); fail(); } catch (IllegalStateException e) { // expected - assertThat(e.getMessage(), containsString("duplicate service class " + ConcreteService.class.getName())); + assertThat(e.getMessage(), containsString(ConcreteService.class.getName())); } } @@ -76,30 +77,26 @@ public void testMultipleInstanceRegistration() throws Exception { */ @Test public void testMultipleImplementationRegistration() throws Exception { - final ServiceLocator serviceLocator = new ServiceLocator(); + final ServiceLocator.DependencySet serviceLocator = dependencySet(); final ConcreteService firstSingleton = new ConcreteService(); final ExtraConcreteService secondSingleton = new ExtraConcreteService(); - serviceLocator.addService(firstSingleton); + serviceLocator.with(firstSingleton); - assertThat(serviceLocator.getServicesOfType(ConcreteService.class), contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AdditionalService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AggregateService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(FooService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(BarService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(FoundationService.class), Matchers.contains(firstSingleton)); - assertThat(serviceLocator.getServicesOfType(AugmentedService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(ConcreteService.class), contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AdditionalService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AggregateService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(FooService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(BarService.class), 
Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(FoundationService.class), Matchers.contains(firstSingleton)); + assertThat(serviceLocator.providersOf(AugmentedService.class), Matchers.contains(firstSingleton)); try { - serviceLocator.addService(secondSingleton); + serviceLocator.with(secondSingleton); fail(); } catch (IllegalStateException e) { // expected - - // This assertion is here to point out a potentially unwanted side-effect -- a partial registration - assertThat(serviceLocator.getServicesOfType(ExtraConcreteService.class), contains(secondSingleton)); - final String message = e.getMessage(); assertThat(message, containsString(AdditionalService.class.getName())); assertThat(message, containsString(AggregateService.class.getName())); @@ -116,73 +113,29 @@ public void testMultipleImplementationRegistration() throws Exception { */ @Test public void testPluralRegistration() throws Exception { - final ServiceLocator serviceLocator = new ServiceLocator(); + final ServiceLocator.DependencySet dependencySet = dependencySet(); final AlphaServiceProviderImpl alphaServiceProvider = new AlphaServiceProviderImpl(); final BetaServiceProviderImpl betaServiceProvider = new BetaServiceProviderImpl(); - serviceLocator.addService(alphaServiceProvider); + dependencySet.with(alphaServiceProvider); - assertThat(serviceLocator.getServicesOfType(AlphaServiceProviderImpl.class), + assertThat(dependencySet.providersOf(AlphaServiceProviderImpl.class), everyItem(isOneOf(alphaServiceProvider))); - assertThat(serviceLocator.getServicesOfType(AlphaServiceProvider.class), + assertThat(dependencySet.providersOf(AlphaServiceProvider.class), everyItem(Matchers.isOneOf(alphaServiceProvider))); - assertThat(serviceLocator.getServicesOfType(PluralServiceProvider.class), + assertThat(dependencySet.providersOf(PluralServiceProvider.class), everyItem(Matchers.isOneOf(alphaServiceProvider))); - serviceLocator.addService(betaServiceProvider); + dependencySet.with(betaServiceProvider); - assertThat(serviceLocator.getServicesOfType(BetaServiceProviderImpl.class), + assertThat(dependencySet.providersOf(BetaServiceProviderImpl.class), everyItem(isOneOf(betaServiceProvider))); - assertThat(serviceLocator.getServicesOfType(BetaServiceProvider.class), + assertThat(dependencySet.providersOf(BetaServiceProvider.class), everyItem(Matchers.isOneOf(betaServiceProvider))); - assertThat(serviceLocator.getServicesOfType(PluralServiceProvider.class), + assertThat(dependencySet.providersOf(PluralServiceProvider.class), everyItem(Matchers.isOneOf(alphaServiceProvider, betaServiceProvider))); } - - /** - * Ensures dependencies declared in {@link ServiceDependencies} on a {@code Service} subtype - * can be discovered. - */ - @Test - public void testDependencyDiscoveryOverService() throws Exception { - final ServiceLocator serviceLocator = new ServiceLocator(); - - final Collection> concreteServiceDependencies = - serviceLocator.identifyTransitiveDependenciesOf(ConcreteService.class); - assertThat(concreteServiceDependencies, - everyItem(Matchers.>isOneOf( - BetaService.class, - BetaServiceProvider.class, - InitialService.class, - FooService.Provider.class, - BarService.Provider.class, - AlphaService.class, - AlphaServiceProvider.class, - FoundationService.Provider.class - ))); - } - - /** - * Ensures dependencies declared in {@link ServiceDependencies} on a non-{@code Service} type - * can be discovered. 
- */ - @Test - public void testDependencyDiscoveryOverNonService() throws Exception { - final ServiceLocator serviceLocator = new ServiceLocator(); - - final Collection> nonServiceDependencies = - serviceLocator.identifyTransitiveDependenciesOf(NotAService.class); - System.out.printf("NotAService dependencies : %s%n", nonServiceDependencies); - assertThat(nonServiceDependencies, - everyItem(Matchers.>isOneOf( - BetaService.class, - BetaServiceProvider.class, - AlphaService.class, - AlphaServiceProvider.class - ))); - } - } class StartStopCounter { diff --git a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java b/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java index 1ab7da22c4..b6247263c5 100644 --- a/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java +++ b/core/src/test/java/org/ehcache/core/internal/service/ServiceLocatorTest.java @@ -37,8 +37,10 @@ import org.ehcache.core.spi.services.TestProvidedService; import org.ehcache.core.spi.services.TestService; import org.hamcrest.CoreMatchers; +import org.junit.Ignore; import org.junit.Test; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; @@ -60,14 +62,14 @@ public class ServiceLocatorTest { @Test public void testClassHierarchies() { - ServiceLocator provider = new ServiceLocator(); + ServiceLocator.DependencySet dependencySet = dependencySet(); final Service service = new ChildTestService(); - provider.addService(service); - assertThat(provider.getService(FooProvider.class), sameInstance(service)); + dependencySet.with(service); + assertThat(dependencySet.providerOf(FooProvider.class), sameInstance(service)); final Service fancyCacheProvider = new FancyCacheProvider(); - provider.addService(fancyCacheProvider); + dependencySet.with(fancyCacheProvider); - final Collection servicesOfType = provider.getServicesOfType(CacheProvider.class); + final Collection servicesOfType = dependencySet.providersOf(CacheProvider.class); assertThat(servicesOfType, is(not(empty()))); assertThat(servicesOfType.iterator().next(), sameInstance(fancyCacheProvider)); } @@ -81,8 +83,7 @@ public Enumeration getResources(String name) throws IOException { } }); - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.getService(TestService.class); + dependencySet().with(TestService.class).build().getService(TestService.class); } @Test @@ -90,7 +91,7 @@ public void testAttemptsToStopStartedServicesOnInitFailure() { Service s1 = new ParentTestService(); FancyCacheProvider s2 = new FancyCacheProvider(); - ServiceLocator locator = new ServiceLocator(s1, s2); + ServiceLocator locator = dependencySet().with(s1).with(s2).build(); try { locator.startAllServices(); fail(); @@ -108,7 +109,7 @@ public void testAttemptsToStopAllServicesOnCloseFailure() { Service s2 = mock(FooProvider.class); Service s3 = mock(CacheLoaderWriterProvider.class); - ServiceLocator locator = new ServiceLocator(s1, s2, s3); + ServiceLocator locator = dependencySet().with(s1).with(s2).with(s3).build(); try { locator.startAllServices(); } catch (Exception e) { @@ -132,7 +133,7 @@ public void testAttemptsToStopAllServicesOnCloseFailure() { public void testStopAllServicesOnlyStopsEachServiceOnce() throws Exception { Service s1 = mock(CacheProvider.class, withSettings().extraInterfaces(CacheLoaderWriterProvider.class)); - ServiceLocator 
locator = new ServiceLocator(s1); + ServiceLocator locator = dependencySet().with(s1).build(); try { locator.startAllServices(); } catch (Exception e) { @@ -145,7 +146,7 @@ public void testStopAllServicesOnlyStopsEachServiceOnce() throws Exception { @Test public void testCanOverrideDefaultServiceFromServiceLoader() { - ServiceLocator locator = new ServiceLocator(new ExtendedTestService()); + ServiceLocator locator = dependencySet().with(new ExtendedTestService()).build(); TestService testService = locator.getService(TestService.class); assertThat(testService, instanceOf(ExtendedTestService.class)); } @@ -153,8 +154,8 @@ public void testCanOverrideDefaultServiceFromServiceLoader() { @Test public void testCanOverrideServiceDependencyWithoutOrderingProblem() throws Exception { final AtomicBoolean started = new AtomicBoolean(false); - ServiceLocator serviceLocator = new ServiceLocator(new TestServiceConsumerService()); - serviceLocator.addService(new TestService() { + ServiceLocator serviceLocator = dependencySet().with(new TestServiceConsumerService()) + .with(new TestService() { @Override public void start(ServiceProvider serviceProvider) { started.set(true); @@ -164,7 +165,7 @@ public void start(ServiceProvider serviceProvider) { public void stop() { // no-op } - }); + }).build(); serviceLocator.startAllServices(); assertThat(started.get(), is(true)); } @@ -200,12 +201,12 @@ public void stop() { Consumer1 consumer1 = spy(new Consumer1()); Consumer2 consumer2 = new Consumer2(); - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator.DependencySet dependencySet = dependencySet(); // add some services - serviceLocator.addService(consumer1); - serviceLocator.addService(consumer2); - serviceLocator.addService(new TestService() { + dependencySet.with(consumer1); + dependencySet.with(consumer2); + dependencySet.with(new TestService() { @Override public void start(ServiceProvider serviceProvider) { } @@ -217,7 +218,8 @@ public void stop() { }); // simulate what is done in ehcachemanager - serviceLocator.loadDependenciesOf(TestServiceConsumerService.class); + dependencySet.with(TestService.class); + ServiceLocator serviceLocator = dependencySet.build(); serviceLocator.startAllServices(); serviceLocator.stopAllServices(); @@ -232,12 +234,13 @@ public void stop() { @Test public void testRedefineDefaultServiceWhileDependingOnIt() throws Exception { - ServiceLocator serviceLocator = new ServiceLocator(new YetAnotherCacheProvider()); + ServiceLocator serviceLocator = dependencySet().with(new YetAnotherCacheProvider()).build(); serviceLocator.startAllServices(); } @Test + @Ignore public void testCircularDeps() throws Exception { final class StartStopCounter { @@ -311,7 +314,7 @@ public void stop() { } } - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator.DependencySet dependencySet = dependencySet(); Consumer1 consumer1 = new Consumer1(); Consumer2 consumer2 = new Consumer2(); @@ -319,11 +322,12 @@ public void stop() { DependsOnMe dependsOnMe = new DependsOnMe(); // add some services - serviceLocator.addService(consumer1); - serviceLocator.addService(consumer2); - serviceLocator.addService(myTestProvidedService); - serviceLocator.addService(dependsOnMe); + dependencySet.with(consumer1); + dependencySet.with(consumer2); + dependencySet.with(myTestProvidedService); + dependencySet.with(dependsOnMe); + ServiceLocator serviceLocator = dependencySet.build(); // simulate what is done in ehcachemanager serviceLocator.startAllServices(); diff --git 
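The test rewrites above all apply the same mechanical migration. A condensed before/after sketch of that pattern follows; the stand-in SketchService type is invented for illustration, everything else mirrors the calls used in the updated tests.

// Sketch of the migration applied throughout these tests -- not part of the patch.
import org.ehcache.core.internal.service.ServiceLocator;
import org.ehcache.spi.service.Service;
import org.ehcache.spi.service.ServiceProvider;

import static org.ehcache.core.internal.service.ServiceLocator.dependencySet;

public class LocatorMigrationSketch {

  // Invented stand-in for any service interface a test might register.
  interface SketchService extends Service {}

  static class SketchServiceImpl implements SketchService {
    @Override public void start(ServiceProvider<Service> serviceProvider) {}
    @Override public void stop() {}
  }

  public static void main(String[] args) throws Exception {
    // Before this series the locator was assembled mutably:
    //   ServiceLocator locator = new ServiceLocator();
    //   locator.addService(new SketchServiceImpl());
    //
    // After: services are collected on a DependencySet and the locator is built once.
    ServiceLocator locator = dependencySet().with(new SketchServiceImpl()).build();
    locator.startAllServices();
    try {
      SketchService resolved = locator.getService(SketchService.class);
      System.out.println("resolved: " + resolved);
    } finally {
      locator.stopAllServices();
    }
  }
}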
a/core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java b/core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java index 2ab77db6a3..ae223eac4e 100644 --- a/core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java +++ b/core/src/test/java/org/ehcache/core/internal/store/StoreSupportTest.java @@ -31,6 +31,8 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; +import static java.util.Arrays.asList; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -69,14 +71,13 @@ public String toString() { public void testSelectStoreProvider() throws Exception { final TestBaseProvider expectedProvider = new PrimaryProvider1(); - final TestBaseProvider[] storeProviders = { + Collection storeProviders = asList( new SecondaryProvider1(), new ZeroProvider(), expectedProvider - }; - - final ServiceLocator serviceLocator = new ServiceLocator(storeProviders); + ); + final ServiceLocator serviceLocator = dependencySet().with(storeProviders).build(); final Store.Provider selectedProvider = StoreSupport.selectStoreProvider(serviceLocator, Collections.>singleton(anyResourceType), Collections.>emptyList()); @@ -92,15 +93,15 @@ public void testSelectStoreProvider() throws Exception { public void testSelectStoreProviderMultiple() throws Exception { final TestBaseProvider expectedProvider = new PrimaryProvider1(); - final TestBaseProvider[] storeProviders = { + final Collection storeProviders = asList( new SecondaryProvider1(), new ZeroProvider(), expectedProvider, new SecondaryProvider2(), new PrimaryProvider2() - }; + ); - final ServiceLocator serviceLocator = new ServiceLocator(storeProviders); + final ServiceLocator serviceLocator = dependencySet().with(storeProviders).build(); try { StoreSupport.selectStoreProvider(serviceLocator, @@ -119,10 +120,8 @@ public void testSelectStoreProviderMultiple() throws Exception { @Test public void testSelectStoreProviderNoProviders() throws Exception { - - final ServiceLocator serviceLocator = new ServiceLocator(); try { - StoreSupport.selectStoreProvider(serviceLocator, + StoreSupport.selectStoreProvider(dependencySet().build(), Collections.>singleton(anyResourceType), Collections.>emptyList()); fail(); @@ -154,13 +153,13 @@ public int getTierHeight() { } }; - final TestBaseProvider[] storeProviders = { + final Collection storeProviders = asList( new SecondaryProvider1(), new ZeroProvider(), new PrimaryProvider1() - }; + ); - final ServiceLocator serviceLocator = new ServiceLocator(storeProviders); + final ServiceLocator serviceLocator = dependencySet().with(storeProviders).build(); try { StoreSupport.selectStoreProvider(serviceLocator, Collections.>singleton(otherResourceType), @@ -252,4 +251,4 @@ public void stop() { throw new UnsupportedOperationException("TestBaseProvider.stop not implemented"); } } -} \ No newline at end of file +} diff --git a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java b/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java index 9b3cbe411f..8682da4e33 100644 --- a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java @@ -79,6 +79,7 @@ import static org.ehcache.config.ResourceType.Core.DISK; import static org.ehcache.config.ResourceType.Core.OFFHEAP; import static 
org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT; @@ -101,13 +102,6 @@ */ public class UserManagedCacheBuilder> implements Builder { - @ServiceDependencies(Store.Provider.class) - private static class ServiceDeps { - private ServiceDeps() { - throw new UnsupportedOperationException("This is an annotation placeholder, not to be instantiated"); - } - } - private static final Logger LOGGER = LoggerFactory.getLogger(UserManagedCacheBuilder.class); private static final AtomicLong instanceId = new AtomicLong(0L); @@ -169,18 +163,17 @@ private UserManagedCacheBuilder(UserManagedCacheBuilder toCopy) { this.sizeOfUnit = toCopy.sizeOfUnit; } - T build(ServiceLocator serviceLocator) throws IllegalStateException { + T build(ServiceLocator.DependencySet serviceLocatorBuilder) throws IllegalStateException { validateListenerConfig(); + ServiceLocator serviceLocator; try { for (ServiceCreationConfiguration serviceCreationConfig : serviceCreationConfigurations) { - Service service = serviceLocator.getOrCreateServiceFor(serviceCreationConfig); - if (service == null) { - throw new IllegalArgumentException("Couldn't resolve Service " + serviceCreationConfig.getServiceType().getName()); - } + serviceLocatorBuilder = serviceLocatorBuilder.with(serviceCreationConfig); } - serviceLocator.loadDependenciesOf(ServiceDeps.class); + serviceLocatorBuilder = serviceLocatorBuilder.with(Store.Provider.class); + serviceLocator = serviceLocatorBuilder.build(); serviceLocator.startAllServices(); } catch (Exception e) { throw new IllegalStateException("UserManagedCacheBuilder failed to build.", e); @@ -394,7 +387,7 @@ T cast(UserManagedCache cache) { * @throws IllegalStateException if the user managed cache cannot be built */ public final T build(final boolean init) throws IllegalStateException { - final T build = build(new ServiceLocator(services.toArray(new Service[services.size()]))); + final T build = build(dependencySet().with(services)); if (init) { build.init(); } diff --git a/impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java b/impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java index 02a2c68591..f7f97e8319 100644 --- a/impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/UserManagedCacheBuilderTest.java @@ -50,7 +50,7 @@ public void testIsExtensible() { public UserManagedCacheBuilder> builder(final UserManagedCacheBuilder> builder) { return new UserManagedCacheBuilder>(String.class, Object.class) { @Override - TestUserManagedCache build(final ServiceLocator serviceProvider) { + TestUserManagedCache build(final ServiceLocator.DependencySet dependencySet) { return new TestUserManagedCache(); } }; diff --git a/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java b/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java index ae9260bbf6..7a36576ecd 100644 --- a/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java +++ b/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java @@ -22,9 +22,12 @@ import 
org.ehcache.impl.internal.store.offheap.OffHeapStore; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.core.spi.store.tiering.CachingTier; +import org.hamcrest.core.IsCollectionContaining; import org.hamcrest.core.IsSame; import org.junit.Test; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.hamcrest.core.IsCollectionContaining.hasItem; import static org.junit.Assert.assertThat; /** @@ -35,23 +38,24 @@ public class ServiceProviderTest { @Test public void testSupportsMultipleAuthoritativeTierProviders() throws Exception { - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator.DependencySet dependencySet = dependencySet(); OnHeapStore.Provider cachingTierProvider = new OnHeapStore.Provider(); OffHeapStore.Provider authoritativeTierProvider = new OffHeapStore.Provider(); OffHeapDiskStore.Provider diskStoreProvider = new OffHeapDiskStore.Provider(); - serviceLocator.addService(cachingTierProvider); - serviceLocator.addService(authoritativeTierProvider); - serviceLocator.addService(diskStoreProvider); + dependencySet.with(cachingTierProvider); + dependencySet.with(authoritativeTierProvider); + dependencySet.with(diskStoreProvider); + ServiceLocator serviceLocator = dependencySet.build(); serviceLocator.startAllServices(); - assertThat(serviceLocator.getServicesOfType(CachingTier.Provider.class).iterator().next(), - IsSame.sameInstance(cachingTierProvider)); - assertThat(serviceLocator.getServicesOfType(AuthoritativeTier.Provider.class).iterator().next(), - IsSame.sameInstance(authoritativeTierProvider)); - assertThat(serviceLocator.getServicesOfType(diskStoreProvider.getClass()).iterator().next(), - IsSame.sameInstance(diskStoreProvider)); + assertThat(serviceLocator.getServicesOfType(CachingTier.Provider.class), + IsCollectionContaining.hasItem(IsSame.sameInstance(cachingTierProvider))); + assertThat(serviceLocator.getServicesOfType(AuthoritativeTier.Provider.class), + IsCollectionContaining.hasItem(IsSame.sameInstance(authoritativeTierProvider))); + assertThat(serviceLocator.getServicesOfType((Class) diskStoreProvider.getClass()), + IsCollectionContaining.hasItem(IsSame.sameInstance(diskStoreProvider))); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java b/impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java index f102d03379..7a3d6d7a49 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/DefaultTimeSourceServiceTest.java @@ -20,9 +20,11 @@ import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; import org.junit.Test; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.CoreMatchers.sameInstance; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; @@ -30,13 +32,12 @@ /** * DefaultTimeSourceServiceTest */ -@ServiceDependencies(TimeSourceService.class) public class DefaultTimeSourceServiceTest { @Test public void testResolvesDefaultTimeSource() { - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.loadDependenciesOf(this.getClass()); + ServiceLocator.DependencySet dependencySet = dependencySet().with(TimeSourceService.class); + ServiceLocator 
serviceLocator = dependencySet.build(); assertThat(serviceLocator.getService(TimeSourceService.class).getTimeSource(), sameInstance(SystemTimeSource.INSTANCE)); } @@ -44,9 +45,9 @@ public void testResolvesDefaultTimeSource() { @Test public void testCanConfigureAlternateTimeSource() { TimeSource timeSource = mock(TimeSource.class); - ServiceLocator serviceLocator = new ServiceLocator(); - TimeSourceService timeSourceService = serviceLocator.getOrCreateServiceFor(new TimeSourceConfiguration(timeSource)); + ServiceLocator serviceLocator = dependencySet().with(new TimeSourceConfiguration(timeSource)).build(); + TimeSourceService timeSourceService = serviceLocator.getService(TimeSourceService.class); assertThat(timeSourceService.getTimeSource(), sameInstance(timeSource)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java index 03b5c15677..813ba422d1 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java @@ -46,6 +46,7 @@ import java.util.Set; import static java.util.Collections.singleton; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertThat; @@ -64,7 +65,8 @@ public class OffHeapDiskStoreProviderTest { public void testStatisticsAssociations() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(mock(SerializationProvider.class), new DefaultTimeSourceService(null), mock(DiskResourceService.class)); + ServiceLocator serviceLocator = dependencySet().with(mock(SerializationProvider.class)) + .with(new DefaultTimeSourceService(null)).with(mock(DiskResourceService.class)).build(); provider.start(serviceLocator); OffHeapDiskStore store = provider.createStore(getStoreConfig(), mock(PersistableResourceService.PersistenceSpaceIdentifier.class)); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java index e572f73c4b..8792d687b1 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java @@ -55,6 +55,7 @@ import static org.ehcache.config.ResourceType.Core.DISK; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -165,7 +166,7 @@ public ServiceConfiguration[] getServiceConfigurations() { @Override public ServiceLocator getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator serviceLocator = dependencySet().build(); try { serviceLocator.startAllServices(); } catch (Exception e) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java index 0a6e8c5dca..0d91a8dc43 100644 --- 
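As the DefaultTimeSourceServiceTest changes above show, a dependency can now be requested either by service type, with(TimeSourceService.class), or through a creation configuration such as TimeSourceConfiguration. The sketch below shows how a consuming service might pair that with @ServiceDependencies; the TimeStampingSketchService class is invented, and the wiring is an assumption based on the behaviour exercised by those tests rather than a documented contract.

// Illustrative consumer -- not part of the patch.
import org.ehcache.core.internal.service.ServiceLocator;
import org.ehcache.core.spi.time.TimeSource;
import org.ehcache.core.spi.time.TimeSourceService;
import org.ehcache.spi.service.Service;
import org.ehcache.spi.service.ServiceDependencies;
import org.ehcache.spi.service.ServiceProvider;

import static org.ehcache.core.internal.service.ServiceLocator.dependencySet;

// Declares the dependency so the locator knows this service needs a TimeSourceService.
@ServiceDependencies(TimeSourceService.class)
class TimeStampingSketchService implements Service {

  private volatile TimeSource timeSource;

  @Override
  public void start(ServiceProvider<Service> serviceProvider) {
    // Resolved against the locator that is starting this service.
    timeSource = serviceProvider.getService(TimeSourceService.class).getTimeSource();
  }

  @Override
  public void stop() {
    timeSource = null;
  }

  long now() {
    return timeSource.getTimeMillis();
  }
}

class TimeStampingSketchMain {
  public static void main(String[] args) throws Exception {
    TimeStampingSketchService consumer = new TimeStampingSketchService();
    // with(TimeSourceService.class) resolves the default time source service,
    // mirroring testResolvesDefaultTimeSource above.
    ServiceLocator locator = dependencySet()
        .with(TimeSourceService.class)
        .with(consumer)
        .build();
    locator.startAllServices();
    System.out.println(consumer.now());
    locator.stopAllServices();
  }
}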
a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java @@ -78,6 +78,7 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.MemoryUnit.MB; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.ehcache.expiry.Expirations.noExpiration; import static org.ehcache.impl.internal.spi.TestServiceProvider.providerContaining; import static org.hamcrest.CoreMatchers.containsString; @@ -119,9 +120,7 @@ public void testRecovery() throws StoreAccessException, IOException { @Test public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(diskResourceService); - serviceLocator.addService(provider); + ServiceLocator serviceLocator = dependencySet().with(diskResourceService).with(provider).build(); serviceLocator.startAllServices(); CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); @@ -169,9 +168,7 @@ public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws @Test public void testRecoveryWithArrayType() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(diskResourceService); - serviceLocator.addService(provider); + ServiceLocator serviceLocator = dependencySet().with(diskResourceService).with(provider).build(); serviceLocator.startAllServices(); CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); @@ -269,8 +266,7 @@ protected void destroyStore(AbstractOffHeapStore store) { @Test public void testStoreInitFailsWithoutLocalPersistenceService() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(provider); + ServiceLocator serviceLocator = dependencySet().with(provider).build(); serviceLocator.startAllServices(); Store.Configuration storeConfig = mock(Store.Configuration.class); when(storeConfig.getKeyType()).thenReturn(String.class); @@ -454,4 +450,4 @@ public boolean equals(Object o) { } } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java index 13de6b7c7f..1a1118d54b 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java @@ -37,6 +37,7 @@ import org.junit.Before; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; public class ByteSizedOnHeapStoreByRefSPITest extends StoreSPITest { @@ -127,7 +128,7 @@ public void close(final Store store) { @Override public ServiceLocator getServiceProvider() { - ServiceLocator locator = new ServiceLocator(); + ServiceLocator locator = dependencySet().build(); try { locator.startAllServices(); } catch (Exception e) { diff --git 
a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java index 73a22f70b8..fd1ae624de 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByValueSPITest.java @@ -40,6 +40,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; public class ByteSizedOnHeapStoreByValueSPITest extends StoreSPITest { @@ -133,7 +134,7 @@ public void close(final Store store) { @Override public ServiceLocator getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator serviceLocator = dependencySet().build(); try { serviceLocator.startAllServices(); } catch (Exception e) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java index 35c1e12790..41d4f0d10c 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java @@ -37,6 +37,7 @@ import org.junit.Before; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * Test the {@link org.ehcache.internal.store.heap.OnHeapStore} compliance to the @@ -132,7 +133,7 @@ public void close(final Store store) { @Override public ServiceLocator getServiceProvider() { - ServiceLocator locator = new ServiceLocator(); + ServiceLocator locator = dependencySet().build(); try { locator.startAllServices(); } catch (Exception e) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java index 590f70f55a..53ea429bdb 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByValueSPITest.java @@ -40,6 +40,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * Test the {@link OnHeapStore} compliance to the @@ -138,7 +139,7 @@ public void close(final Store store) { @Override public ServiceLocator getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator serviceLocator = dependencySet().build(); try { serviceLocator.startAllServices(); } catch (Exception e) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java index dc0e2ab845..7251a473dc 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java @@ -37,6 +37,7 @@ import org.junit.Before; import static 
org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * This factory instantiates a CachingTier @@ -124,7 +125,7 @@ public void disposeOf(CachingTier tier) { @Override public ServiceProvider getServiceProvider() { - return new ServiceLocator(); + return dependencySet().build(); } }; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java index a4953e076c..b885d7c965 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByValueSPITest.java @@ -40,6 +40,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * This factory instantiates a CachingTier @@ -88,7 +89,7 @@ public Store.ValueHolder newValueHolder(final String value) { @Override public Store.Provider newProvider() { Store.Provider service = new OnHeapStore.Provider(); - service.start(new ServiceLocator()); + service.start(dependencySet().build()); return service; } @@ -131,7 +132,7 @@ public void disposeOf(CachingTier tier) { @Override public ServiceProvider getServiceProvider() { - return new ServiceLocator(); + return dependencySet().build(); } }; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java index 4010fa228d..c20790d29f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java @@ -40,6 +40,7 @@ import java.util.Arrays; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; public class OnHeapStoreCachingTierByRefSPITest extends CachingTierSPITest { @@ -124,7 +125,7 @@ public void disposeOf(CachingTier tier) { @Override public ServiceProvider getServiceProvider() { - return new ServiceLocator(); + return dependencySet().build(); } }; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java index b2567b3b1c..6801d09857 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByValueSPITest.java @@ -43,6 +43,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; public class OnHeapStoreCachingTierByValueSPITest extends CachingTierSPITest { @@ -86,7 +87,7 @@ public Store.ValueHolder newValueHolder(final String value) { @Override public Store.Provider newProvider() { Store.Provider service = new 
OnHeapStore.Provider(); - service.start(new ServiceLocator()); + service.start(dependencySet().build()); return service; } @@ -130,7 +131,7 @@ public void disposeOf(CachingTier tier) { @Override public ServiceProvider getServiceProvider() { - return new ServiceLocator(); + return dependencySet().build(); } }; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java index 87f2c466b9..f4f6b5f0c9 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreSPITest.java @@ -41,6 +41,7 @@ import java.util.Arrays; import static org.ehcache.config.ResourceType.Core.OFFHEAP; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * OffHeapStoreSPITest @@ -117,7 +118,7 @@ public ServiceConfiguration[] getServiceConfigurations() { @Override public ServiceLocator getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); + ServiceLocator serviceLocator = dependencySet().build(); try { serviceLocator.startAllServices(); } catch (Exception e) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java index 4ebb16df63..8775583452 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierSPITest.java @@ -45,6 +45,7 @@ import static java.lang.ClassLoader.getSystemClassLoader; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; /** * This factory instantiates a CachingTier @@ -140,7 +141,7 @@ public void disposeOf(CachingTier tier) { @Override public ServiceProvider getServiceProvider() { - return new ServiceLocator(); + return dependencySet().build(); } }; diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java index c2b0ea1b4d..fefb425603 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java @@ -43,6 +43,7 @@ import java.io.Serializable; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; @@ -151,11 +152,11 @@ private ServiceLocator getServiceLocator(File location) throws Exception { DefaultPersistenceConfiguration persistenceConfiguration = new DefaultPersistenceConfiguration(location); DefaultLocalPersistenceService fileService = new DefaultLocalPersistenceService(persistenceConfiguration); DefaultDiskResourceService diskResourceService = new DefaultDiskResourceService(); - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(fileService); - serviceLocator.addService(diskResourceService); - serviceLocator.addService(new OnHeapStore.Provider()); - 
serviceLocator.addService(new OffHeapDiskStore.Provider()); - return serviceLocator; + ServiceLocator.DependencySet dependencySet = dependencySet(); + dependencySet.with(fileService); + dependencySet.with(diskResourceService); + dependencySet.with(new OnHeapStore.Provider()); + dependencySet.with(new OffHeapDiskStore.Provider()); + return dependencySet.build(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java index 1809fc5bb0..4245372142 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java @@ -69,6 +69,7 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.MemoryUnit.MB; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -304,10 +305,10 @@ public void close(final Store store) { @Override public ServiceProvider getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(new FakeCachingTierProvider()); - serviceLocator.addService(new FakeAuthoritativeTierProvider()); - return serviceLocator; + ServiceLocator.DependencySet dependencySet = dependencySet(); + dependencySet.with(new FakeCachingTierProvider()); + dependencySet.with(new FakeAuthoritativeTierProvider()); + return dependencySet.build(); } }; } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java index 6c1eabd7b1..6f34386252 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java @@ -58,6 +58,7 @@ import java.util.concurrent.TimeUnit; import static java.util.Collections.singleton; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; @@ -596,7 +597,7 @@ public void testReleaseStoreFlushes() throws Exception { @Test public void testRank() throws Exception { TieredStore.Provider provider = new TieredStore.Provider(); - ServiceLocator serviceLocator = new ServiceLocator(provider); + ServiceLocator serviceLocator = dependencySet().with(provider).build(); serviceLocator.startAllServices(); assertRank(provider, 0, ResourceType.Core.DISK); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java index 961154fe87..d9da7f8441 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java @@ -71,6 +71,7 @@ import java.util.concurrent.atomic.AtomicInteger; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -318,9 +319,8 @@ public void close(final Store store) { @Override 
public ServiceProvider getServiceProvider() { - ServiceLocator serviceLocator = new ServiceLocator(); - serviceLocator.addService(new FakeCachingTierProvider()); - serviceLocator.addService(new FakeAuthoritativeTierProvider()); + ServiceLocator serviceLocator = dependencySet().with(new FakeCachingTierProvider()) + .with(new FakeAuthoritativeTierProvider()).build(); return serviceLocator; } }; diff --git a/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java b/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java index 972551946c..511d6b0505 100644 --- a/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java +++ b/integration-test/src/test/java/org/ehcache/integration/EhcacheBulkMethodsITest.java @@ -50,6 +50,7 @@ import java.util.Set; import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsCollectionContaining.hasItem; @@ -525,7 +526,7 @@ public int rank(final Set> resourceTypes, final Collection Store createStore(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { - ServiceLocator serviceLocator = new ServiceLocator(new DefaultSerializationProvider(null)); + ServiceLocator serviceLocator = dependencySet().with(new DefaultSerializationProvider(null)).build(); try { serviceLocator.startAllServices(); } catch (Exception e) { diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java index af8b4fe871..63720b5c9d 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java @@ -95,6 +95,7 @@ import javax.transaction.xa.XAResource; import static java.util.Collections.emptySet; +import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -1540,13 +1541,13 @@ public boolean adviseAgainstEviction(Long key, SoftLock value) { public void testRank() throws Exception { XAStore.Provider provider = new XAStore.Provider(); XAStoreConfiguration configuration = new XAStoreConfiguration("testXAResourceId"); - ServiceLocator serviceLocator = new ServiceLocator( - provider, - new TieredStore.Provider(), - new OnHeapStore.Provider(), - new OffHeapStore.Provider(), - new OffHeapDiskStore.Provider(), - mock(TransactionManagerProvider.class)); + ServiceLocator serviceLocator = dependencySet() + .with(provider) + .with(new TieredStore.Provider()) + .with(new OnHeapStore.Provider()) + .with(new OffHeapStore.Provider()) + .with(new OffHeapDiskStore.Provider()) + .with(mock(TransactionManagerProvider.class)).build(); serviceLocator.startAllServices(); From 5c4f110f3d67e97147f73d196f7dec20a24c272c Mon Sep 17 00:00:00 2001 From: Chris Dennis Date: Mon, 10 Oct 2016 18:44:55 -0400 Subject: [PATCH 065/218] Issue #1509 Cleanup service dependencies and service retrieval --- .../internal/service/ClusteringServiceFactory.java | 1 + .../internal/service/DefaultClusteringService.java | 1 - .../client/internal/store/ClusteredStore.java | 2 ++ .../internal/store/ClusteredStoreProviderTest.java | 3 +++ 
.../core/spi/store/tiering/HigherCachingTier.java | 2 ++ .../core/spi/store/tiering/LowerCachingTier.java | 2 ++ ...NotificationListenerServiceProviderFactory.java | 4 ++-- .../DefaultDiskResourceServiceFactory.java | 10 +++++----- .../DefaultLocalPersistenceServiceFactory.java | 1 + .../store/tiering/CompoundCachingTier.java | 14 +++++++++++--- .../impl/internal/store/tiering/TieredStore.java | 3 +-- .../persistence/DefaultDiskResourceService.java | 2 ++ .../org/ehcache/core/spi/ServiceProviderTest.java | 3 +++ .../internal/store/disk/OffHeapDiskStoreTest.java | 14 +++----------- .../internal/store/tiering/TieredStoreTest.java | 3 ++- .../registry/DefaultManagementRegistryService.java | 3 +-- .../ehcache/transactions/xa/internal/XAStore.java | 2 +- .../internal/journal/DefaultJournalProvider.java | 2 ++ .../DefaultTransactionManagerProviderFactory.java | 1 + .../provider/LookupTransactionManagerProvider.java | 1 - .../transactions/xa/internal/XAStoreTest.java | 7 +++---- 21 files changed, 48 insertions(+), 33 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java index fb12d4c83c..d835ae6407 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteringServiceFactory.java @@ -27,6 +27,7 @@ * * @author Clifford W. Johnson */ +@ServiceFactory.RequiresConfiguration public class ClusteringServiceFactory implements ServiceFactory { @Override diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 18f0f83320..615a531f72 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -68,7 +68,6 @@ /** * Provides support for accessing server-based cluster services. */ -@ServiceDependencies(ClusteredStore.Provider.class) class DefaultClusteringService implements ClusteringService, EntityService { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultClusteringService.class); diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index 72da95ff32..4ac6b504d3 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -57,6 +57,7 @@ import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.StatefulSerializer; +import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; @@ -545,6 +546,7 @@ public void setInvalidationValve(InvalidationValve valve) { /** * Provider of {@link ClusteredStore} instances. 
*/ + @ServiceDependencies({TimeSourceService.class, ClusteringService.class}) public static class Provider implements Store.Provider, AuthoritativeTier.Provider { private static final Logger LOGGER = LoggerFactory.getLogger(Provider.class); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java index 327487a4d4..69dc50fc9a 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java @@ -28,6 +28,7 @@ import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.config.ResourcePoolsImpl; import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; import org.ehcache.expiry.Expirations; import org.ehcache.expiry.Expiry; @@ -65,6 +66,7 @@ public void testRank() throws Exception { .with(new TieredStore.Provider()) .with(new OnHeapStore.Provider()) .with(new OffHeapStore.Provider()) + .with(mock(DiskResourceService.class)) .with(new OffHeapDiskStore.Provider()) .with(mock(ClusteringService.class)).build(); provider.start(serviceLocator); @@ -85,6 +87,7 @@ public void testRankTiered() throws Exception { .with(new OnHeapStore.Provider()) .with(new OffHeapStore.Provider()) .with(new OffHeapDiskStore.Provider()) + .with(mock(DiskResourceService.class)) .with(mock(ClusteringService.class)).build(); serviceLocator.startAllServices(); diff --git a/core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java b/core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java index d37b7c26bd..c20ef8bc5e 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java +++ b/core/src/main/java/org/ehcache/core/spi/store/tiering/HigherCachingTier.java @@ -20,6 +20,7 @@ import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.function.Function; import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; @@ -62,6 +63,7 @@ public interface HigherCachingTier extends CachingTier { /** * {@link Service} interface for providing {@link HigherCachingTier} instances. */ + @PluralService interface Provider extends Service { /** diff --git a/core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java b/core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java index 666a170817..ab3d169233 100644 --- a/core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java +++ b/core/src/main/java/org/ehcache/core/spi/store/tiering/LowerCachingTier.java @@ -20,6 +20,7 @@ import org.ehcache.core.spi.store.ConfigurationChangeSupport; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.spi.service.PluralService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; @@ -100,6 +101,7 @@ public interface LowerCachingTier extends ConfigurationChangeSupport { /** * {@link Service} interface for providing {@link LowerCachingTier} instances. 
*/ + @PluralService interface Provider extends Service { /** diff --git a/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java b/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java index e9fea8a845..acb05eae07 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java +++ b/impl/src/main/java/org/ehcache/impl/internal/events/CacheEventNotificationListenerServiceProviderFactory.java @@ -35,7 +35,7 @@ public CacheEventDispatcherFactory create(ServiceCreationConfiguration getServiceType() { - return CacheEventDispatcherFactory.class; + public Class getServiceType() { + return CacheEventDispatcherFactoryImpl.class; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java b/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java index 0448e83ee0..46a6c24d4f 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java +++ b/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultDiskResourceServiceFactory.java @@ -21,15 +21,15 @@ import org.ehcache.impl.persistence.DefaultDiskResourceService; import org.ehcache.spi.service.ServiceCreationConfiguration; -public class DefaultDiskResourceServiceFactory implements ServiceFactory { +public class DefaultDiskResourceServiceFactory implements ServiceFactory { @Override - public DefaultDiskResourceService create(final ServiceCreationConfiguration serviceConfiguration) { + public DefaultDiskResourceService create(final ServiceCreationConfiguration serviceConfiguration) { return new DefaultDiskResourceService(); } @Override - public Class getServiceType() { - return DiskResourceService.class; + public Class getServiceType() { + return DefaultDiskResourceService.class; } -} \ No newline at end of file +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java b/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java index 0b7b4fcc99..55ae580bc2 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java +++ b/impl/src/main/java/org/ehcache/impl/internal/persistence/DefaultLocalPersistenceServiceFactory.java @@ -25,6 +25,7 @@ /** * @author Alex Snaps */ +@ServiceFactory.RequiresConfiguration public class DefaultLocalPersistenceServiceFactory implements ServiceFactory { @Override diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java index 76a45f040f..d74e3cc031 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTier.java @@ -211,7 +211,7 @@ public List getConfigurationChangeListeners() } - @ServiceDependencies({OnHeapStore.Provider.class, OffHeapStore.Provider.class}) + @ServiceDependencies({HigherCachingTier.Provider.class, LowerCachingTier.Provider.class}) public static class Provider implements CachingTier.Provider { private volatile ServiceProvider serviceProvider; private final ConcurrentMap, Map.Entry> providersMap = new ConcurrentWeakIdentityHashMap, Map.Entry>(); @@ -222,10 +222,18 @@ public CachingTier 
createCachingTier(Store.Configuration stor throw new RuntimeException("ServiceProvider is null."); } - HigherCachingTier.Provider higherProvider = serviceProvider.getService(HigherCachingTier.Provider.class); + Collection higherProviders = serviceProvider.getServicesOfType(HigherCachingTier.Provider.class); + if (higherProviders.size() != 1) { + throw new IllegalStateException("Cannot handle multiple higher tier providers"); + } + HigherCachingTier.Provider higherProvider = higherProviders.iterator().next(); HigherCachingTier higherCachingTier = higherProvider.createHigherCachingTier(storeConfig, serviceConfigs); - LowerCachingTier.Provider lowerProvider = serviceProvider.getService(LowerCachingTier.Provider.class); + Collection lowerProviders = serviceProvider.getServicesOfType(LowerCachingTier.Provider.class); + if (lowerProviders.size() != 1) { + throw new IllegalStateException("Cannot handle multiple lower tier providers"); + } + LowerCachingTier.Provider lowerProvider = lowerProviders.iterator().next(); LowerCachingTier lowerCachingTier = lowerProvider.createCachingTier(storeConfig, serviceConfigs); CompoundCachingTier compoundCachingTier = new CompoundCachingTier(higherCachingTier, lowerCachingTier); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java index adf31eb0f2..1db1140ca8 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java @@ -356,8 +356,7 @@ private CachingTier cachingTier() { return cachingTierRef.get(); } - @ServiceDependencies({CompoundCachingTier.Provider.class, - OnHeapStore.Provider.class, OffHeapStore.Provider.class, OffHeapDiskStore.Provider.class}) + @ServiceDependencies({CachingTier.Provider.class, AuthoritativeTier.Provider.class}) public static class Provider implements Store.Provider { private volatile ServiceProvider serviceProvider; diff --git a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java index f7b9d73a9d..04facbab5d 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java +++ b/impl/src/main/java/org/ehcache/impl/persistence/DefaultDiskResourceService.java @@ -27,6 +27,7 @@ import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; +import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,6 +41,7 @@ * Default implementation of the {@link DiskResourceService} which can be used explicitly when * {@link org.ehcache.PersistentUserManagedCache persistent user managed caches} are desired. 
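Since `HigherCachingTier.Provider` and `LowerCachingTier.Provider` are now marked `@PluralService`, consumers such as `CompoundCachingTier.Provider` resolve them through `ServiceProvider.getServicesOfType(...)` and refuse anything but exactly one match, as the hunk above shows. A small illustrative helper capturing that lookup pattern (the class and method names are invented for this example):

[source,java]
----
import java.util.Collection;

import org.ehcache.core.spi.store.tiering.HigherCachingTier;
import org.ehcache.spi.service.Service;
import org.ehcache.spi.service.ServiceProvider;

final class TierProviderLookup {

  private TierProviderLookup() {}

  // Mirrors the resolution done by CompoundCachingTier.Provider: a @PluralService
  // type may have several registered implementations, so pick the single one
  // or fail fast with a clear message.
  static HigherCachingTier.Provider uniqueHigherTierProvider(ServiceProvider<Service> serviceProvider) {
    Collection<? extends HigherCachingTier.Provider> providers =
        serviceProvider.getServicesOfType(HigherCachingTier.Provider.class);
    if (providers.size() != 1) {
      throw new IllegalStateException("Cannot handle multiple higher tier providers");
    }
    return providers.iterator().next();
  }
}
----

The same pattern applies to `LowerCachingTier.Provider`.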
*/ +@ServiceDependencies(LocalPersistenceService.class) public class DefaultDiskResourceService implements DiskResourceService { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDiskResourceService.class); static final String PERSISTENCE_SPACE_OWNER = "file"; diff --git a/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java b/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java index 7a36576ecd..5ddc9a77d2 100644 --- a/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java +++ b/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java @@ -17,6 +17,7 @@ package org.ehcache.core.spi; import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.OffHeapStore; @@ -29,6 +30,7 @@ import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; import static org.hamcrest.core.IsCollectionContaining.hasItem; import static org.junit.Assert.assertThat; +import static org.mockito.Mockito.mock; /** * @@ -47,6 +49,7 @@ public void testSupportsMultipleAuthoritativeTierProviders() throws Exception { dependencySet.with(cachingTierProvider); dependencySet.with(authoritativeTierProvider); dependencySet.with(diskStoreProvider); + dependencySet.with(mock(DiskResourceService.class)); ServiceLocator serviceLocator = dependencySet.build(); serviceLocator.startAllServices(); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java index 0d91a8dc43..6fc0e9e9c3 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java @@ -266,20 +266,12 @@ protected void destroyStore(AbstractOffHeapStore store) { @Test public void testStoreInitFailsWithoutLocalPersistenceService() throws Exception { OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider(); - ServiceLocator serviceLocator = dependencySet().with(provider).build(); - serviceLocator.startAllServices(); - Store.Configuration storeConfig = mock(Store.Configuration.class); - when(storeConfig.getKeyType()).thenReturn(String.class); - when(storeConfig.getValueType()).thenReturn(String.class); - when(storeConfig.getResourcePools()).thenReturn(ResourcePoolsBuilder.newResourcePoolsBuilder() - .disk(10, MB) - .build()); - when(storeConfig.getDispatcherConcurrency()).thenReturn(1); try { - provider.createStore(storeConfig); + ServiceLocator serviceLocator = dependencySet().with(provider).build(); fail("IllegalStateException expected"); } catch (IllegalStateException e) { - assertThat(e.getMessage(), containsString("No LocalPersistenceService could be found - did you configure it at the CacheManager level?")); + assertThat(e.getMessage(), containsString("Failed to find provider with satisfied dependency set for interface" + + " org.ehcache.core.spi.service.DiskResourceService")); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java index 6f34386252..844a5d8c64 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java +++ 
b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java @@ -20,6 +20,7 @@ import org.ehcache.config.ResourceType; import org.ehcache.config.SizedResourcePool; import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.function.BiFunction; import org.ehcache.core.spi.function.Function; @@ -597,7 +598,7 @@ public void testReleaseStoreFlushes() throws Exception { @Test public void testRank() throws Exception { TieredStore.Provider provider = new TieredStore.Provider(); - ServiceLocator serviceLocator = dependencySet().with(provider).build(); + ServiceLocator serviceLocator = dependencySet().with(provider).with(mock(DiskResourceService.class)).build(); serviceLocator.startAllServices(); assertRank(provider, 0, ResourceType.Core.DISK); diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java b/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java index 7876e183a2..50d894d0a7 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java +++ b/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java @@ -22,7 +22,6 @@ import org.ehcache.core.spi.store.InternalCacheManager; import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.spi.service.ExecutionService; -import org.ehcache.core.spi.time.TimeSourceService; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.ManagementRegistryServiceConfiguration; import org.ehcache.management.cluster.Clustering; @@ -48,7 +47,7 @@ import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; -@ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class, TimeSourceService.class}) +@ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class}) public class DefaultManagementRegistryService extends AbstractManagementRegistry implements ManagementRegistryService, CacheManagerListener { private final ManagementRegistryServiceConfiguration configuration; diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java index e2e9ec1486..f3ae3750aa 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java @@ -730,7 +730,7 @@ public CreatedStoreRef(final Store.Provider storeProvider, final SoftLockValueCo } } - @ServiceDependencies({TimeSourceService.class, JournalProvider.class, CopyProvider.class}) + @ServiceDependencies({TimeSourceService.class, JournalProvider.class, CopyProvider.class, TransactionManagerProvider.class}) public static class Provider implements Store.Provider { private volatile ServiceProvider serviceProvider; diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java index 66904319d5..43df56ff50 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/DefaultJournalProvider.java @@ -19,6 +19,7 @@ 
import org.ehcache.CachePersistenceException; import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.spi.persistence.PersistableResourceService; +import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.core.spi.service.FileBasedPersistenceContext; @@ -29,6 +30,7 @@ /** * @author Ludovic Orban */ +@ServiceDependencies(DiskResourceService.class) public class DefaultJournalProvider implements JournalProvider { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultJournalProvider.class); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java index a32412d032..5edb71e685 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/txmgr/provider/DefaultTransactionManagerProviderFactory.java @@ -27,6 +27,7 @@ * * @see LookupTransactionManagerProvider */ +@ServiceFactory.RequiresConfiguration public class DefaultTransactionManagerProviderFactory implements ServiceFactory { /** diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java b/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java index 636e34bb64..be431d936b 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProvider.java @@ -39,7 +39,6 @@ * unless it can be considered a singleton. *

*/ -@ServiceDependencies(XAStore.Provider.class) public class LookupTransactionManagerProvider implements TransactionManagerProvider { private final TransactionManagerLookup lookup; diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java index 63720b5c9d..5c69f1bb77 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java @@ -27,6 +27,7 @@ import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.core.internal.service.ServiceLocator; +import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.store.Store; import org.ehcache.expiry.Duration; import org.ehcache.expiry.Expirations; @@ -1543,10 +1544,8 @@ public void testRank() throws Exception { XAStoreConfiguration configuration = new XAStoreConfiguration("testXAResourceId"); ServiceLocator serviceLocator = dependencySet() .with(provider) - .with(new TieredStore.Provider()) - .with(new OnHeapStore.Provider()) - .with(new OffHeapStore.Provider()) - .with(new OffHeapDiskStore.Provider()) + .with(Store.Provider.class) + .with(mock(DiskResourceService.class)) .with(mock(TransactionManagerProvider.class)).build(); serviceLocator.startAllServices(); From f327d67832b05f10ba4200c8f4a5ded66fe246cd Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Tue, 11 Oct 2016 22:35:47 +0200 Subject: [PATCH 066/218] :bug: #1529 Fix ehcache-clustered pom name and description --- buildSrc/src/main/groovy/EhPomMangle.groovy | 2 +- clustered/clustered-dist/gradle.properties | 4 ++-- dist/gradle.properties | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/buildSrc/src/main/groovy/EhPomMangle.groovy b/buildSrc/src/main/groovy/EhPomMangle.groovy index a20274e57e..f97b883564 100644 --- a/buildSrc/src/main/groovy/EhPomMangle.groovy +++ b/buildSrc/src/main/groovy/EhPomMangle.groovy @@ -67,7 +67,7 @@ class EhPomMangle implements Plugin { pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.pomOnlyCompile, Conf2ScopeMappingContainer.COMPILE) pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.pomOnlyProvided, Conf2ScopeMappingContainer.PROVIDED) - utils.pomFiller(pom, 'Ehcache', 'Ehcache single jar, containing all modules') + utils.pomFiller(pom, project.subPomName, project.subPomDesc) } diff --git a/clustered/clustered-dist/gradle.properties b/clustered/clustered-dist/gradle.properties index b21565b96b..4d52981583 100644 --- a/clustered/clustered-dist/gradle.properties +++ b/clustered/clustered-dist/gradle.properties @@ -14,8 +14,8 @@ # limitations under the License. # -subPomName = Ehcache 3 Clustered Kit -subPomDesc = Ehcache 3 Clustered Kit +subPomName = Ehcache 3 Clustered Module +subPomDesc = Ehcache 3 Clustered: Defines the client jar and the kit containing the Terracotta server javadocExclude = **/core/**, **/impl/**, **/xml/**, **/jsr107/**, **/transactions/**, **/management/**, **/tck/** # Set to anything to disable SPI doc and jar generation diff --git a/dist/gradle.properties b/dist/gradle.properties index 2b9a7a0d0d..944561ba5a 100644 --- a/dist/gradle.properties +++ b/dist/gradle.properties @@ -14,6 +14,6 @@ # limitations under the License. 
# -subPomName = Ehcache 3 Jar Distribution +subPomName = Ehcache subPomDesc = End-user ehcache3 jar artifact javadocExclude = **/core/**, **/impl/**, **/xml/**, **/jsr107/**, **/transactions/**, **/management/**, **/tck/** From 68ce32cefc8279a50358ac752ae1cab00597e0b8 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Tue, 11 Oct 2016 23:06:56 +0200 Subject: [PATCH 067/218] :snowflake: Fix #1529 Generalise and use provided scope This enables produced pom files to properly report provided dependencies. --- 107/build.gradle | 3 +- buildSrc/src/main/groovy/EhDeploy.groovy | 35 ++++++++++++++++++++---- clustered/client/build.gradle | 2 +- clustered/common/build.gradle | 2 +- clustered/server/build.gradle | 14 ---------- management/build.gradle | 1 + 6 files changed, 34 insertions(+), 23 deletions(-) diff --git a/107/build.gradle b/107/build.gradle index aae8a5ad3f..a46639b3a3 100644 --- a/107/build.gradle +++ b/107/build.gradle @@ -30,7 +30,8 @@ sourceSets { } dependencies { - compile project(':impl'), project(':xml'), "javax.cache:cache-api:$parent.jcacheVersion" + compile project(':impl'), project(':xml') + provided "javax.cache:cache-api:$parent.jcacheVersion" tckTestRuntime 'javax.cache:cache-tests:1.0.1' tckTestClasses('javax.cache:cache-tests:1.0.1:tests') { transitive = false diff --git a/buildSrc/src/main/groovy/EhDeploy.groovy b/buildSrc/src/main/groovy/EhDeploy.groovy index 282c40b8e1..ad7d14e6aa 100644 --- a/buildSrc/src/main/groovy/EhDeploy.groovy +++ b/buildSrc/src/main/groovy/EhDeploy.groovy @@ -1,6 +1,8 @@ import org.gradle.api.Plugin import org.gradle.api.Project +import org.gradle.api.artifacts.maven.Conf2ScopeMappingContainer import org.gradle.api.artifacts.maven.MavenDeployment +import org.gradle.api.plugins.MavenPlugin import org.gradle.plugins.signing.Sign import scripts.Utils @@ -32,14 +34,39 @@ class EhDeploy implements Plugin { project.plugins.apply 'signing' project.plugins.apply 'maven' + project.configurations { + provided + } + + project.sourceSets { + main { + compileClasspath += project.configurations.provided + } + test { + compileClasspath += project.configurations.provided + runtimeClasspath += project.configurations.provided + } + } + project.signing { required { project.isReleaseVersion && project.gradle.taskGraph.hasTask("uploadArchives") } sign project.configurations.getByName('archives') } + def artifactFiltering = { + pom.scopeMappings.addMapping(MavenPlugin.COMPILE_PRIORITY, project.configurations.provided, Conf2ScopeMappingContainer.PROVIDED) + + utils.pomFiller(pom, project.subPomName, project.subPomDesc) + + } + + project.install { + repositories.mavenInstaller artifactFiltering + } + project.uploadArchives { repositories { - mavenDeployer { + mavenDeployer ({ beforeDeployment { MavenDeployment deployment -> project.signing.signPom(deployment)} if (project.isReleaseVersion) { @@ -51,16 +78,12 @@ class EhDeploy implements Plugin { authentication(userName: project.sonatypeUser, password: project.sonatypePwd) } } - } + } << artifactFiltering) } } def installer = project.install.repositories.mavenInstaller def deployer = project.uploadArchives.repositories.mavenDeployer - [installer, deployer]*.pom*.whenConfigured {pom -> - utils.pomFiller(pom, project.subPomName, project.subPomDesc) - } - } } diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 9e1c16d5d3..5fdcc10286 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -20,7 +20,7 @@ dependencies { compileOnly project(':api') compileOnly 
project(':xml') compile project(':clustered:common') - compile "org.terracotta:entity-client-api:$parent.entityApiVersion" + provided "org.terracotta:entity-client-api:$parent.entityApiVersion" testCompile project(':api') testCompile project(':xml') diff --git a/clustered/common/build.gradle b/clustered/common/build.gradle index 2bd3530ae3..36182b9429 100644 --- a/clustered/common/build.gradle +++ b/clustered/common/build.gradle @@ -17,5 +17,5 @@ apply plugin: EhDeploy dependencies { - compile "org.terracotta:entity-server-api:$parent.entityApiVersion" + provided "org.terracotta:entity-server-api:$parent.entityApiVersion" } diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index c7f9df290b..55a7e16a6c 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -16,10 +16,6 @@ apply plugin: EhDeploy -configurations { - provided -} - dependencies { compile project(':clustered:common') compile group: 'org.terracotta', name: 'offheap-resource', version: parent.offheapResourceVersion @@ -28,13 +24,3 @@ dependencies { provided "org.terracotta:entity-server-api:$parent.entityApiVersion" provided "org.terracotta:standard-cluster-services:$parent.terracottaApisVersion" } - -sourceSets { - main { - compileClasspath += configurations.provided - } - test { - compileClasspath += configurations.provided - runtimeClasspath += configurations.provided - } -} diff --git a/management/build.gradle b/management/build.gradle index aaf037ec6f..3e21597f38 100644 --- a/management/build.gradle +++ b/management/build.gradle @@ -19,6 +19,7 @@ apply plugin: EhDeploy dependencies { compileOnly project(':xml') compileOnly project(':clustered:client') + compileOnly "org.terracotta:entity-client-api:$parent.entityApiVersion" compile project(':api') compile project(':core') From c0fefa4a0ea1ae5c402839f1920f2b41f4028dc1 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Wed, 12 Oct 2016 20:14:28 +0200 Subject: [PATCH 068/218] :bug: Fix #1535 Clean up server dependencies that leaked on client * clustered common module only depends on common apis * passive sync codes for the lock have been moved to the server module --- clustered/common/build.gradle | 2 +- .../common/internal/lock/LockMessaging.java | 17 ------- ...ltronReadWriteLockServerEntityService.java | 3 +- .../server/messages/LockSyncMessaging.java | 44 +++++++++++++++++++ 4 files changed, 47 insertions(+), 19 deletions(-) create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java diff --git a/clustered/common/build.gradle b/clustered/common/build.gradle index 36182b9429..2bc268e9f7 100644 --- a/clustered/common/build.gradle +++ b/clustered/common/build.gradle @@ -17,5 +17,5 @@ apply plugin: EhDeploy dependencies { - provided "org.terracotta:entity-server-api:$parent.entityApiVersion" + provided "org.terracotta:entity-common-api:$parent.entityApiVersion" } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java index 7e9414569b..544edee519 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/lock/LockMessaging.java @@ -20,7 +20,6 @@ import org.terracotta.entity.EntityResponse; import org.terracotta.entity.MessageCodec; import org.terracotta.entity.MessageCodecException; -import 
org.terracotta.entity.SyncMessageCodec; public class LockMessaging { @@ -60,26 +59,10 @@ public LockTransition decodeResponse(byte[] bytes) throws MessageCodecException } }; - private static final SyncMessageCodec SYNC_CODEC = new SyncMessageCodec() { - @Override - public byte[] encode(int i, LockOperation message) throws MessageCodecException { - throw new AssertionError(); - } - - @Override - public LockOperation decode(int i, byte[] bytes) throws MessageCodecException { - throw new AssertionError(); - } - }; - public static MessageCodec codec() { return CODEC; } - public static SyncMessageCodec syncCodec() { - return SYNC_CODEC; - } - public static LockOperation tryLock(HoldType type) { return new LockOperation(Operation.TRY_ACQUIRE, type); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java index 90a8b1ac49..5ae6adc23a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockServerEntityService.java @@ -21,6 +21,7 @@ import org.ehcache.clustered.common.internal.lock.LockMessaging.LockOperation; import org.ehcache.clustered.common.internal.lock.LockMessaging.LockTransition; +import org.ehcache.clustered.lock.server.messages.LockSyncMessaging; import org.terracotta.entity.ActiveServerEntity; import org.terracotta.entity.ClientCommunicator; import org.terracotta.entity.ConcurrencyStrategy; @@ -80,7 +81,7 @@ public MessageCodec getMessageCodec() { @Override public SyncMessageCodec getSyncMessageCodec() { - return LockMessaging.syncCodec(); + return LockSyncMessaging.syncCodec(); } private static final ServiceConfiguration config(final Class klazz) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java new file mode 100644 index 0000000000..354d7b2e1d --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/messages/LockSyncMessaging.java @@ -0,0 +1,44 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.lock.server.messages; + +import org.ehcache.clustered.common.internal.lock.LockMessaging; +import org.terracotta.entity.MessageCodecException; +import org.terracotta.entity.SyncMessageCodec; + +/** + * LockSyncMessaging + */ +public class LockSyncMessaging { + + public static SyncMessageCodec syncCodec() { + return SYNC_CODEC; + } + + private static final SyncMessageCodec SYNC_CODEC = new SyncMessageCodec() { + @Override + public byte[] encode(int i, LockMessaging.LockOperation message) throws MessageCodecException { + throw new AssertionError(); + } + + @Override + public LockMessaging.LockOperation decode(int i, byte[] bytes) throws MessageCodecException { + throw new AssertionError(); + } + }; + +} From d06ed8168ee32ae023da29951d0afad8d167bd3e Mon Sep 17 00:00:00 2001 From: Abhilash Date: Thu, 13 Oct 2016 13:24:17 +0530 Subject: [PATCH 069/218] bump up terracotta-core & galvan version --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index 713eb5abf8..7a10f1c0b6 100644 --- a/build.gradle +++ b/build.gradle @@ -31,12 +31,12 @@ ext { terracottaPlatformVersion = '5.0.7.beta' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.7.beta' - terracottaCoreVersion = '5.0.7-beta' + terracottaCoreVersion = '5.0.7-beta3' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion terracottaPassthroughTestingVersion = '1.0.7.beta2' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.7-beta' + galvanVersion = '1.0.7-beta3' // Tools findbugsVersion = '3.0.1' From 1e7ee76adfefef77961ef762399563db854453a0 Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Wed, 12 Oct 2016 23:05:12 -0400 Subject: [PATCH 070/218] Issue #1537 : HitRatio not computed correctly * fix hit ratio * rewrite some tests assertions --- .../ClusteringManagementServiceTest.java | 4 + .../statistics/StandardEhcacheStatistics.java | 44 +++++++--- .../StandardEhcacheStatisticsTest.java | 85 ++++++++++++++----- .../DefaultManagementRegistryServiceTest.java | 3 + 4 files changed, 102 insertions(+), 34 deletions(-) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index f0b4fc33f7..52b357386e 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -255,6 +255,7 @@ public static void initDescriptors() throws ClassNotFoundException { ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMaximum" , StatisticType.DURATION_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionRate" , StatisticType.RATE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatioRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRatioRatio" , StatisticType.RATIO_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyAverage" , StatisticType.AVERAGE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMinimum" , StatisticType.DURATION_HISTORY)); @@ -280,6 +281,7 @@ 
public static void initDescriptors() throws ClassNotFoundException { OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatioRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRatioRatio", StatisticType.RATIO_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", StatisticType.COUNTER_HISTORY)); @@ -305,6 +307,7 @@ public static void initDescriptors() throws ClassNotFoundException { DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatioRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRatioRatio", StatisticType.RATIO_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRate", StatisticType.RATE_HISTORY)); @@ -325,6 +328,7 @@ public static void initDescriptors() throws ClassNotFoundException { CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitRatioRatio", StatisticType.RATIO_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissRatioRatio", StatisticType.RATIO_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MappingCount", StatisticType.COUNTER_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionRate", StatisticType.RATE_HISTORY)); diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java index 7e18a8523a..7e7cc82854 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -55,6 +55,11 @@ import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; import org.terracotta.management.model.stats.StatisticType; +import static java.util.Collections.singleton; +import static java.util.EnumSet.allOf; +import static java.util.EnumSet.of; +import static org.terracotta.context.extended.ValueStatisticDescriptor.descriptor; + class StandardEhcacheStatistics extends ExposedCacheBinding { private final StatisticsRegistry statisticsRegistry; @@ -65,20 +70,31 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { 
statisticsProviderConfiguration.averageWindowUnit(), statisticsProviderConfiguration.historySize(), statisticsProviderConfiguration.historyInterval(), statisticsProviderConfiguration.historyIntervalUnit(), statisticsProviderConfiguration.timeToDisable(), statisticsProviderConfiguration.timeToDisableUnit()); - statisticsRegistry.registerCompoundOperations("Cache:Hit", OperationStatisticDescriptor.descriptor("get", Collections.singleton("cache"), CacheOperationOutcomes.GetOutcome.class), EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); - statisticsRegistry.registerCompoundOperations("Cache:Miss", OperationStatisticDescriptor.descriptor("get", Collections.singleton("cache"), CacheOperationOutcomes.GetOutcome.class), EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)); - statisticsRegistry.registerCompoundOperations("Cache:Clear", OperationStatisticDescriptor.descriptor("clear",Collections.singleton("cache"),CacheOperationOutcomes.ClearOutcome.class), EnumSet.allOf(CacheOperationOutcomes.ClearOutcome.class)); - statisticsRegistry.registerRatios("Cache:HitRatio", OperationStatisticDescriptor.descriptor("get", Collections.singleton("cache"), CacheOperationOutcomes.GetOutcome.class), EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER, CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER), EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)); - statisticsRegistry.registerRatios("Cache:MissRatio", OperationStatisticDescriptor.descriptor("get", Collections.singleton("cache"), CacheOperationOutcomes.GetOutcome.class), EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER), EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER, CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER)); - - statisticsRegistry.registerCompoundOperations("Hit", OperationStatisticDescriptor.descriptor("get", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.GetOutcome.class), EnumSet.of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT)); - statisticsRegistry.registerCompoundOperations("Miss", OperationStatisticDescriptor.descriptor("get", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.GetOutcome.class), EnumSet.of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS)); - statisticsRegistry.registerCompoundOperations("Eviction", OperationStatisticDescriptor.descriptor("eviction", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class), EnumSet.allOf(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class)); - statisticsRegistry.registerRatios("HitRatio", OperationStatisticDescriptor.descriptor("get", Collections.singleton("tier"), TierOperationStatistic.TierOperationOutcomes.GetOutcome.class), EnumSet.of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT), EnumSet.of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS)); - statisticsRegistry.registerValue("MappingCount", ValueStatisticDescriptor.descriptor("mappings", Collections.singleton("tier"))); - statisticsRegistry.registerValue("MaxMappingCount", ValueStatisticDescriptor.descriptor("maxMappings", Collections.singleton("tier"))); - statisticsRegistry.registerValue("AllocatedBytesCount", ValueStatisticDescriptor.descriptor("allocatedMemory", Collections.singleton("tier"))); - 
statisticsRegistry.registerValue("OccupiedBytesCount", ValueStatisticDescriptor.descriptor("occupiedMemory", Collections.singleton("tier"))); + EnumSet hit = of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER); + EnumSet miss = of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER); + OperationStatisticDescriptor getCacheStatisticDescriptor = OperationStatisticDescriptor.descriptor("get", singleton("cache"), CacheOperationOutcomes.GetOutcome.class); + + statisticsRegistry.registerCompoundOperations("Cache:Hit", getCacheStatisticDescriptor, hit); + statisticsRegistry.registerCompoundOperations("Cache:Miss", getCacheStatisticDescriptor, miss); + statisticsRegistry.registerCompoundOperations("Cache:Clear", OperationStatisticDescriptor.descriptor("clear", singleton("cache"),CacheOperationOutcomes.ClearOutcome.class), allOf(CacheOperationOutcomes.ClearOutcome.class)); + statisticsRegistry.registerRatios("Cache:HitRatio", getCacheStatisticDescriptor, hit, allOf(CacheOperationOutcomes.GetOutcome.class)); + statisticsRegistry.registerRatios("Cache:MissRatio", getCacheStatisticDescriptor, miss, allOf(CacheOperationOutcomes.GetOutcome.class)); + + Class tierOperationGetOucomeClass = TierOperationStatistic.TierOperationOutcomes.GetOutcome.class; + OperationStatisticDescriptor getTierStatisticDescriptor = OperationStatisticDescriptor.descriptor("get", singleton("tier"), tierOperationGetOucomeClass); + + statisticsRegistry.registerCompoundOperations("Hit", getTierStatisticDescriptor, of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT)); + statisticsRegistry.registerCompoundOperations("Miss", getTierStatisticDescriptor, of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS)); + statisticsRegistry.registerCompoundOperations("Eviction", + OperationStatisticDescriptor.descriptor("eviction", singleton("tier"), + TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class), + allOf(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class)); + statisticsRegistry.registerRatios("HitRatio", getTierStatisticDescriptor, of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT), allOf(tierOperationGetOucomeClass)); + statisticsRegistry.registerRatios("MissRatio", getTierStatisticDescriptor, of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS), allOf(tierOperationGetOucomeClass)); + statisticsRegistry.registerValue("MappingCount", descriptor("mappings", singleton("tier"))); + statisticsRegistry.registerValue("MaxMappingCount", descriptor("maxMappings", singleton("tier"))); + statisticsRegistry.registerValue("AllocatedBytesCount", descriptor("allocatedMemory", singleton("tier"))); + statisticsRegistry.registerValue("OccupiedBytesCount", descriptor("occupiedMemory", singleton("tier"))); Map registrations = statisticsRegistry.getRegistrations(); for (RegisteredStatistic registeredStatistic : registrations.values()) { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java index baaea4b8c8..c48f26c575 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java @@ -84,20 +84,15 @@ public void statsCacheMissTest() throws 
InterruptedException { Assert.assertThat(missCounter.size(), Matchers.is(2)); - /*RateHistory missRateHistory = missCounter.getStatistic(RateHistory.class, "Cache:MissRate"); - while(!isHistoryReady(missRateHistory, 0d)) {} - //TODO how can i calculate rate? miss/second - Assert.assertThat(missRateHistory.getValue()[mostRecentIndex].getValue(), Matchers.greaterThan(0d));*/ - CounterHistory missCountCounterHistory = missCounter.getStatistic(CounterHistory.class, "Cache:MissCount"); while(!StatsUtil.isHistoryReady(missCountCounterHistory, 0L)) {} int mostRecentIndex = missCountCounterHistory.getValue().length - 1; Assert.assertThat(missCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(2L)); RatioHistory ratioHistory = missCounter.getStatistic(RatioHistory.class, "Cache:MissRatio"); - while(!StatsUtil.isHistoryReady(ratioHistory, Double.POSITIVE_INFINITY)) {} mostRecentIndex = ratioHistory.getValue().length - 1; - Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(1d)); + // 2 hits, 2 misses -> HitRatio is 0.5 + Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0.5d)); } finally { if(cacheManager != null) { @@ -129,11 +124,6 @@ public void statsCacheHitTest() throws InterruptedException { cache.put(2L, "2"); cache.put(3L, "3"); - cache.get(1L);//HIT - cache.get(2L);//HIT - cache.get(2L);//HIT - cache.get(4L);//need a MISS for ratio, otherwise you get infinity as a value - Thread.sleep(1000); Context context = StatsUtil.createContext(managementRegistry); @@ -147,20 +137,75 @@ public void statsCacheHitTest() throws InterruptedException { Assert.assertThat(contextualStatistics.size(), Matchers.is(2)); - /*RateHistory hitRateHistory = hitCounter.getStatistic(RateHistory.class, "Cache:HitRate"); - while(!isHistoryReady(hitRateHistory, 0d)) {} - //TODO how can i calculate rate? 
hits/second - Assert.assertThat(hitRateHistory.getValue()[mostRecentIndex].getValue(), Matchers.greaterThan(0d));*/ + /////////////////////// + // NO HITS, NO MISSES// + /////////////////////// CounterHistory hitCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:HitCount"); - while(!StatsUtil.isHistoryReady(hitCountCounterHistory, 0L)) {} int mostRecentIndex = hitCountCounterHistory.getValue().length - 1; - Assert.assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3L)); + Assert.assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0L)); RatioHistory ratioHistory = contextualStatistics.getStatistic(RatioHistory.class, "Cache:HitRatio"); - while(!StatsUtil.isHistoryReady(ratioHistory, Double.POSITIVE_INFINITY)) {} mostRecentIndex = ratioHistory.getValue().length - 1; - Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3d)); + // no hits, no misses -> HitRatio is NaN + Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(Double.NaN)); + + /////////////////////// + // 3 HITS, NO MISSES // + /////////////////////// + + cache.get(1L);//HIT + cache.get(2L);//HIT + cache.get(3L);//HIT + + Thread.sleep(1000); + + contextualStatistics = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList("Cache:HitCount", "Cache:HitRatio")) + .on(context) + .build() + .execute() + .getSingleResult(); + + hitCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:HitCount"); + while(!StatsUtil.isHistoryReady(hitCountCounterHistory, 0L)) {} + mostRecentIndex = hitCountCounterHistory.getValue().length - 1; + Assert.assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3L)); + + ratioHistory = contextualStatistics.getStatistic(RatioHistory.class, "Cache:HitRatio"); + mostRecentIndex = ratioHistory.getValue().length - 1; + // 3 hits, no misses -> HitRatio is 1 + Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(1.0)); + + /////////////////////// + // 3 HITS, 1 MISS // + /////////////////////// + + cache.get(4L);//MISS + + Thread.sleep(1000); + + contextualStatistics = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList("Cache:MissCount", "Cache:HitCount", "Cache:HitRatio")) + .on(context) + .build() + .execute() + .getSingleResult(); + + CounterHistory missCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:MissCount"); + mostRecentIndex = missCountCounterHistory.getValue().length - 1; + while(!StatsUtil.isHistoryReady(missCountCounterHistory, 0L)) {} + Assert.assertThat(missCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(1L)); + + ratioHistory = contextualStatistics.getStatistic(RatioHistory.class, "Cache:HitRatio"); + mostRecentIndex = ratioHistory.getValue().length - 1; + // 3 hits, 1 misses -> HitRatio is 0.75 + Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0.75)); + + hitCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:HitCount"); + mostRecentIndex = hitCountCounterHistory.getValue().length - 1; + + Assert.assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3L)); } finally { if(cacheManager != null) { diff --git 
a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index c7dffc5be6..d4f4aeac40 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -527,6 +527,7 @@ public static void loadStatsUtil() throws ClassNotFoundException { ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMaximum" , StatisticType.DURATION_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionRate" , StatisticType.RATE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatioRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRatioRatio" , StatisticType.RATIO_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyAverage" , StatisticType.AVERAGE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMinimum" , StatisticType.DURATION_HISTORY)); @@ -552,6 +553,7 @@ public static void loadStatsUtil() throws ClassNotFoundException { OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatioRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRatioRatio", StatisticType.RATIO_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", StatisticType.COUNTER_HISTORY)); @@ -577,6 +579,7 @@ public static void loadStatsUtil() throws ClassNotFoundException { DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatioRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRatioRatio", StatisticType.RATIO_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRate", StatisticType.RATE_HISTORY)); From 86ac52061503f812e84b4e85617547f06b626db0 Mon Sep 17 00:00:00 2001 From: ChrisBradley001 Date: Thu, 6 Oct 2016 10:44:51 +0200 Subject: [PATCH 071/218] Various documentation updates Did some general language tweaks. 
--- docs/src/docs/asciidoc/user/107.adoc | 56 +++++++++---------- .../asciidoc/user/cache-event-listeners.adoc | 2 +- .../docs/asciidoc/user/caching-concepts.adoc | 12 ++-- .../src/docs/asciidoc/user/caching-terms.adoc | 8 +-- .../docs/asciidoc/user/clustered-cache.adoc | 40 ++++++------- .../docs/asciidoc/user/eviction-advisor.adoc | 6 +- docs/src/docs/asciidoc/user/examples.adoc | 8 +-- docs/src/docs/asciidoc/user/expiry.adoc | 6 +- .../docs/asciidoc/user/getting-started.adoc | 32 +++++------ docs/src/docs/asciidoc/user/index.adoc | 2 +- docs/src/docs/asciidoc/user/management.adoc | 18 +++--- .../asciidoc/user/serializers-copiers.adoc | 16 +++--- docs/src/docs/asciidoc/user/thread-pools.adoc | 8 +-- docs/src/docs/asciidoc/user/usermanaged.adoc | 14 ++--- docs/src/docs/asciidoc/user/writers.adoc | 4 +- docs/src/docs/asciidoc/user/xa.adoc | 36 ++++++------ docs/src/docs/asciidoc/user/xml.adoc | 12 ++-- 17 files changed, 140 insertions(+), 140 deletions(-) diff --git a/docs/src/docs/asciidoc/user/107.adoc b/docs/src/docs/asciidoc/user/107.adoc index 944562009c..7b444301ef 100644 --- a/docs/src/docs/asciidoc/user/107.adoc +++ b/docs/src/docs/asciidoc/user/107.adoc @@ -11,15 +11,15 @@ endif::notBuildingForSite[] == JCache overview -JCache (aka JSR-107) specification defines the standard caching API for Java. +The JCache (aka JSR-107) specification defines the standard caching API for Java. The specification was developed under the Java Community Process v2.9 by an expert group including members from the Ehcache developer community. JCache provides a very simple API set that is easy to use and vendor neutral. Being one of the pioneers in the Java caching domain, Ehcache had to offer an implementation that is fully compliant with the JCache specification. -For years the biggest problems that application developers have faced while wanting to try cache implementations by different vendors is the stark contrast in the APIs offered by these vendors. -Developers were forced to rewrite a whole lot of their caching related code in an application just to try out a new caching solution. -This lead to developers sticking with what they had as the bar to investigating other products was too high. +For years, the biggest problem that application developers have faced while wanting to try cache implementations by different vendors is the stark contrast in the APIs offered by these vendors. +Developers were forced to rewrite a lot of their caching related code in an application just to try out a new caching solution. +This leads to developers sticking with what they had, as the bar to investigating other products was too high. The availability of the JCache specification gives real added value for developers as there is now a standard caching API they can use. So it is easier for an application developer to switch between products by different vendors and choose the one that suits them best without changing a single line of their application code interacting with caches. @@ -37,7 +37,7 @@ In addition to the `Cache` interface, JCache specification has defined two more Applications need to use a `CacheManager` to create/retrieve a `Cache`. Similarly a `CachingProvider` is required to get/access a `CacheManager`. 
-Here is a sample code that demonstrates the usage of basic JCache configuration APIs: +Here is some sample code that demonstrates the usage of the basic JCache configuration APIs: [source,java] ---- @@ -50,7 +50,7 @@ include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107Configuratio <2> Retrieve the default `CacheManager` instance using the provider. <3> Create a cache configuration using `MutableConfiguration`... <4> with key type and value type as `Long` and `String` respectively... -<5> configured to store the cache entries by reference(not by value)... +<5> configured to store the cache entries by reference (not by value)... <6> and with an expiry time of one minute defined for entries from the moment they are created. <7> Using the cache manager, create a cache named `jCache` with the configuration created in step <3> <8> Put some data into the cache. @@ -58,15 +58,15 @@ include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107Configuratio == JSR-107 and Ehcache configuration integration -As mentioned already the JCache specification offers a minimal set of configuration that is ideal for an in-memory cache. -But Ehcache native APIs support topologies that are much more complex and provides more features. -At times application developers might want to configure caches that are much complex (in terms of topology or features) -than the ones that JCache `MutableConfiguration` permits and still be able to use the JCache's caching APIs. -Ehcache provides several ways to achieve that and this section covers the same. +As mentioned already, the JCache specification offers a minimal set of configurations that is ideal for an in-memory cache. +But Ehcache native APIs support topologies that are much more complex and provide more features. +At times, application developers might want to configure caches that are much complex (in terms of topology or features) +than the ones that JCache `MutableConfiguration` permits and still be able to use JCache's caching APIs. +Ehcache provides several ways to achieve this, as described in the following section. === Starting from JSR-107 created caches -When you create a `Cache` on a `CacheManager` using a `MutableConfiguration` - that is you only use JSR-107 types - +When you create a `Cache` on a `CacheManager` using a `MutableConfiguration` - in other words, using only JSR-107 types - you can still get to the underlying Ehcache `RuntimeCacheConfiguration`: [source,java,indent=0] @@ -87,10 +87,10 @@ When using this mechanism, no JSR-107 `CompleteConfiguration` is used and so you ---- include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheBasedConfigurationExample] ---- -<1> Create an Ehcache `CacheConfiguration` - through a builder as shown here or even through XML +<1> Create an Ehcache `CacheConfiguration` - through a builder as shown here or alternatively use an XML configuration (as described in the following section). <2> Use the configuration with JSR-107 API by wrapping it <3> Get back to the Ehcache `CacheConfiguration` ... -<4> or to the runtime configuration even. +<4> or even to the runtime configuration. 
<5> No JSR-107 `CompleteConfiguration` is available in this context === Getting JSR-107 caches configured through Ehcache XML @@ -119,10 +119,10 @@ NOTE: You can also use the `CachingProvider.getCacheManager()` method that takes The `URI` and `ClassLoader` used to configure the `CacheManager` will then use the vendor specific values returned by `CachingProvider.getDefaultURI` and `.getDefaultClassLoader` respectively. -==== Control JSR-107 MBeans from XML +==== Controlling JSR-107 MBeans from XML When using Ehcache XML, you may want to enable management and / or statistics MBeans for JSR-107 caches. -This is giving you control over the following: +This gives you control over the following: * `javax.cache.configuration.CompleteConfiguration.isStatisticsEnabled` * `javax.cache.configuration.CompleteConfiguration.isManagementEnabled` @@ -156,16 +156,16 @@ constraint. All that's needed is adding a `jsr107` service in your XML configura include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml[] ---- <1> First, declare a namespace for the 107 extension, e.g. `jsr107` -<2> Within a `service` element at the top of you configuration, add a `jsr107:defaults` element +<2> Within a `service` element at the top of your configuration, add a `jsr107:defaults` element <3> The element takes an optional attribute `default-template`, which references the `cache-template` to use for all `javax.cache.Cache` created by the application at runtime using `javax.cache.CacheManager.createCache`. In - this example, the default `cache-template` used will be `tinyCache`, meaning that atop of their particular config, + this example, the default `cache-template` used will be `tinyCache`, meaning that in addition to their particular configuration, programmatically created `Cache` instances will have their capacity constrained to 20 entries. -<4> Nested within the `jsr107:defaults`, add specific `cache-templates` to use for given named `Cache`, e.g. when +<4> Nested within the `jsr107:defaults`, add specific `cache-templates` to use for the given named `Cache`. So, for example, when creating the `Cache` named `foos` at runtime, Ehcache will enhance its config, giving it a capacity of 2000 entries, as well as insuring both key and value types are `String`. -NOTE: See <> for complete definition +NOTE: See <> for a complete definition Using the above configuration, you can not only supplement but also override the configuration of JSR-107 created caches without modifying the application code. @@ -180,25 +180,25 @@ include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107Configuratio <4> you could verify that the template configured capacity is applied to the cache and returns _20_ here. <5> The cache template will override the JSR-107 cache's store-by-value config to store-by-ref since the `byRefTemplate` template that is used to create the cache is configured explicitly using `IdentityCopier`. -<6> Templates will also override JSR-107 config, see here a configuration with TTL 1 minute -<7> used to create a cache where the template says TTL 2 minutes. -<8> And we can indeed verify that the template provided configuration has been applied, duration will be _2 minutes_ and not _1_. +<6> Templates will also override the JSR-107 configuration, in this case using a configuration with TTL 1 minute +<7> used to create a cache where the template sets the TTL to 2 minutes. 
+<8> And we can indeed verify that the configuration provided in the template has been applied; the duration will be _2 minutes_ and not _1_. <9> One drawback of this is that when getting at the `CompleteConfiguration`, you no longer have access to the factories from JSR-107. NOTE: As mentioned in step 5, in order to override store-by-value configuration of a JSR-107 cache using templates you can explicitly configure the template using `IdentityCopier`. But the usage of `IdentityCopier` is not mandatory to get a store-by-ref cache. You can use any custom copier implementation that does not perform any "copying" but returns -the exact same reference that gets passed into the copy methods. `IdentityCopier` is just one that we have provided to -make your life easier. +the exact same reference that gets passed into the copy methods. `IdentityCopier` is just an example that we have +provided for your convenience. == A word on defaults -Ehcache 3 and Ehcache 3 through JCache do not always agree on default behavior. +Ehcache 3 used natively and Ehcache 3 through JCache do not always agree on default behavior. While native Ehcache 3 can behave the way JCache specifies, depending on the used configuration mechanism, you may see differences in defaults. === _by-reference_ or _by-value_ -Ehcache 3 and Ehcache 3 through JCache disagree on the default mode for heap only caching. +Native Ehcache 3 and Ehcache 3 through JCache disagree on the default mode for heap only caching. ==== Ehcache configuration with JCache `MutableConfiguration` @@ -220,7 +220,7 @@ See the documentation <>, you can now proceed to create the cache manager. For creating the cache manager with clustering support you will need to provide the clustering service configuration. @@ -112,7 +112,7 @@ include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/clie <1> Returns the `org.ehcache.config.builders.CacheManagerBuilder` instance; <2> Use the `ClusteringServiceConfigurationBuilder`{empty}'s static method `.cluster(URI)` for connecting the cache manager to the clustered storage at the URI specified that returns the clustering service configuration builder instance. - Sample URI provided in the example is pointing to the clustered storage instance named `my-application` on the Terracotta server (Assuming the server is running on localhost and port *9510*). + The sample URI provided in the example points to the clustered storage instance named `my-application` on the Terracotta server (Assuming the server is running on localhost and port *9510*). <3> Auto-create the clustered storage if it doesn't already exist. <4> Returns a fully initialized cache manager that can be used to create clustered caches. <5> Close the cache manager. @@ -131,14 +131,14 @@ include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/clie <2> Adds a resource pool for the cache manager with the specified name (`resource-pool-a`) and size (`28MB`) consumed out of the named server off-heap resource `secondary-server-resource`. A resource pool at the cache manager level maps directly to a shared pool at the server side. <3> Adds another resource pool for the cache manager with the specified name (`resource-pool-b`) and size (`32MB`). - Since the server resource identifier is not explicitly passed, this resource pool will be consumed out of default server resource provided in Step 3. 
+ Since the server resource identifier is not explicitly passed, this resource pool will be consumed out of the default server resource provided in Step 3. This demonstrates that a cache manager with clustering support can have multiple resource pools created out of several server off-heap resources. <4> Provide the cache configuration to be created. <5> `ClusteredResourcePoolBuilder.fixed(String , long , MemoryUnit)` allocates a fixed pool of storage to the cache from the specified server off-heap resource. In this example, a fixed pool of 32MB is allocated for `clustered-cache` from `primary-server-resource`. <6> `ClusteredResourcePoolBuilder.shared(String)`, passing the name of the resource pool specifies that `shared-cache-1` shares the storage resources with other caches using the same resource pool (`resource-pool-a`). <7> Configures another cache (`shared-cache-2`) that shares the resource pool (`resource-pool-a`) with `shared-cache-1`. -<8> Creates fully initialized cache manager with the clustered caches. +<8> Creates a fully initialized cache manager with the clustered caches. == Ehcache Clustered Tier Manager Lifecycle @@ -165,8 +165,8 @@ If it does not exist then the cache manager will fail to initialize. include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheTieredExample] ---- -<1> Configuring heap tier for cache. -<2> Configuring clustered tier of fixed size from server off-heap resource using `ClusteredResourcePoolBuilder`. +<1> Configuring the heap tier for cache. +<2> Configuring the clustered tier of fixed size from the server off-heap resource using `ClusteredResourcePoolBuilder`. The equivalent XML configuration is as follows: @@ -180,14 +180,14 @@ include::{sourcedir31}/clustered/client/src/test/resources/configs/docs/ehcache- === Specifying consistency level -Ehcache offers two level of consistency: +Ehcache offers two levels of consistency: Eventual:: This consistency level indicates that the visibility of a write operation is not guaranteed when the operation returns. Other clients may still see a stale value for the given key. However this consistency level guarantees that for a mapping `(K, V1)` updated to `(K, V2)`, once a client sees `(K, V2)` it will never see `(K, V1)` again. Strong:: -This consistency level provides strong visibility guarantees ensuring that when a write operation returns other clients will be able to observe it immediately. +This consistency level provides strong visibility guarantees, ensuring that when a write operation returns other clients will be able to observe it immediately. This comes with a latency penalty on the write operation required to give this guarantee. @@ -196,7 +196,7 @@ This comes with a latency penalty on the write operation required to give this g include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheConsistency] ---- -<1> Specify the consistency level through the use of additional service configuration, using _strong_ consistency here, +<1> Specify the consistency level through the use of an additional service configuration, using _strong_ consistency here, <2> With the consistency used above, this `put` operation will return only when all other clients have had the corresponding mapping invalidated. 
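Because the included example above is not rendered inside this patch, here is a rough sketch of what a strongly consistent clustered cache configuration can look like. It is an illustration only, not taken from the included sources; the builder and configuration classes are the ones imported by the `ClusteredConcurrencyTest` added later in this series (note that the prose above calls the dedicated pool builder method `fixed`, while that later test uses `clusteredDedicated`).

[source,java]
----
import org.ehcache.clustered.client.config.ClusteredStoreConfiguration;
import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder;
import org.ehcache.clustered.common.Consistency;
import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.config.units.MemoryUnit;

// A dedicated 32MB clustered pool on 'primary-server-resource', with strong consistency
// requested through an additional service configuration.
CacheConfiguration<Long, String> configuration = CacheConfigurationBuilder
    .newCacheConfigurationBuilder(Long.class, String.class,
        ResourcePoolsBuilder.newResourcePoolsBuilder()
            .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB)))
    .add(new ClusteredStoreConfiguration(Consistency.STRONG))
    .build();
----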
The equivalent XML configuration is as follows: @@ -210,7 +210,7 @@ include::{sourcedir31}/clustered/client/src/test/resources/configs/docs/ehcache- === Clustered Cache Expiry -Expiry in clustered caches work with an exception that `Expiry#getExpiryForAccess` is handled on a best effort basis for clustered tiers. +Expiry in clustered caches works with the exception that `Expiry#getExpiryForAccess` is handled on a best effort basis for clustered tiers. It may not be as accurate as in the case of local tiers. === Clustered Unspecified Inheritance @@ -221,7 +221,7 @@ When you create the second cache with the same alias in a different cache manage It will then inherit the clustered resource pool as it was configured when creating the clustered tier. This option provides many benefits. -The main benefit is it simplifies clustered configuration by allowing clustered resource pool configuration to be handled by one client, then all subsequent clients can inherit this configuration. +The main benefit is that it simplifies clustered configuration by allowing clustered resource pool configuration to be handled by one client, then all subsequent clients can inherit this configuration. In addition, it also reduces clustered pool allocation configuration errors. More importantly, sizing calculations only need to be done by one person and updated in one location. Thus any programmer can use the cache without having to worry about creating the right size resource pool allocations. @@ -237,4 +237,4 @@ include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/clie <3> Create cache `my-dedicated-cache` using the cache configuration <4> Configure the second cache manager as _expecting_ (auto create off) <5> Build a cache configuration for a clustered _unspecified_ resource pool, which will use the previously configured clustered _dedicated_ resource pool. -<6> Create cache with the same name `my-dedicated-cache` and use the clustered _unspecified_ cache configuration +<6> Create a cache with the same name `my-dedicated-cache` and use the clustered _unspecified_ cache configuration diff --git a/docs/src/docs/asciidoc/user/eviction-advisor.adoc b/docs/src/docs/asciidoc/user/eviction-advisor.adoc index 7b50667dad..e48f50b7c3 100644 --- a/docs/src/docs/asciidoc/user/eviction-advisor.adoc +++ b/docs/src/docs/asciidoc/user/eviction-advisor.adoc @@ -17,7 +17,7 @@ NOTE: This is an advanced topic/feature that will not be of interest to most use You can affect which elements are selected for eviction from the cache by providing a class that implements the `org.ehcache.config.EvictionAdvisor` interface. -`EvictionAdvisor` implementations are invoked when Ehcache is attempting to evict entries from the cache +`EvictionAdvisor` implementations are invoked when Ehcache attempts to evict entries from the cache (in order to make room for new entries) in order to determine whether the given entry should not be considered a good candidate for eviction. If the eviction is advised against, Ehcache will try to honor the preference of preserving that entry in the cache, though there is no full guarantee of such. @@ -28,7 +28,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t ---- <1> Configure a constrained heap, as the eviction advisor is only relevant when mappings get evicted from the cache. 
-<2> If you want to hint the eviction algorithm to advisor against the eviction of some mappings, you have to +<2> If you want to hint to the eviction algorithm to advise against the eviction of some mappings, you have to configure an instance of `EvictionAdvisor`. In this particular example, the `OddKeysEvictionAdvisor` class will advise against eviction of any key that is an odd number. @@ -37,7 +37,7 @@ into the cache - which will trigger capacity eviction. By the time the cache manager gets closed, only mappings with odd keys should be left in the cache as their prime candidacy for eviction would have been advised against. -NOTE: Eviction advise status is computed when a mapping is written to the cache. +NOTE: Eviction advisory status is computed when a mapping is written to the cache. This means that proper eviction advisor implementations are expected to be constant for a key-value pair. NOTE: Please keep in mind that configuring an eviction advisor can slow down eviction: the more often you advise against diff --git a/docs/src/docs/asciidoc/user/examples.adoc b/docs/src/docs/asciidoc/user/examples.adoc index 26a3b657c1..b71c1c7135 100644 --- a/docs/src/docs/asciidoc/user/examples.adoc +++ b/docs/src/docs/asciidoc/user/examples.adoc @@ -13,11 +13,11 @@ endif::notBuildingForSite[] The `demo` directory in the Ehcache 3 sources includes a sample applications with two (2) implementations demonstrating Ehcache use. Implemented as a simple -browser-based web service, the sample application, Peeper, displays any messages (_peeps_) -previously entered and accepts new peeps recording the peeps in a database. The peeps +browser-based web service, the sample application Peeper displays any messages (_peeps_) +previously entered and accepts new peeps, recording the peeps in a database. The peeps database, shared among implementations of the Peeper application, is located at +$HOME/ehcache-demo-peeper.mv.db+. This file may be safely erased while the application -is not running. While running, information about the operation of Peeper application +is not running. While running, information about the operation of the Peeper application (database access, cache access, etc.) is written to the console. While the sample application may be run, the application is _very_ simplistic -- the code @@ -66,7 +66,7 @@ Note the absence of indications of interactions with a cache. === Peeper with Cache-aside Caching -- +01-CacheAside+ The second sample, located in +demos/01-CacheAside+, is a version of the Peeper application -that makes use of Ehcache. As each peep is being read from the database (for display in the web +that makes use of Ehcache. As each peep is read from the database (for display in the web page), it is written to an Ehcache instance. If the Peeper web page is refreshed (without adding a new peep) or a new Peeper client connects, the peeps are read from the cache (instead of the database) to form the web page. 
If a new peep is posted, diff --git a/docs/src/docs/asciidoc/user/expiry.adoc b/docs/src/docs/asciidoc/user/expiry.adoc index 89b3472ebc..b859f690f9 100644 --- a/docs/src/docs/asciidoc/user/expiry.adoc +++ b/docs/src/docs/asciidoc/user/expiry.adoc @@ -38,7 +38,7 @@ Both Java and XML offer direct support for three types of expiry: [horizontal] no expiry:: this means cache mappings will never expire, time-to-live:: this means cache mappings will expire after a fixed duration following their creation, -time-to-idle:: this means cache mappings will expire after a fixed duration following their last access time. +time-to-idle:: this means cache mappings will expire after a fixed duration following the time they were last accessed. For Java, see `org.ehcache.expiry.Expirations` and the XSD for XML. @@ -46,7 +46,7 @@ Read on to implement your own expiration scheme. == Custom expiry -Support your own expiration scheme simply means implementing the `Expiry` interface: +Supporting your own expiration scheme simply means implementing the `Expiry` interface: [source,java,indent=0] ---- @@ -83,4 +83,4 @@ In XML: include::{sourcedir31}/xml/src/test/resources/configs/docs/getting-started.xml[tags=customExpiry] ---- -<1> Simply pass the fully qualified class name of your custom expiry. \ No newline at end of file +<1> Simply pass the fully qualified class name of your custom expiry. diff --git a/docs/src/docs/asciidoc/user/getting-started.adoc b/docs/src/docs/asciidoc/user/getting-started.adoc index c13b6f7b4d..a4dc77b361 100644 --- a/docs/src/docs/asciidoc/user/getting-started.adoc +++ b/docs/src/docs/asciidoc/user/getting-started.adoc @@ -44,21 +44,21 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t <5> We can retrieve the `preConfigured` aliased `Cache` we declared in step 2. For type-safety, we ask for both key and value types to be passed in. If these differ from the ones we expect, the `CacheManager` throws a `ClassCastException` early in the application's lifecycle. It also guards the `Cache` from being polluted by random types. -<6> The `CacheManager` can also be used to create new `Cache` as needed. Just as in step 2, it requires passing in an +<6> The `CacheManager` can also be used to create new `Cache` instances as needed. Just as in step 2, it requires passing in an alias as well as a `CacheConfiguration`. The instantiated and fully initialized `Cache` added will be returned and/or accessed through the `CacheManager.getCache` API. <7> We can now use the newly added `Cache` to store and ... <8> ... retrieve data. -<9> We can also `CacheManager.removeCache(String)` a given `Cache`. The `CacheManager` will not only remove it's reference to the +<9> We can also `CacheManager.removeCache(String)` a given `Cache`. The `CacheManager` will not only remove its reference to the `Cache`, but will also close it. The `Cache` releases all locally held transient resources (such as memory). References to this `Cache` become unusable. <10> In order to release all transient resources (memory, threads, ...) a `CacheManager` provides to `Cache` instances it manages, you have to invoke `CacheManager.close()`, which in turns closes all `Cache` instances known at the time. -=== Creating cache manager with clustering support +=== Creating a cache manager with clustering support To enable Clustering with Terracotta, firstly you will have to <> configured with clustered storage. 
-Further, for creating the cache manager with clustering support, you will need to provide the clustering service configuration: +In addition, for creating the cache manager with clustering support, you will need to provide the clustering service configuration: [source,java,indent=0] ---- @@ -68,7 +68,7 @@ include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/clie <1> Returns the `org.ehcache.config.builders.CacheManagerBuilder` instance; <2> Use the `ClusteringServiceConfigurationBuilder`{empty}'s static method `.cluster(URI)` for connecting the cache manager to the clustering storage at the URI specified that returns the clustering service configuration builder instance. - Sample URI provided in the example is pointing to the clustered storage with clustered storage identifier *my-application* on the Terracotta server (Assuming the server is running on localhost and port *9510*); the query-param `auto-create` + The sample URI provided in the example points to the clustered storage with clustered storage identifier *my-application* on the Terracotta server (assuming the server is running on localhost and port *9510*); the query-param `auto-create` creates the clustered storage in the server if it doesn't already exist. <3> Returns a fully initialized cache manager that can be used to create clustered caches. <4> Close the cache manager. @@ -84,7 +84,7 @@ Ehcache 3 introduces the concept of `UserManagedCache`: include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] ---- -<1> A new feature of Ehcache 3 is the ability to create `UserManagedCache` instances, i.e. ones not managed by a `CacheManager`, again you can either have the builder `init()` it for you, passing true or +<1> A new feature of Ehcache 3 is the ability to create `UserManagedCache` instances, i.e. ones not managed by a `CacheManager`, again you can either have the builder `init()` it for you, passing true, or <2> pass false and it is up to you to `UserManagedCache.init()` them, prior to using them. <3> You can use the cache exactly as a managed cache <4> In the same vein, a `UserManagedCache` requires you to `UserManagedCache.close()` it explicitly. If you would also use @@ -97,8 +97,8 @@ NOTE: See <> for more in Ehcache 3, as in previous versions, offers a tiering model to allow storing increasing amounts of data on slower tiers (which are generally more abundant). -The idea is that resources related to faster storage are more rare, but are where the 'hottest' data is preferred to be. -Thus less-hot (less frequently used) data is moved to the more abundant but slower tiers. Hotter data is faulted onto +The idea is that resources related to faster storage are more rare, but are located where the 'hottest' data is preferred to be. +Thus less-hot (less frequently used) data is moved to the more abundant but slower tiers. Hotter data is moved onto the faster tiers. ==== Off-heap @@ -133,7 +133,7 @@ and is thus slower than heap and offheap. You should thus favor disk for large amounts of data. Another reason to use disk storage is persistence across application restarts. -Note that Ehcache 3 only offers persistence in case of clean shutdowns. +Note that Ehcache 3 only offers persistence in the case of clean shutdowns. ==== Three tiers @@ -161,8 +161,8 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t <1> You can also size the heap tier in bytes. 
This will limit the amount of heap used by that tier for storing key-value pairs. Note that there is a cost associated to sizing objects. -<2> The sizing mechanism can be configured along two axis: The first one specifies the maximum number - of objects to traverse while walking the object graph, the second defines the maximum size of a +<2> The sizing mechanism can be configured along two axes: The first one specifies the maximum number + of objects to traverse while walking through the object graph, the second defines the maximum size of a single object. If the sizing goes above any of these two limits, the mutative operation on the cache will be ignored. <3> A default configuration can be provided at CacheManager level to be used by the caches unless defined @@ -181,8 +181,8 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t <1> You will need to create a new `ResourcePools` object with resources of required size, using `ResourcePoolsBuilder`. This object can then be passed to the said method so as to trigger the update. -<2> To update capacity of `ResourcePools`, `updateResourcePools(ResourcePools)` method in `RuntimeConfiguration` can be of help. - `ResourcePools` object created earlier can then be passed to this method so as to trigger the update. +<2> To update capacity of `ResourcePools`, the `updateResourcePools(ResourcePools)` method in `RuntimeConfiguration` can be of help. + The `ResourcePools` object created earlier can then be passed to this method so as to trigger the update. === Data freshness @@ -204,7 +204,7 @@ See the section on <> for more information about the option ...It wouldn't be Java without _some_ XML -You can create a XML file to configure a `CacheManager`: +You can create an XML file to configure a `CacheManager`: [source,xml] ---- @@ -215,7 +215,7 @@ include::{sourcedir31}/xml/src/test/resources/configs/docs/getting-started.xml[t <2> The keys of `foo` are declared as type `String`; since the value type is not specified, the values will be of type `Object`. <3> `foo` is declared to hold up to 2,000 entries on heap... <4> ...as well as up to 500 MB of off-heap memory before it starts evicting -<5> `` elements let you create an abstract configuration that further `` configuration can then _extend_ +<5> `` elements let you create an abstract configuration that further `` configurations can then _extend_ <6> `bar` is such a `Cache`. `bar` uses the `` named `myDefaults` and overrides its `key-type` to a wider type. <7> `simpleCache` is another such a `Cache`. It uses `myDefaults` configuration for its sole `CacheConfiguration`. @@ -237,7 +237,7 @@ CacheManager myCacheManager = CacheManagerBuilder.newCacheManager(xmlConfig); // == Current development -For developer information, you might want to go check the https://github.com/ehcache/ehcache3/wiki[Ehcache 3 project wiki on GitHub]. +For developer information, you might want to check the https://github.com/ehcache/ehcache3/wiki[Ehcache 3 project wiki on GitHub]. The next version, 3.2, will finalize http://terracotta.org[Terracotta clustering] support. Cache events, cache-through and transactional clustered caches will then be supported. diff --git a/docs/src/docs/asciidoc/user/index.adoc b/docs/src/docs/asciidoc/user/index.adoc index f387229fb0..86d60f8d88 100644 --- a/docs/src/docs/asciidoc/user/index.adoc +++ b/docs/src/docs/asciidoc/user/index.adoc @@ -25,7 +25,7 @@ Each topic below corresponds to a menu item at the left. 
|link:107.html[JSR-107 Support]|Using Ehcache as a javax.cache aka JSR-107 provider |link:examples.html[Java Examples]|Examples of using Ehcache APIs |link:xsds.html[Configuration XSD]|Reference XSD for configuration -|link:clustered-cache.html[Clustering with Terracotta]|Using Terracotta to enable clustering of caches in EhCache +|link:clustered-cache.html[Clustering with Terracotta]|Using Terracotta to enable clustering of caches in Ehcache |=== === General Topics diff --git a/docs/src/docs/asciidoc/user/management.adoc b/docs/src/docs/asciidoc/user/management.adoc index 07f9569d93..e5ed6302f1 100644 --- a/docs/src/docs/asciidoc/user/management.adoc +++ b/docs/src/docs/asciidoc/user/management.adoc @@ -11,7 +11,7 @@ endif::notBuildingForSite[] == Intro -Managed objects like caches, cache managers and stores are registered into a `org.ehcache.management.ManagementRegistryService` +Managed objects like caches, cache managers and stores are registered into an `org.ehcache.management.ManagementRegistryService` instance. A `ManagementRegistry` implementation has to understand the registered object and provide management and monitoring @@ -25,7 +25,7 @@ a minimal set of statistics and actions via a couple of capabilities. == Making use of the `ManagementRegistry` -By default, a `ManagementRegistry` is automatically discovered and enabled, but can only be accessed by ehcache +By default, a `ManagementRegistry` is automatically discovered and enabled, but can only be accessed by Ehcache internal services. If you wish to make use of it, you should create your own instance and pass it to the cache manager builder as a service: @@ -45,9 +45,9 @@ Obviously, you may use the above technique to pass your own implementation of `M == Capabilities and contexts -Capabilities are metadata of what the managed objects are capable of: a collection of statistic that can be queried +Capabilities are metadata of what the managed objects are capable of: a collection of statistics that can be queried and/or remote actions that can be called. -Each capability requires a context to run within. For instance, cache-specific statistics require a cache manager name +Each capability requires a context to run in. For instance, cache-specific statistics require a cache manager name and a cache name to uniquely identify the cache on which you want to query stats or call an action. [source,java,indent=0] @@ -57,11 +57,11 @@ include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest. <1> Query the `ManagementRegistry` for the registered managed objects' capabilities. <2> Each capability has a unique name you will need to refer to it. <3> Each capability has a collection of `Descriptor`s that contains the metadata of each statistic or action. -<4> Each capability requires a context to which it needs to refer to. +<4> Each capability requires a context which it needs to refer to. <5> The first attribute of this context is the cache manager name. <6> The second attribute of this context is the cache name. With both attributes, the capability can uniquely refer to a unique managed object. -<7> Query the `ManagementRegistry` for the all the registered managed objects' contexts. +<7> Query the `ManagementRegistry` for all of the registered managed objects' contexts. <8> There is only one context here, and its name is the cache manager's name. <9> The above context has a subcontext: the cache's name. @@ -70,9 +70,9 @@ context container to a capability's context by matching their respective names. 
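To make the registration step described under "Making use of the `ManagementRegistry`" concrete, here is a minimal sketch. It is an illustration only: `DefaultManagementRegistryService`, `DefaultManagementRegistryConfiguration` and `setCacheManagerAlias` are assumptions based on the management module, not code taken from the included sources.

[source,java]
----
import org.ehcache.CacheManager;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.CacheManagerBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.management.ManagementRegistryService;
import org.ehcache.management.registry.DefaultManagementRegistryConfiguration;
import org.ehcache.management.registry.DefaultManagementRegistryService;

// Create our own registry instance (class names assumed, see note above) so that
// application code can query capabilities and contexts later on.
ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(
    new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"));

CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder()
    .withCache("myCache", CacheConfigurationBuilder.newCacheConfigurationBuilder(
        Long.class, String.class, ResourcePoolsBuilder.heap(10)))
    .using(managementRegistry)   // pass the registry to the builder as a service
    .build(true);

// ... query the registry for capabilities and contexts as described above ...

cacheManager.close();
----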
== Actions -There are two forms of capabilities: statistics and action ones. The statistic ones offer a set of predefined -statistics that can be queried at will, while the action ones offer a set of actions that can be taken upon -a managed object. Examples of actions could be: clear caches, get their config or modify a config setting. +There are two forms of capabilities: statistics and action ones. The statistics ones offer a set of predefined +statistics that can be queried at will, while the action ones offer a set of actions that can be taken on +a managed object. Examples of actions could be: clear caches, get their configuration or modify a configuration setting. [source,java,indent=0] ---- diff --git a/docs/src/docs/asciidoc/user/serializers-copiers.adoc b/docs/src/docs/asciidoc/user/serializers-copiers.adoc index 27be820b30..7d20bdf047 100644 --- a/docs/src/docs/asciidoc/user/serializers-copiers.adoc +++ b/docs/src/docs/asciidoc/user/serializers-copiers.adoc @@ -45,7 +45,7 @@ If a serializer is configured directly at the cache level, it will be used, igno If a serializer is configured at the cache manager level, upon initialization, a cache with no specifically configured serializer will search through its cache manager's registered list of serializers and try to find one that directly matches the cache's key or value type. -If such search fails, all the registered serializers will be tried in the added order to find one that handles compatible types. +If such a search fails, all the registered serializers will be tried in the added order to find one that handles compatible types. For instance, let's say you have a `Person` interface and two subclasses: `Employee` and `Customer`. If you configure your cache manager as follows: ```java @@ -60,7 +60,7 @@ NOTE: Given the above, it is recommended to limit `Serializer` registration to c [[serializers-bundled]] === Bundled implementations -By default, cache managers are pre-configured with specially optimized `Serializer` that can handle the following types, in the following order: +By default, cache managers are pre-configured with a specially optimized `Serializer` that can handle the following types, in the following order: - `java.io.Serializable` - `java.lang.Long` @@ -84,7 +84,7 @@ However, registering a different `Serializer` for one of the given type means it === Lifecycle: instances vs. class names When a `Serializer` is configured by providing an _instance_, it is up to the provider of that instance to manage its lifecycle. -It will need to dispose of any resource the serializer might hold upon and/or persisting and reloading the serializer's state. +It will need to dispose of any resource the serializer might hold upon persisting and/or reloading the serializer's state. When a `Serializer` is configured by providing a _class name_ either at the cache or cache manager level, since Ehcache is responsible for creating the instance, it also is responsible for disposing of it. @@ -114,12 +114,12 @@ include::{sourcedir31}/api/src/main/java/org/ehcache/spi/serialization/Serialize As the javadoc states, there are some constructor rules, see the <> for that. -You can optionally implement `java.io.Closeable`. If you do, Ehcache will call `close()` when a cache using such serializer gets disposed of, but *only if* +You can optionally implement `java.io.Closeable`. If you do, Ehcache will call `close()` when a cache using such a serializer gets disposed of, but *only if* Ehcache instantiated the serializer itself. 
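To make the constructor and `Closeable` remarks above concrete, here is a minimal sketch of a transient `String` serializer. The class is hypothetical and assumes the single `ClassLoader` constructor form; real implementations should follow the constructor rules linked above.

[source,java]
----
import java.io.Closeable;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.ehcache.spi.serialization.Serializer;

public class SimpleStringSerializer implements Serializer<String>, Closeable {

  // Transient serializers are expected to expose a ClassLoader-taking constructor;
  // String does not need it, but Ehcache uses it to resolve user types.
  public SimpleStringSerializer(ClassLoader classLoader) {
  }

  @Override
  public ByteBuffer serialize(String object) {
    return ByteBuffer.wrap(object.getBytes(StandardCharsets.UTF_8));
  }

  @Override
  public String read(ByteBuffer binary) {
    byte[] bytes = new byte[binary.remaining()];
    binary.get(bytes);
    return new String(bytes, StandardCharsets.UTF_8);
  }

  @Override
  public boolean equals(String object, ByteBuffer binary) {
    return object.equals(read(binary.duplicate()));
  }

  @Override
  public void close() {
    // Called by Ehcache only when it instantiated the serializer itself.
  }
}
----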
=== ClassLoaders -When Ehcache instantiates a serializer itself, it will pass it a `ClassLoader` via the constructor. Such class loader must be used to access the classes +When Ehcache instantiates a serializer itself, it will pass it a `ClassLoader` via the constructor. Such a class loader must be used to access the classes of the serialized types as they might not be available in the current class loader [[persistent-vs-transient-caches]] @@ -194,7 +194,7 @@ which allow by _instance_ or by _class name_ configuration. If a copier is configured directly at the cache level, it will be used, ignoring any cache manager level configuration. If a copier is configured at the cache manager level, upon initialization, a cache with no specifically configured copier will -search through its cache manager's registered list of copiers and try to find one that directly matches the cache's key or value type. If such +search through its cache manager's registered list of copiers and try to find one that directly matches the cache's key or value type. If such a search fails, all the registered copiers will be tried in the added order to find one that handles compatible types. For instance, let's say you have a `Person` interface and two subclasses: `Employee` and `Customer`. If you configure your cache manager as follows: @@ -221,11 +221,11 @@ The `CacheConfigurationBuilder` provides the following methods to make use of th === Lifecycle: instances vs class names When a `Copier` is configured by providing an _instance_, it is up to the provider of that instance to manage its lifecycle. -It will have dispose of any resource the copier might hold upon. +It will have to dispose of any resource the copier might hold. When a `Copier` is configured by providing a _class name_ either at the cache or cache manager level, since Ehcache is responsible for creating the instance, it also is responsible for disposing of it. -If the `Copier` implements `java.io.Closeable` then `close()` will be called when the cache is closed and the `Copier` no longer needed. +If the `Copier` implements `java.io.Closeable` then `close()` will be called when the cache is closed and the `Copier` is no longer needed. === Writing your own Copier diff --git a/docs/src/docs/asciidoc/user/thread-pools.adoc b/docs/src/docs/asciidoc/user/thread-pools.adoc index 7d6beef65f..a0d8719fa3 100644 --- a/docs/src/docs/asciidoc/user/thread-pools.adoc +++ b/docs/src/docs/asciidoc/user/thread-pools.adoc @@ -66,12 +66,12 @@ Following is the list of services making use of `ExecutionService`: `CacheEventDispatcherFactoryConfiguration` is used to configure what thread pool to use at the cache manager level. The different builders will make use of the right configuration class, you do not have to use those classes directly. -For instance, calling `CacheManagerBuilder.withDefaultDiskStoreThreadPool(String threadPoolAlias)` actually is identical +For instance, calling `CacheManagerBuilder.withDefaultDiskStoreThreadPool(String threadPoolAlias)` is actually identical to calling `CacheManagerBuilder.using(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias))`. -The thread pool can be assigned to a service with the builders by passing to the ad-hoc method a -`threadPoolAlias` parameter. When a service isn't told anything about what thread pool to use, the default thread pool -is used. +The thread pool can be assigned to a service with the builders by passing a +`threadPoolAlias` parameter to the ad-hoc method. 
When a service isn't told anything about what thread pool to use, +the default thread pool is used. == In practice diff --git a/docs/src/docs/asciidoc/user/usermanaged.adoc b/docs/src/docs/asciidoc/user/usermanaged.adoc index 59aa434b11..db61e594fa 100644 --- a/docs/src/docs/asciidoc/user/usermanaged.adoc +++ b/docs/src/docs/asciidoc/user/usermanaged.adoc @@ -50,11 +50,11 @@ include::{sourcedir31}/api/src/main/java/org/ehcache/PersistentUserManagedCache. ---- include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] ---- -<1> Create a `UserManagedCache` instance, again you can either have the builder `init()` it for you, passing true or +<1> Create a `UserManagedCache` instance, again you can either have the builder `init()` it for you, passing true, or <2> pass false and it is up to you to `UserManagedCache.init()` them, prior to using them. <3> You can use the cache exactly as a managed cache -<4> In the same vein, a `UserManagedCache` requires you to `UserManagedCache.close()` it explicitly. If you would also use - managed caches simultaneously, the `CacheManager.close()` operation would not impact the user managed cache(s). +<4> In the same vein, a `UserManagedCache` requires you to `UserManagedCache.close()` it explicitly. If you also use + managed caches simultaneously, the `CacheManager.close()` operation does not impact the user managed cache(s). From this basic example, explore the API of `UserManagedCacheBuilder` to find all the directly available features. @@ -65,18 +65,18 @@ The following features apply in the exact same way to user managed caches: Simply use the methods from `UserManagedCacheBuilder` which are equivalent to the ones from `CacheConfigurationBuilder`. -Below we will describe some more advanced setup where there is need to maintain a service instance in order to have working user managed cache. +Below we will describe some more advanced setup where there is a need to maintain a service instance in order to have a working user managed cache. -=== Example with disk persistent and lifecycle +=== Example with disk persistence and lifecycle -If you want to use disk persistent cache, you will need to create and lifecycle the persistence service. +If you want to use a disk persistent cache, you will need to create and lifecycle the persistence service. [source,java,indent=0] ---- include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=persistentUserManagedCache] ---- <1> Create the persistence service to be used by the cache for storing data on disk -<2> Pass the persistence service to the builder next to an id for the cache - note that this will make the builder produce a more specific type: `PersistentUserManagedCache` +<2> Pass the persistence service to the builder as well as an id for the cache - note that this will make the builder produce a more specific type: `PersistentUserManagedCache` <3> As usual, indicate here if the data should outlive the cache <4> Closing the cache will not delete the data it saved on disk when marked as persistent. <5> To delete the data, after closing the cache, destroy has to be explicitly invoked. 
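As a self-contained recap of the basic (non-persistent) lifecycle described at the start of this page — a sketch only, not one of the included samples:

[source,java]
----
import org.ehcache.UserManagedCache;
import org.ehcache.config.builders.UserManagedCacheBuilder;

public class UserManagedCacheSketch {
  public static void main(String[] args) {
    // build(true) lets the builder init() the cache; pass false to init() it yourself later
    UserManagedCache<Long, String> cache = UserManagedCacheBuilder
        .newUserManagedCacheBuilder(Long.class, String.class)
        .build(true);

    cache.put(42L, "the answer");
    String value = cache.get(42L);

    // no CacheManager is involved, so the cache must be closed explicitly
    cache.close();
  }
}
----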
diff --git a/docs/src/docs/asciidoc/user/writers.adoc b/docs/src/docs/asciidoc/user/writers.adoc index 44f8b8334e..5e9f294e6a 100644 --- a/docs/src/docs/asciidoc/user/writers.adoc +++ b/docs/src/docs/asciidoc/user/writers.adoc @@ -72,7 +72,7 @@ maximum write delay:: include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeThroughCache] ---- -<1> We register a sample `CacheLoaderWriter` who knows about the mapping `(41L -> "zero")` +<1> We register a sample `CacheLoaderWriter` that knows about the mapping `(41L -> "zero")` <2> Since the cache has no content yet, this will delegate to the `CacheLoaderWriter`. The returned mapping will populate the cache and be returned to the caller. <3> While creating this cache mapping, the `CacheLoaderWriter` will be invoked to write the mapping into the system of record. @@ -86,7 +86,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t <1> For write-behind you need a configured `CacheLoaderWriter`. <2> Additionally, register a `WriteBehindConfiguration` on the cache by using the `WriteBehindConfigurationBuilder`. -<3> Here we configure write behind or batching with a batch size of 3 and a maximum write delay of 1 second. +<3> Here we configure write-behind or batching with a batch size of 3 and a maximum write delay of 1 second. <4> We also set the maximum size of the write-behind queue. <5> Define the concurrency level of write-behind queue(s). This indicates how many writer threads work in parallel to update the underlying system of record asynchronously. diff --git a/docs/src/docs/asciidoc/user/xa.adoc b/docs/src/docs/asciidoc/user/xa.adoc index a7a7b071a1..1f4db9a36b 100644 --- a/docs/src/docs/asciidoc/user/xa.adoc +++ b/docs/src/docs/asciidoc/user/xa.adoc @@ -56,10 +56,10 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ <1> First start the Bitronix transaction manager. By default, Ehcache 3 will auto-detect it but will throw an exception during the cache manager initialization if BTM isn't started. -<2> Configure the cache manager such as it can handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. +<2> Configure the cache manager to handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. <3> Register a cache the normal way. <4> Give it the resources you wish. -<5> Add a `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique +<5> Add an `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique XAResource identifier as some transaction managers require this. <6> Begin a JTA transaction the normal way. <7> Work with the cache the normal way, all operations are supported. Note that concurrent transactions will not see @@ -68,9 +68,9 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ === Configuring your transaction manager -While only the Bitronix JTA implementation has been tested so far, plugging-in another one is possible. +While only the Bitronix JTA implementation has been tested so far, plugging in another one is possible. 
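Plugging in another JTA implementation boils down to implementing the lookup interface described just below. The following skeleton is an assumption-laden illustration: the package locations of `TransactionManagerWrapper` and `NullXAResourceRegistry`, and the lookup helper, are assumed rather than taken from the sources; only the method name and the wrapper's constructor arguments are confirmed by the callouts that follow.

[source,java]
----
import javax.transaction.TransactionManager;

import org.ehcache.transactions.xa.txmgr.NullXAResourceRegistry;
import org.ehcache.transactions.xa.txmgr.TransactionManagerWrapper;
import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerLookup;

public class MyTransactionManagerLookup implements TransactionManagerLookup {

  // the interface is expected to offer a no-arg constructor (see the Bitronix example below)
  public MyTransactionManagerLookup() {
  }

  @Override
  public TransactionManagerWrapper lookupTransactionManagerWrapper() {
    TransactionManager transactionManager = locateTransactionManager();
    // NullXAResourceRegistry is usable when the JTA implementation needs no
    // vendor-specific XAResource registration
    return new TransactionManagerWrapper(transactionManager, new NullXAResourceRegistry());
  }

  // hypothetical helper: how the TransactionManager is obtained depends on your JTA implementation
  private TransactionManager locateTransactionManager() {
    throw new UnsupportedOperationException("vendor-specific lookup goes here");
  }
}
----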
-You will need to implement a `org.ehcache.transactions.xa.txmgr.provider.TransactionManagerLookup` +You will need to implement an `org.ehcache.transactions.xa.txmgr.provider.TransactionManagerLookup` and make sure you understand its expected lifecycle as well as the one of the `org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProvider`. If such a lifecycle does not match your needs, you will have to go one step further and implement your own `org.ehcache.transactions.xa.txmgr.provider.TransactionManagerProvider`. @@ -88,12 +88,12 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ <1> First start the Bitronix transaction manager. By default, Ehcache 3 will auto-detect it but will throw an exception during the cache manager initialization if BTM isn't started. -<2> Configure the cache manager such as it can handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. +<2> Configure the cache manager to handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. <3> Register a cache the normal way. <4> Give it the resources you wish. -<5> Add a `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique +<5> Add an `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique XAResource identifier as some transaction managers require this. -<6> Add a `CacheLoaderWriter` configuration. This one is a mocked SoR backed by a map for illustration purpose that +<6> Add a `CacheLoaderWriter` configuration. This one is a mocked SoR backed by a map for illustration purposes that is filled with `1L`/`"eins"` key/value pair at startup. <7> Begin a JTA transaction the normal way. <8> The cache is empty at startup, so the `CacheLoaderWriter` will be called to load the value. @@ -102,8 +102,8 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ === Transactional scope -A XA cache can only be accessed within a JTA transaction's context. Any attempt to access one outside of such context -will result in `XACacheException` to be thrown. +An XA cache can only be accessed within a JTA transaction's context. Any attempt to access one outside of such a context +will result in `XACacheException` being thrown. [source,java,indent=0] ---- @@ -112,10 +112,10 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ <1> First start the Bitronix transaction manager. By default, Ehcache 3 will auto-detect it but will throw an exception during the cache manager initialization if BTM isn't started. -<2> Configure the cache manager such as it can handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. +<2> Configure the cache manager to handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. <3> Register a cache the normal way. <4> Give it the resources you wish. -<5> Add a `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique +<5> Add an `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique XAResource identifier as some transaction managers require this. <6> The cache is being accessed with no prior call to `transactionManager.begin()` which makes it throw `XACacheException`. 
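For contrast with the failure case in callout <6>, the correct access pattern wraps every cache operation in an active JTA transaction. This is a fragment only, assuming the `transactionManager` and XA cache set up in the preceding example:

[source,java]
----
transactionManager.begin();   // a JTA transaction must be active before the cache is touched
{
  xaCache.put(1L, "one");     // cache operations participate in the surrounding transaction
}
transactionManager.commit();  // or rollback() to discard the changes
----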
@@ -139,11 +139,11 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ <1> First start the Bitronix transaction manager. By default, Ehcache 3 will auto-detect it but will throw an exception during the cache manager initialization if BTM isn't started. -<2> Configure the cache manager such as it can handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. +<2> Configure the cache manager to handle transactions by having a `TransactionManagerProvider` loaded and configured to use Bitronix. <3> Register a `LocalPersistenceService` with your `CacheManager` to use disk storage. <4> Register a cache the normal way. <5> Give it the resources you wish. -<6> Add a `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique +<6> Add an `XAStoreConfiguration` object to make the cache XA transactional. You must also give the cache a unique XAResource identifier as some transaction managers require this. <7> Begin a JTA transaction the normal way. <8> Update the value. @@ -152,7 +152,7 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ == Configuring it with XML -You can create a XML file to configure a `CacheManager`, lookup a specific transaction manager and configure +You can create an XML file to configure a `CacheManager`, look up a specific transaction manager and configure XA caches: [source,xml] @@ -160,7 +160,7 @@ XA caches: include::{sourcedir31}/transactions/src/test/resources/docs/configs/xa-getting-started.xml[tags=gettingStarted] ---- -<1> Declare a `TransactionManagerLookup` that will lookup your transaction manager. +<1> Declare a `TransactionManagerLookup` that will look up your transaction manager. <2> Configure a `xaCache` cache the normal way. <3> Configure `xaCache` as an XA cache, giving it `xaCache` as its unique XAResource ID. @@ -173,7 +173,7 @@ include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/ <1> The Bitronix transaction manager must be started before the cache manager is initialized. <2> Create a `URL` to your XML file's location. -<3> Instantiate a `XmlConfiguration` passing it the XML file's `URL`. +<3> Instantiate an `XmlConfiguration` passing it the XML file's `URL`. <4> Using the static `org.ehcache.config.builders.CacheManagerBuilder.newCacheManager(org.ehcache.config.Configuration)` lets you create your `CacheManager` instance using the `Configuration` from the `XmlConfiguration`. @@ -184,11 +184,11 @@ And here is what the `BitronixTransactionManagerLookup` implementation looks lik include::{sourcedir31}/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java[tag=BitronixLookup] ---- -<1> The `TransactionManagerLookup` interface must be implemented and the offer a no-arg constructor. +<1> The `TransactionManagerLookup` interface must be implemented and offer a no-arg constructor. <2> The `lookupTransactionManagerWrapper()` method must return a `TransactionManagerWrapper` instance. <3> Here is the check that makes sure BTM is started. <4> The `TransactionManagerWrapper` class is constructed with both the `javax.transaction.TransactionManager` - instance as well as a `XAResourceRegistry` instance. The latter is used to register the + instance and an `XAResourceRegistry` instance. The latter is used to register the `javax.transaction.xa.XAResource` instances of the cache with the transaction manager using an implementation-specific mechanism. 
If your JTA implementation doesn't require that, you can use the `NullXAResourceRegistry` instead. diff --git a/docs/src/docs/asciidoc/user/xml.adoc b/docs/src/docs/asciidoc/user/xml.adoc index 64b5da166c..2d2956cd36 100644 --- a/docs/src/docs/asciidoc/user/xml.adoc +++ b/docs/src/docs/asciidoc/user/xml.adoc @@ -27,7 +27,7 @@ provides the definition for a `CacheManager`. With Ehcache 3, however, you may `` elements are an extension point for specifying `CacheManager` managed services. Each `Service` defined in this way is managed with the -same lifecycle as the `CacheManager` -- for each `Service` defined for a `CacheManager, the `Service.start` +same lifecycle as the `CacheManager` -- for each `Service` defined for a `CacheManager`, the `Service.start` is called during `CacheManager.init` processing and the `Service.stop` method is called during `CacheManager.close` processing. @@ -38,7 +38,7 @@ JSR-107 uses this extension point of the XML configuration (and Ehcache 3's modu === `` element -A `` element represents `Serializers` configured at `CacheManager` level. +A `` element represents `Serializers` configured at `CacheManager` level. It is a collection of `` elements that require a `type` and a fully qualified class name of the `Serializer`. === `` element @@ -55,7 +55,7 @@ It requires the `directory` location where data needs be stored on disk. A `` element represent a `Cache` instance that will be created and managed by the `CacheManager`. Each `` requires the `alias` attribute, used at runtime to retrieve the corresponding `Cache` instance using -the `org.ehcache.CacheManager.getCache(String, Class, Class)` method. The optional `uses-template` attribute, lets you reference +the `org.ehcache.CacheManager.getCache(String, Class, Class)` method. The optional `uses-template` attribute lets you reference a `` element's `name` attribute. See the <> for further details on using them. @@ -74,11 +74,11 @@ Supported nested elements are optional: `` elements represent a uniquely named (specified using the mandatory `name` attribute) template for `` elements to inherit from. A `` element that references a `` by its `name` using the `uses-template` attribute, will inherit all properties of the ``. A `` -can override these properties as it needs. +can override these properties as required. A `` element may contain all the same child elements as a `` element. -NOTE: We've setup a complete configuration <> to inspire you. +NOTE: We've set up a complete configuration <> to inspire you. == XML programmatic parsing @@ -95,7 +95,7 @@ include::{sourcedir31}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[ta to create your `CacheManager` instance using the `Configuration` from the `XmlConfiguration` We can also use `` declared in the XML file to seed instances of `CacheConfigurationBuilder`. In order -to use a `` element from a XML file, e.g. the `/my-config.xml` contained this XML fragment: +to use a `` element from an XML file, e.g. 
the `/my-config.xml` contains this XML fragment: [source,xml,indent=0] ---- From e828c301a5889cd8024233454343f3975618ade3 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Tue, 11 Oct 2016 09:40:08 -0400 Subject: [PATCH 072/218] Support concurrent cache creation (close #1278) --- .../service/DefaultClusteringService.java | 22 +--- .../client/ClusteredConcurrencyTest.java | 122 ++++++++++++++++++ 2 files changed, 129 insertions(+), 15 deletions(-) create mode 100644 clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 615a531f72..9cd60ecafb 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -26,7 +26,6 @@ import org.ehcache.clustered.client.internal.EhcacheEntityNotFoundException; import org.ehcache.clustered.client.internal.EhcacheEntityValidationException; import org.ehcache.clustered.client.internal.config.ExperimentalClusteringServiceConfiguration; -import org.ehcache.clustered.client.internal.store.ClusteredStore; import org.ehcache.clustered.client.internal.store.EventualServerStoreProxy; import org.ehcache.clustered.client.internal.store.ServerStoreProxy; import org.ehcache.clustered.client.internal.store.StrongServerStoreProxy; @@ -45,7 +44,6 @@ import org.ehcache.spi.persistence.StateRepository; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.service.Service; -import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -382,22 +380,16 @@ public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifie try { if (configuration.isAutoCreate()) { try { - this.entity.validateCache(cacheId, clientStoreConfiguration); - } catch (ClusteredTierValidationException ex) { - if (ex.getCause() instanceof InvalidStoreException) { - try { - this.entity.createCache(cacheId, clientStoreConfiguration); - } catch (TimeoutException e) { - throw new CachePersistenceException("Unable to create clustered tier proxy '" - + cacheIdentifier.getId() + "' for entity '" + entityIdentifier - + "'; create operation timed out", e); - } - } else { - throw ex; + entity.createCache(cacheId, clientStoreConfiguration); + } catch (ClusteredTierCreationException e) { + // An InvalidStoreException means the cache already exists. 
That's fine, the validateCache will then work + if (!(e.getCause() instanceof InvalidStoreException)) { + throw e; } + entity.validateCache(cacheId, clientStoreConfiguration); } } else { - this.entity.validateCache(cacheId, clientStoreConfiguration); + entity.validateCache(cacheId, clientStoreConfiguration); } } catch (ClusteredTierException e) { throw new CachePersistenceException("Unable to create clustered tier proxy '" + cacheIdentifier.getId() + "' for entity '" + entityIdentifier + "'", e); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java new file mode 100644 index 0000000000..f9625c9c9f --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/ClusteredConcurrencyTest.java @@ -0,0 +1,122 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client; + +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.net.URI; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +/** + * This test makes sure a clustered cache can be opened from many client instances. As usual with concurrency tests, a + * success doesn't mean it will work forever and a failure might not occur reliably. 
However, it puts together all + * conditions to make it fail in case of race condition + * + * @author Henri Tremblay + */ +public class ClusteredConcurrencyTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); + private static final String CACHE_NAME = "clustered-cache"; + + private AtomicReference exception = new AtomicReference(); + + @Before + public void definePassthroughServer() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 64, MemoryUnit.MB) + .resource("secondary-server-resource", 64, MemoryUnit.MB) + .build()); + } + + @After + public void removePassthroughServer() throws Exception { + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + public void test() throws Throwable { + final int THREAD_NUM = 50; + + final CountDownLatch latch = new CountDownLatch(THREAD_NUM + 1); + + List threads = new ArrayList(THREAD_NUM); + for (int i = 0; i < THREAD_NUM; i++) { + Thread t1 = new Thread(content(latch)); + t1.start(); + threads.add(t1); + } + + latch.countDown(); + latch.await(); + + for(Thread t : threads) { + t.join(); + } + + Throwable throwable = exception.get(); + if(throwable != null) { + throw throwable; + } + } + + private Runnable content(final CountDownLatch latch) { + return new Runnable() { + @Override + public void run() { + try { + CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER_URI).autoCreate() + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 32, MemoryUnit.MB) + .resourcePool("resource-pool-b", 32, MemoryUnit.MB, "secondary-server-resource")) + .withCache(CACHE_NAME, CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB))) + .add(new ClusteredStoreConfiguration(Consistency.STRONG))); + + latch.countDown(); + try { + latch.await(); + } catch (InterruptedException e) { + // continue + } + + clusteredCacheManagerBuilder.build(true); + } catch (Throwable t) { + exception.compareAndSet(null, t); // only keep the first exception + } + } + }; + } +} From d3ed4861320784d04320c7355d075007f038faa4 Mon Sep 17 00:00:00 2001 From: Kevin Cleereman Date: Fri, 14 Oct 2016 05:57:33 -0600 Subject: [PATCH 073/218] Issue #1468 Do not create config for existing cachemanager --- .../jsr107/EhcacheCachingProvider.java | 65 ++++++++++++++----- .../ehcache/jsr107/EhCachingProviderTest.java | 32 +++++++++ 2 files changed, 82 insertions(+), 15 deletions(-) diff --git a/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java b/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java index f9b679424e..1c50f4d6f5 100644 --- a/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java +++ b/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java @@ -76,18 +76,7 @@ public CacheManager getCacheManager(URI uri, ClassLoader classLoader, Properties } } - Configuration config; - try { - if (URI_DEFAULT.equals(uri)) { - config = new DefaultConfiguration(classLoader); - } else { - config = new XmlConfiguration(uri.toURL(), classLoader); - } - } catch (Exception e) { - throw new javax.cache.CacheException(e); - } - - return getCacheManager(uri, config, properties); + return 
getCacheManager(new ConfigSupplier(uri, classLoader), properties); } /** @@ -99,7 +88,7 @@ public CacheManager getCacheManager(URI uri, ClassLoader classLoader, Properties * @return a cache manager */ public Eh107CacheManager getCacheManager(URI uri, Configuration config) { - return getCacheManager(uri, config, new Properties()); + return getCacheManager(new ConfigSupplier(uri, config), new Properties()); } /** @@ -113,9 +102,14 @@ public Eh107CacheManager getCacheManager(URI uri, Configuration config) { * @return a cache manager */ public Eh107CacheManager getCacheManager(URI uri, Configuration config, Properties properties) { + return getCacheManager(new ConfigSupplier(uri, config), properties); + } + + Eh107CacheManager getCacheManager(ConfigSupplier configSupplier, Properties properties) { Eh107CacheManager cacheManager; ConcurrentMap byURI; - ClassLoader classLoader = config.getClassLoader(); + final ClassLoader classLoader = configSupplier.getClassLoader(); + final URI uri = configSupplier.getUri(); synchronized (cacheManagers) { byURI = cacheManagers.get(classLoader); @@ -131,7 +125,7 @@ public Eh107CacheManager getCacheManager(URI uri, Configuration config, Properti byURI.remove(uri, cacheManager); } - cacheManager = createCacheManager(uri, config, properties); + cacheManager = createCacheManager(uri, configSupplier.getConfiguration(), properties); byURI.put(uri, cacheManager); } } @@ -296,4 +290,45 @@ private static Properties cloneProperties(Properties properties) { return clone; } + static class ConfigSupplier { + private final URI uri; + private final ClassLoader classLoader; + private Configuration configuration; + + public ConfigSupplier(URI uri, ClassLoader classLoader) { + this.uri = uri; + this.classLoader = classLoader; + this.configuration = null; + } + + public ConfigSupplier(URI uri, Configuration configuration) { + this.uri = uri; + this.classLoader = configuration.getClassLoader(); + this.configuration = configuration; + } + + public URI getUri() { + return uri; + } + + public ClassLoader getClassLoader() { + return classLoader; + } + + public Configuration getConfiguration() { + if(configuration == null) { + try { + if (URI_DEFAULT.equals(uri)) { + configuration = new DefaultConfiguration(classLoader); + } else { + configuration = new XmlConfiguration(uri.toURL(), classLoader); + } + } catch (Exception e) { + throw new javax.cache.CacheException(e); + } + } + return configuration; + } + } + } diff --git a/107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java b/107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java index 1d1b296452..64b22daf32 100644 --- a/107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java +++ b/107/src/test/java/org/ehcache/jsr107/EhCachingProviderTest.java @@ -31,6 +31,7 @@ import javax.cache.configuration.MutableConfiguration; import javax.cache.spi.CachingProvider; +import org.ehcache.config.Configuration; import org.junit.Test; import com.pany.domain.Customer; @@ -77,6 +78,24 @@ public void testCacheUsesCacheManagerClassLoaderForDefaultURI() { } } + @Test + public void testClassLoadCount() throws Exception { + EhcacheCachingProvider cachingProvider = (EhcacheCachingProvider)Caching.getCachingProvider(); + URI uri = cachingProvider.getDefaultURI(); + ClassLoader classLoader = cachingProvider.getDefaultClassLoader(); + CountingConfigSupplier configSupplier = new CountingConfigSupplier(uri, classLoader); + + assertEquals(configSupplier.configCount, 0); + + cachingProvider.getCacheManager(configSupplier, new 
Properties()); + + assertEquals(configSupplier.configCount, 1); + + cachingProvider.getCacheManager(configSupplier, new Properties()); + + assertEquals(configSupplier.configCount, 1); + } + private class LimitedClassLoader extends ClassLoader { private final ClassLoader delegate; @@ -94,4 +113,17 @@ public Class loadClass(String name) throws ClassNotFoundException { } } + private static class CountingConfigSupplier extends EhcacheCachingProvider.ConfigSupplier { + private int configCount = 0; + + public CountingConfigSupplier(URI uri, ClassLoader classLoader) { + super(uri, classLoader); + } + + @Override + public Configuration getConfiguration() { + configCount++; + return super.getConfiguration(); + } + } } From 6e4d3cc5bb5aa792a8646304767daa5da9e08c5a Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Tue, 18 Oct 2016 08:11:11 -0400 Subject: [PATCH 074/218] :arrow_up: Compat' with new tc-platform 1.0.8 beta and new monitoring service --- build.gradle | 12 ++-- clustered/integration-test/build.gradle | 31 ++++++-- .../AbstractClusteringManagementTest.java | 51 +++++++++----- .../ClusteringManagementServiceTest.java | 70 ++++++++++--------- .../state/EhcacheStateServiceProvider.java | 3 +- .../EhcacheStateServiceProviderTest.java | 2 +- gradle.properties | 2 + .../DefaultClusteringManagementService.java | 10 ++- 8 files changed, 117 insertions(+), 64 deletions(-) diff --git a/build.gradle b/build.gradle index 7a10f1c0b6..d1bba67ab0 100644 --- a/build.gradle +++ b/build.gradle @@ -21,22 +21,22 @@ ext { baseVersion = findProperty('overrideVersion') ?: '3.2.0-SNAPSHOT' // Third parties - offheapVersion = '2.3.0' + offheapVersion = '2.3.1' statisticVersion = '1.2.0' jcacheVersion = '1.0.0' slf4jVersion = '1.7.7' sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.7.beta' + terracottaPlatformVersion = '5.0.8.beta2' managementVersion = terracottaPlatformVersion - terracottaApisVersion = '1.0.7.beta' - terracottaCoreVersion = '5.0.7-beta3' + terracottaApisVersion = '1.0.8.beta' + terracottaCoreVersion = '5.0.8-beta' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.7.beta2' + terracottaPassthroughTestingVersion = '1.0.8.beta' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.7-beta3' + galvanVersion = '1.0.8-beta' // Tools findbugsVersion = '3.0.1' diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index a528926eac..2501d2d035 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -16,11 +16,34 @@ import org.gradle.internal.jvm.Jvm +// all these libs should be on server classpath for management +// management-entity-server : entity used client-side to send management data into the server +// monitoring-service : server monitoring service +// monitoring-service-entity : test entity, so that we can read the server monitoring service +// management-model : management metadata describing exposed objects, stats and notifications (jdk-6 compat) +// cluster-topology : model classes describing cluster topology (jdk-8 compat) +// management-registry : service classes to expose management metadata and query api for stats (jdk-6 compat) +// sequence-generator : improved boundary flake seq generator to add seq numbers on management messages +def serverCP = [ + 'management-entity-server': ':plugin', + 'monitoring-service': ':plugin', + 'monitoring-service-entity': ':plugin', + 
'management-model': '', + 'cluster-topology': '', + 'management-registry': '', + 'sequence-generator': '', +] + +configurations { + serverClasspath +} + dependencies { testCompile project(':dist') testCompile project(':clustered:clustered-dist') testCompile project(':management') testCompile "org.terracotta.management:management-entity-client:$parent.managementVersion" + testCompile "org.terracotta.management:monitoring-service-entity:$parent.managementVersion" testCompile group:'org.terracotta', name:'galvan-support', version: galvanVersion testCompile (group:'com.google.code.tempus-fugit', name:'tempus-fugit', version:'1.1') { @@ -29,9 +52,9 @@ dependencies { } testCompile group: 'javax.cache', name: 'cache-api', version: jcacheVersion - testCompile "org.terracotta.management:management-entity-server:$parent.managementVersion:plugin" - testCompile "org.terracotta.management:monitoring-service:$parent.managementVersion:plugin" - testCompile "org.terracotta.management:monitoring-service-entity:$parent.managementVersion:plugin" + serverCP.each { k, v -> + serverClasspath "org.terracotta.management:$k:$parent.managementVersion$v" + } } task unzipKit(type: Copy) { @@ -51,7 +74,7 @@ test { environment 'JAVA_OPTS', '-Dcom.tc.l2.lockmanager.greedy.locks.enabled=false' //If this directory does not exist, tests will fail with a cryptic assert failure systemProperty 'kitInstallationPath', "$unzipKit.destinationDir/${project(':clustered:clustered-dist').archivesBaseName}-$project.version-kit" - systemProperty 'managementPlugins', ["management-model", "management-registry", "sequence-generator", "management-entity-server", "monitoring-service", "monitoring-service-entity"].collect { String artifact -> project.configurations.testCompile.find { it.name.startsWith("$artifact-$parent.managementVersion") } }.join(File.pathSeparator) + systemProperty 'managementPlugins', serverCP.keySet().collect { String artifact -> project.configurations.serverClasspath.find { it.name.startsWith("$artifact-$parent.managementVersion") } }.join(File.pathSeparator) // Uncomment to include client logging in console output // testLogging.showStandardStreams = true } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index f0e06859f1..84920a9e54 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -25,13 +25,14 @@ import org.terracotta.management.entity.management.client.ContextualReturnListener; import org.terracotta.management.entity.management.client.ManagementAgentEntityFactory; import org.terracotta.management.entity.management.client.ManagementAgentService; -import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntity; import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntityFactory; +import org.terracotta.management.entity.monitoring.client.MonitoringServiceProxyEntity; import org.terracotta.management.model.call.ContextualReturn; import org.terracotta.management.model.call.Parameter; import org.terracotta.management.model.cluster.ClientIdentifier; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.message.Message; +import 
org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; @@ -63,7 +64,7 @@ public abstract class AbstractClusteringManagementTest { + "" + "\n"; - protected static MonitoringServiceEntity consumer; + protected static MonitoringServiceProxyEntity consumer; @ClassRule public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, getManagementPlugins(), "", RESOURCE_CONFIG, ""); @@ -73,14 +74,7 @@ public static void beforeClass() throws Exception { CLUSTER.getClusterControl().waitForActive(); consumer = new MonitoringServiceEntityFactory(ConnectionFactory.connect(CLUSTER.getConnectionURI(), new Properties())).retrieveOrCreate("MonitoringConsumerEntity"); - // buffer for client-side notifications - consumer.createBestEffortBuffer("client-notifications", 1024, Message.class); - // buffer for client-side stats - consumer.createBestEffortBuffer("client-statistics", 1024, Message.class); - // buffer for platform topology changes - consumer.createBestEffortBuffer("platform-notifications", 1024, Message.class); - // buffer for entity notifications - consumer.createBestEffortBuffer("entity-notifications", 1024, Message.class); + consumer.createMessageBuffer(1024); } @After @@ -89,8 +83,7 @@ public final void clearBuffers() throws Exception { } protected final void clear() { - while (consumer.readBuffer("client-notifications", Message.class) != null) ; - while (consumer.readBuffer("client-statistics", Message.class) != null) ; + consumer.clearMessageBuffer(); } protected static void sendManagementCallToCollectStats(String... statNames) throws Exception { @@ -98,7 +91,7 @@ protected static void sendManagementCallToCollectStats(String... 
statNames) thro try { ManagementAgentService agent = new ManagementAgentService(new ManagementAgentEntityFactory(managementConnection).retrieveOrCreate(new ManagementAgentConfig())); - assertThat(agent.getManageableClients().size(), equalTo(2)); + assertThat(agent.getManageableClients().size(), equalTo(1)); // only ehcache client is manageable, not this one // find Ehcache client ClientIdentifier me = agent.getClientIdentifier(); @@ -150,11 +143,15 @@ public void onContextualReturn(ClientIdentifier from, String id, ContextualRetur } } - protected static ContextualStatistics[] waitForNextStats() { + protected static List waitForNextStats() { // uses the monitoring consumre entity to get the content of the stat buffer when some stats are collected - Message message; - while ((message = consumer.readBuffer("client-statistics", Message.class)) == null) { Thread.yield(); } - return message.unwrap(ContextualStatistics[].class); + while (true) { + Message message = consumer.readMessageBuffer(); + if (message != null && message.getType().equals("STATISTICS")) { + return message.unwrap(ContextualStatistics.class); + } + Thread.yield(); + } } private static List getManagementPlugins() { @@ -166,4 +163,24 @@ private static List getManagementPlugins() { return plugins; } + protected static List messageTypes(List messages) { + List types = new ArrayList(messages.size()); + for (Message message : messages) { + types.add(message.getType()); + } + return types; + } + + protected static List notificationTypes(List messages) { + List types = new ArrayList(messages.size()); + for (Message message : messages) { + if ("NOTIFICATION".equals(message.getType())) { + for (ContextualNotification notification : message.unwrap(ContextualNotification.class)) { + types.add(notification.getType()); + } + } + } + return types; + } + } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 52b357386e..e2ee75ecd3 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -32,35 +32,36 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; -import org.terracotta.management.entity.management.ManagementAgentConfig; -import org.terracotta.management.entity.management.client.ManagementAgentEntityFactory; import org.terracotta.management.model.capabilities.Capability; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.cluster.Client; import org.terracotta.management.model.context.ContextContainer; import org.terracotta.management.model.message.Message; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.StatisticType; import org.terracotta.management.model.stats.history.CounterHistory; -import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.List; import java.util.TreeSet; import java.util.concurrent.TimeUnit; import 
java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; -import static org.hamcrest.number.OrderingComparison.greaterThanOrEqualTo; import static org.junit.Assert.assertThat; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; -import org.terracotta.management.model.stats.StatisticType; public class ClusteringManagementServiceTest extends AbstractClusteringManagementTest { @@ -77,13 +78,13 @@ public class ClusteringManagementServiceTest extends AbstractClusteringManagemen private CacheManager cacheManager; private String clientIdentifier; - private long consumerId; + private int n = N.incrementAndGet(); @Before public void init() throws Exception { this.cacheManager = CacheManagerBuilder.newCacheManagerBuilder() // cluster config - .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-" + N.incrementAndGet())) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-" + n)) .autoCreate() .defaultServerResource("primary-server-resource")) // management config @@ -106,8 +107,12 @@ public void init() throws Exception { // ensure the CM is running and get its client id assertThat(cacheManager.getStatus(), equalTo(Status.AVAILABLE)); - consumerId = consumer.getConsumerId(ManagementAgentConfig.ENTITY_TYPE, ManagementAgentEntityFactory.ENTITYNAME); - clientIdentifier = consumer.getChildNamesForNode(consumerId, "management", "clients").iterator().next(); + for (Client client : consumer.readTopology().getClients().values()) { + if(client.getName().equals("Ehcache:my-server-entity-" + n)) { + clientIdentifier = client.getClientId(); + } + } + assertThat(clientIdentifier, is(notNullValue())); } @After @@ -119,13 +124,13 @@ public void close() throws Exception { @Test public void test_tags_exposed() throws Exception { - String[] tags = consumer.getValueForNode(consumerId, new String[]{"management", "clients", clientIdentifier, "tags"}, String[].class); + String[] tags = consumer.readTopology().getClient(clientIdentifier).get().getTags().toArray(new String[0]); assertThat(tags, equalTo(new String[]{"server-node-1", "webapp-1"})); } @Test public void test_contextContainer_exposed() throws Exception { - ContextContainer contextContainer = consumer.getValueForNode(consumerId, new String[]{"management", "clients", clientIdentifier, "registry", "contextContainer"}, ContextContainer.class); + ContextContainer contextContainer = consumer.readTopology().getClient(clientIdentifier).get().getManagementRegistry().get().getContextContainer(); assertThat(contextContainer.getValue(), equalTo("my-super-cache-manager")); assertThat(contextContainer.getSubContexts(), hasSize(1)); assertThat(contextContainer.getSubContexts().iterator().next().getValue(), equalTo("cache-1")); @@ -133,7 +138,7 @@ public void test_contextContainer_exposed() throws Exception { @Test public void test_capabilities_exposed() throws Exception { - Capability[] 
capabilities = consumer.getValueForNode(consumerId, new String[]{"management", "clients", clientIdentifier, "registry", "capabilities"}, Capability[].class); + Capability[] capabilities = consumer.readTopology().getClient(clientIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); assertThat(capabilities.length, equalTo(5)); assertThat(capabilities[0].getName(), equalTo("ActionsCapability")); assertThat(capabilities[1].getName(), equalTo("StatisticsCapability")); @@ -156,9 +161,10 @@ public void test_capabilities_exposed() throws Exception { @Test public void test_notifs_sent_at_CM_init() throws Exception { - assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_TAGS_UPDATED")); - assertThat(consumer.readBuffer("client-notifications", Serializable[].class), is(nullValue())); + List messages = consumer.drainMessageBuffer(); + assertThat(messages.size(), equalTo(14)); + assertThat(notificationTypes(messages).containsAll(Arrays.asList("CLIENT_CONNECTED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_UNFETCHED", "SERVER_ENTITY_DESTROYED", "CLIENT_REGISTRY_UPDATED", "CLIENT_TAGS_UPDATED")), is(true)); + assertThat(consumer.readMessageBuffer(), is(nullValue())); } @Test @@ -173,7 +179,7 @@ public void test_notifs_on_add_cache() throws Exception { .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .build()); - ContextContainer contextContainer = consumer.getValueForNode(consumerId, new String[]{"management", "clients", clientIdentifier, "registry", "contextContainer"}, ContextContainer.class); + ContextContainer contextContainer = consumer.readTopology().getClient(clientIdentifier).get().getManagementRegistry().get().getContextContainer(); assertThat(contextContainer.getSubContexts(), hasSize(2)); TreeSet cNames = new TreeSet(); @@ -182,10 +188,10 @@ public void test_notifs_on_add_cache() throws Exception { } assertThat(cNames, equalTo(new TreeSet(Arrays.asList("cache-1", "cache-2")))); - assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CACHE_ADDED")); - assertThat(consumer.readBuffer("client-notifications", Message.class), is(nullValue())); + List messages = consumer.drainMessageBuffer(); + assertThat(messages.size(), equalTo(3)); + assertThat(notificationTypes(messages), equalTo(Arrays.asList("CLIENT_REGISTRY_UPDATED", "CLIENT_REGISTRY_UPDATED", "CACHE_ADDED"))); + assertThat(consumer.readMessageBuffer(), is(nullValue())); } @Test @@ -194,10 +200,10 @@ public void test_notifs_on_remove_cache() throws Exception { cacheManager.removeCache("cache-2"); - assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CLIENT_REGISTRY_UPDATED")); - 
assertThat(consumer.readBuffer("client-notifications", Message.class).unwrap(ContextualNotification.class).getType(), equalTo("CACHE_REMOVED")); - assertThat(consumer.readBuffer("client-notifications", Message.class), is(nullValue())); + List messages = consumer.drainMessageBuffer(); + assertThat(messages.size(), equalTo(3)); + assertThat(notificationTypes(messages), equalTo(Arrays.asList("CLIENT_REGISTRY_UPDATED", "CLIENT_REGISTRY_UPDATED", "CACHE_REMOVED"))); + assertThat(consumer.readMessageBuffer(), is(nullValue())); } @Test @@ -220,10 +226,10 @@ public void test_stats_collection() throws Exception { do { // get the stats (we are getting the primitive counter, not the sample history) - ContextualStatistics[] stats = waitForNextStats(); - Sample[] samples = stats[0].getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + List stats = waitForNextStats(); + Sample[] samples = stats.get(0).getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); - if(stats.length == 1 && stats[0].getContext().get("cacheName").equals("cache-1") && samples.length > 0) { + if(stats.size() == 1 && stats.get(0).getContext().get("cacheName").equals("cache-1") && samples.length > 0) { val = samples[samples.length - 1].getValue(); } } while(val != 2); @@ -234,10 +240,10 @@ public void test_stats_collection() throws Exception { do { - ContextualStatistics[] stats = waitForNextStats(); - Sample[] samples = stats[0].getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + List stats = waitForNextStats(); + Sample[] samples = stats.get(0).getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); - if(stats.length == 1 && stats[0].getContext().get("cacheName").equals("cache-1") && samples.length > 0) { + if(stats.size() == 1 && stats.get(0).getContext().get("cacheName").equals("cache-1") && samples.length > 0) { val = samples[samples.length - 1].getValue(); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java index c61c27b0f1..a3147975a8 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java @@ -18,6 +18,7 @@ import org.ehcache.clustered.server.EhcacheStateServiceImpl; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; +import org.terracotta.entity.PlatformConfiguration; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceProvider; import org.terracotta.entity.ServiceProviderCleanupException; @@ -40,7 +41,7 @@ public class EhcacheStateServiceProvider implements ServiceProvider { private ConcurrentMap serviceMap = new ConcurrentHashMap(); @Override - public boolean initialize(ServiceProviderConfiguration configuration) { + public boolean initialize(ServiceProviderConfiguration configuration, PlatformConfiguration platformConfiguration) { return true; } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java index 316993adfc..da28b24b00 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java +++ 
b/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java @@ -39,7 +39,7 @@ public void testInitialize() { ServiceProviderConfiguration serviceProviderConfiguration = mock(ServiceProviderConfiguration.class); - assertTrue(serviceProvider.initialize(serviceProviderConfiguration)); + assertTrue(serviceProvider.initialize(serviceProviderConfiguration, null)); } @Test diff --git a/gradle.properties b/gradle.properties index 95d91e9e83..1a8fbca976 100644 --- a/gradle.properties +++ b/gradle.properties @@ -5,3 +5,5 @@ deployUrl = 'https://oss.sonatype.org/service/local/staging/deploy/maven2/' # Enable the daemon by adding org.gradle.daemon in USER_HOME/.gradle/gradle.properties org.gradle.parallel=true + +java6Home=/Library/Java/JavaVirtualMachines/1.6.0_65-b14-462.jdk/Contents/Home diff --git a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java b/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java index d208300d4e..3bd311cbce 100644 --- a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java +++ b/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java @@ -95,12 +95,16 @@ public void start(ServiceProvider serviceProvider) { @Override public void stop() { - collectorService.stop(); + if(collectorService != null) { + collectorService.stop(); + } shutdownNow(managementCallExecutor); // nullify so that no further actions are done with them (see null-checks below) - managementAgentService.close(); - managementRegistryService = null; + if(managementAgentService != null) { + managementAgentService.close(); + managementRegistryService = null; + } managementAgentService = null; managementCallExecutor = null; } From 0e4f6a7df694c28368c8f0c55f58ce6aaa159a06 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Tue, 11 Oct 2016 22:14:53 -0400 Subject: [PATCH 075/218] Move to gretty plugin (close #1508) --- .../src/main/webapp/WEB-INF/web.xml | 11 +++++---- .../src/main/webapp/WEB-INF/web.xml | 9 +++---- demos/build.gradle | 24 ++++++++++++++++--- docs/src/docs/asciidoc/user/examples.adoc | 4 ++-- 4 files changed, 34 insertions(+), 14 deletions(-) diff --git a/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml b/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml index 627a8c4f83..f9df833ddb 100755 --- a/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml +++ b/demos/00-NoCache/src/main/webapp/WEB-INF/web.xml @@ -14,9 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. --> - - + org.ehcache.demos.peeper.PeeperServletContextListener @@ -31,5 +32,5 @@ PeeperServlet /* - - \ No newline at end of file + + diff --git a/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml b/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml index 207674fa34..2993f41a64 100755 --- a/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml +++ b/demos/01-CacheAside/src/main/webapp/WEB-INF/web.xml @@ -14,9 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
--> - - + org.ehcache.demos.peeper.PeeperServletContextListener @@ -32,4 +33,4 @@ /* - \ No newline at end of file + diff --git a/demos/build.gradle b/demos/build.gradle index f2b79ea364..b85f66cd92 100644 --- a/demos/build.gradle +++ b/demos/build.gradle @@ -1,8 +1,26 @@ +plugins { + id 'org.akhikhl.gretty' version '1.4.0' +} + subprojects { + configurations.all { + resolutionStrategy { + // It seems jetty has some internal conflict and so those need to be forced + force 'org.ow2.asm:asm:5.0.3', 'org.ow2.asm:asm-commons:5.0.3', 'org.glassfish:javax.el:3.0.1-b08' + } + } + apply plugin: 'war' - apply plugin: 'jetty' + apply plugin: 'org.akhikhl.gretty' + + gretty { + port = 8080 + contextPath = '/' + servletContainer = 'jetty9' + } dependencies { - compile 'ch.qos.logback:logback-classic:1.1.3', 'javax.servlet:servlet-api:2.5', 'com.h2database:h2:1.4.186', project(':impl') + compile 'javax.servlet:servlet-api:2.5', project(':impl') + runtime 'ch.qos.logback:logback-classic:1.1.3', 'com.h2database:h2:1.4.192' } -} \ No newline at end of file +} diff --git a/docs/src/docs/asciidoc/user/examples.adoc b/docs/src/docs/asciidoc/user/examples.adoc index b71c1c7135..c919ba8da7 100644 --- a/docs/src/docs/asciidoc/user/examples.adoc +++ b/docs/src/docs/asciidoc/user/examples.adoc @@ -40,7 +40,7 @@ from the database to display the Peeper web page. To run this implementation: [source,bash] ---- cd ehcache3/demos/00-NoCache -../../gradlew jettyRun +../../gradlew appStart ---- This builds the necessary components, starts a http://eclipse.org/jetty/[Jetty] web service, @@ -75,7 +75,7 @@ the cache is cleared. To run this implementation: [source,bash] ---- cd ehcache3/demos/01-CacheAside -../../gradlew jettyRun +../../gradlew appStart ---- This builds the necessary components, starts a http://eclipse.org/jetty/[Jetty] web service, From 96c0fc38807c36882bd92ec56992e7335615053d Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Tue, 18 Oct 2016 15:46:07 -0400 Subject: [PATCH 076/218] :green_heart: Fix unreliable management test --- .../management/AbstractClusteringManagementTest.java | 4 +++- .../management/ClusteringManagementServiceTest.java | 6 +++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 84920a9e54..8690bd7b3f 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -83,7 +83,9 @@ public final void clearBuffers() throws Exception { } protected final void clear() { - consumer.clearMessageBuffer(); + if(consumer != null) { + consumer.clearMessageBuffer(); + } } protected static void sendManagementCallToCollectStats(String... 
statNames) throws Exception { diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index e2ee75ecd3..c0869e72ce 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -82,6 +82,9 @@ public class ClusteringManagementServiceTest extends AbstractClusteringManagemen @Before public void init() throws Exception { + // clear previous messages + clear(); + this.cacheManager = CacheManagerBuilder.newCacheManagerBuilder() // cluster config .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-" + n)) @@ -163,7 +166,7 @@ public void test_capabilities_exposed() throws Exception { public void test_notifs_sent_at_CM_init() throws Exception { List messages = consumer.drainMessageBuffer(); assertThat(messages.size(), equalTo(14)); - assertThat(notificationTypes(messages).containsAll(Arrays.asList("CLIENT_CONNECTED", "SERVER_ENTITY_CREATED", "SERVER_ENTITY_FETCHED", "SERVER_ENTITY_UNFETCHED", "SERVER_ENTITY_DESTROYED", "CLIENT_REGISTRY_UPDATED", "CLIENT_TAGS_UPDATED")), is(true)); + assertThat(notificationTypes(messages).containsAll(Arrays.asList("CLIENT_CONNECTED", "SERVER_ENTITY_FETCHED", "CLIENT_REGISTRY_UPDATED", "CLIENT_TAGS_UPDATED")), is(true)); assertThat(consumer.readMessageBuffer(), is(nullValue())); } @@ -197,6 +200,7 @@ public void test_notifs_on_add_cache() throws Exception { @Test public void test_notifs_on_remove_cache() throws Exception { test_notifs_on_add_cache(); + clear(); cacheManager.removeCache("cache-2"); From 314368ae9ba7b2b1cbde7f4978af57a706817fc2 Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Mon, 17 Oct 2016 21:26:39 +0530 Subject: [PATCH 077/218] :bug: Fix #1543 : Use SIzeHistory for size in bytes related stats For example, OnHeap:OccupiedByteSize --- build.gradle | 2 +- .../ClusteringManagementServiceTest.java | 14 +++---- .../statistics/StandardEhcacheStatistics.java | 37 ++++++++++++------- .../StandardEhcacheStatisticsTest.java | 29 ++++++++------- .../providers/statistics/StatsUtil.java | 8 ++-- .../DefaultManagementRegistryServiceTest.java | 10 ++--- 6 files changed, 56 insertions(+), 44 deletions(-) diff --git a/build.gradle b/build.gradle index d1bba67ab0..e38d92d174 100644 --- a/build.gradle +++ b/build.gradle @@ -22,7 +22,7 @@ ext { // Third parties offheapVersion = '2.3.1' - statisticVersion = '1.2.0' + statisticVersion = '1.3.0' jcacheVersion = '1.0.0' slf4jVersion = '1.7.7' sizeofVersion = '0.3.0' diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index c0869e72ce..8afc6a1745 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -269,7 +269,7 @@ public static void initDescriptors() throws ClassNotFoundException { ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new 
StatisticDescriptor("OnHeap:HitLatencyAverage" , StatisticType.AVERAGE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMinimum" , StatisticType.DURATION_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedBytesCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedByteSize", StatisticType.SIZE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRate" , StatisticType.RATE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyAverage" , StatisticType.AVERAGE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , StatisticType.COUNTER_HISTORY)); @@ -283,10 +283,10 @@ public static void initDescriptors() throws ClassNotFoundException { OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRate", StatisticType.RATE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedBytesCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedByteSize", StatisticType.SIZE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedByteSize", StatisticType.SIZE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); @@ -305,13 +305,13 @@ public static void initDescriptors() throws ClassNotFoundException { DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MaxMappingCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRate", StatisticType.RATE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedBytesCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedByteSize", StatisticType.SIZE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionRate", StatisticType.RATE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedByteSize", StatisticType.SIZE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMaximum", 
StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); @@ -333,13 +333,13 @@ public static void initDescriptors() throws ClassNotFoundException { CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissRate", StatisticType.RATE_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:OccupiedBytesCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:OccupiedByteSize", StatisticType.SIZE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitRatioRatio", StatisticType.RATIO_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissRatioRatio", StatisticType.RATIO_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:AllocatedByteSize", StatisticType.SIZE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MappingCount", StatisticType.COUNTER_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionRate", StatisticType.RATE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java index 7e7cc82854..494a90c215 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -23,12 +23,13 @@ import org.ehcache.management.providers.ExposedCacheBinding; import org.terracotta.context.extended.OperationStatisticDescriptor; import org.terracotta.context.extended.RegisteredCompoundStatistic; +import org.terracotta.context.extended.RegisteredCounterStatistic; import org.terracotta.context.extended.RegisteredRatioStatistic; +import org.terracotta.context.extended.RegisteredSizeStatistic; import org.terracotta.context.extended.RegisteredStatistic; -import org.terracotta.context.extended.RegisteredValueStatistic; import org.terracotta.context.extended.StatisticsRegistry; -import org.terracotta.context.extended.ValueStatisticDescriptor; import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.stats.MemoryUnit; import org.terracotta.management.model.stats.NumberUnit; import org.terracotta.management.model.stats.Sample; import org.terracotta.management.model.stats.Statistic; @@ -37,13 +38,13 @@ import org.terracotta.management.model.stats.history.DurationHistory; import 
org.terracotta.management.model.stats.history.RateHistory; import org.terracotta.management.model.stats.history.RatioHistory; +import org.terracotta.management.model.stats.history.SizeHistory; import org.terracotta.statistics.archive.Timestamped; import org.terracotta.statistics.extended.CompoundOperation; import org.terracotta.statistics.extended.SampledStatistic; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; import java.util.List; @@ -91,10 +92,10 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { allOf(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class)); statisticsRegistry.registerRatios("HitRatio", getTierStatisticDescriptor, of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT), allOf(tierOperationGetOucomeClass)); statisticsRegistry.registerRatios("MissRatio", getTierStatisticDescriptor, of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS), allOf(tierOperationGetOucomeClass)); - statisticsRegistry.registerValue("MappingCount", descriptor("mappings", singleton("tier"))); - statisticsRegistry.registerValue("MaxMappingCount", descriptor("maxMappings", singleton("tier"))); - statisticsRegistry.registerValue("AllocatedBytesCount", descriptor("allocatedMemory", singleton("tier"))); - statisticsRegistry.registerValue("OccupiedBytesCount", descriptor("occupiedMemory", singleton("tier"))); + statisticsRegistry.registerCounter("MappingCount", descriptor("mappings", singleton("tier"))); + statisticsRegistry.registerCounter("MaxMappingCount", descriptor("maxMappings", singleton("tier"))); + statisticsRegistry.registerSize("AllocatedByteSize", descriptor("allocatedMemory", singleton("tier"))); + statisticsRegistry.registerSize("OccupiedByteSize", descriptor("occupiedMemory", singleton("tier"))); Map registrations = statisticsRegistry.getRegistrations(); for (RegisteredStatistic registeredStatistic : registrations.values()) { @@ -140,11 +141,16 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { SampledStatistic ratio = (SampledStatistic) compoundOperation.ratioOf((Set) registeredRatioStatistic.getNumerator(), (Set) registeredRatioStatistic.getDenominator()); return new RatioHistory(buildHistory(ratio, since), NumberUnit.RATIO); } - } else if (registeredStatistic instanceof RegisteredValueStatistic) { - RegisteredValueStatistic registeredValueStatistic = (RegisteredValueStatistic) registeredStatistic; - + } else if (registeredStatistic instanceof RegisteredSizeStatistic) { + RegisteredSizeStatistic registeredSizeStatistic = (RegisteredSizeStatistic) registeredStatistic; + if (name.equals(statisticName)) { + SampledStatistic count = (SampledStatistic) registeredSizeStatistic.getSampledStatistic(); + return new SizeHistory(buildHistory(count, since), MemoryUnit.B); + } + } else if (registeredStatistic instanceof RegisteredCounterStatistic) { + RegisteredCounterStatistic registeredCounterStatistic = (RegisteredCounterStatistic) registeredStatistic; if (name.equals(statisticName)) { - SampledStatistic count = (SampledStatistic) registeredValueStatistic.getSampledStatistic(); + SampledStatistic count = (SampledStatistic) registeredCounterStatistic.getSampledStatistic(); return new CounterHistory(buildHistory(count, since), NumberUnit.COUNT); } } else { @@ -181,7 +187,8 @@ private Set queryStatisticsRegistry() { Map registrations = statisticsRegistry.getRegistrations(); for(Entry entry : registrations.entrySet()) { - RegisteredStatistic 
registeredStatistic = registrations.get(entry.getKey().toString()); + String statisticName = entry.getKey().toString(); + RegisteredStatistic registeredStatistic = registrations.get(statisticName); if(registeredStatistic instanceof RegisteredCompoundStatistic) { List statistics = new ArrayList(); @@ -194,8 +201,10 @@ private Set queryStatisticsRegistry() { capabilities.addAll(statistics); } else if(registeredStatistic instanceof RegisteredRatioStatistic) { capabilities.add(new StatisticDescriptor(entry.getKey() + "Ratio", StatisticType.RATIO_HISTORY)); - } else if(registeredStatistic instanceof RegisteredValueStatistic) { - capabilities.add(new StatisticDescriptor(entry.getKey().toString(), StatisticType.COUNTER_HISTORY)); + } else if(registeredStatistic instanceof RegisteredCounterStatistic) { + capabilities.add(new StatisticDescriptor(statisticName, StatisticType.COUNTER_HISTORY)); + } else if(registeredStatistic instanceof RegisteredSizeStatistic) { + capabilities.add(new StatisticDescriptor(statisticName, StatisticType.SIZE_HISTORY)); } } diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java index c48f26c575..cb9485f800 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java @@ -36,6 +36,9 @@ import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.history.CounterHistory; import org.terracotta.management.model.stats.history.RatioHistory; +import org.terracotta.management.model.stats.history.SizeHistory; + +import static org.junit.Assert.assertThat; /** * @@ -82,17 +85,17 @@ public void statsCacheMissTest() throws InterruptedException { .execute() .getSingleResult(); - Assert.assertThat(missCounter.size(), Matchers.is(2)); + assertThat(missCounter.size(), Matchers.is(2)); CounterHistory missCountCounterHistory = missCounter.getStatistic(CounterHistory.class, "Cache:MissCount"); while(!StatsUtil.isHistoryReady(missCountCounterHistory, 0L)) {} int mostRecentIndex = missCountCounterHistory.getValue().length - 1; - Assert.assertThat(missCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(2L)); + assertThat(missCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(2L)); RatioHistory ratioHistory = missCounter.getStatistic(RatioHistory.class, "Cache:MissRatio"); mostRecentIndex = ratioHistory.getValue().length - 1; // 2 hits, 2 misses -> HitRatio is 0.5 - Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0.5d)); + assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0.5d)); } finally { if(cacheManager != null) { @@ -135,7 +138,7 @@ public void statsCacheHitTest() throws InterruptedException { .execute() .getSingleResult(); - Assert.assertThat(contextualStatistics.size(), Matchers.is(2)); + assertThat(contextualStatistics.size(), Matchers.is(2)); /////////////////////// // NO HITS, NO MISSES// @@ -143,12 +146,12 @@ public void statsCacheHitTest() throws InterruptedException { CounterHistory hitCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:HitCount"); int mostRecentIndex = hitCountCounterHistory.getValue().length - 1; - 
Assert.assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0L)); + assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0L)); RatioHistory ratioHistory = contextualStatistics.getStatistic(RatioHistory.class, "Cache:HitRatio"); mostRecentIndex = ratioHistory.getValue().length - 1; // no hits, no misses -> HitRatio is NaN - Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(Double.NaN)); + assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(Double.NaN)); /////////////////////// // 3 HITS, NO MISSES // @@ -170,12 +173,12 @@ public void statsCacheHitTest() throws InterruptedException { hitCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:HitCount"); while(!StatsUtil.isHistoryReady(hitCountCounterHistory, 0L)) {} mostRecentIndex = hitCountCounterHistory.getValue().length - 1; - Assert.assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3L)); + assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3L)); ratioHistory = contextualStatistics.getStatistic(RatioHistory.class, "Cache:HitRatio"); mostRecentIndex = ratioHistory.getValue().length - 1; // 3 hits, no misses -> HitRatio is 1 - Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(1.0)); + assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(1.0)); /////////////////////// // 3 HITS, 1 MISS // @@ -195,17 +198,17 @@ public void statsCacheHitTest() throws InterruptedException { CounterHistory missCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:MissCount"); mostRecentIndex = missCountCounterHistory.getValue().length - 1; while(!StatsUtil.isHistoryReady(missCountCounterHistory, 0L)) {} - Assert.assertThat(missCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(1L)); + assertThat(missCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(1L)); ratioHistory = contextualStatistics.getStatistic(RatioHistory.class, "Cache:HitRatio"); mostRecentIndex = ratioHistory.getValue().length - 1; // 3 hits, 1 misses -> HitRatio is 0.75 - Assert.assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0.75)); + assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0.75)); hitCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:HitCount"); mostRecentIndex = hitCountCounterHistory.getValue().length - 1; - Assert.assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3L)); + assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3L)); } finally { if(cacheManager != null) { @@ -254,12 +257,12 @@ public void statsClearCacheTest() throws InterruptedException { .execute() .getSingleResult(); - Assert.assertThat(clearCounter.size(), Matchers.is(1)); + assertThat(clearCounter.size(), Matchers.is(1)); CounterHistory cache_Clear_Count = clearCounter.getStatistic(CounterHistory.class, "Cache:ClearCount"); while(!StatsUtil.isHistoryReady(cache_Clear_Count, 0L)) {} int mostRecentIndex = cache_Clear_Count.getValue().length - 1; - Assert.assertThat(cache_Clear_Count.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(2L)); + assertThat(cache_Clear_Count.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(2L)); } finally { 
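[Editor's aside] The assertions above guard every read of a CounterHistory with StatsUtil.isHistoryReady(history, 0L) because statistic histories are sampled asynchronously and can still be empty (or stale) when the query returns. A sketch of that wait-then-read pattern as a standalone helper is shown below; the class and method names are illustrative, while CounterHistory and Sample are the types the test already imports, and the busy-wait mirrors the existing isHistoryReady utility.

[source,java]
----
import org.terracotta.management.model.stats.Sample;
import org.terracotta.management.model.stats.history.CounterHistory;

// Illustrative helper, not part of the patch: waits until the history holds a
// sample above the given baseline, then returns the most recent value.
final class HistoryAssertions {

  private HistoryAssertions() {
  }

  static long awaitLatestValueAbove(CounterHistory history, long baseline) {
    while (true) {
      Sample[] samples = history.getValue();
      if (samples.length > 0) {
        // same cast the surrounding test utilities perform on counter samples
        Long newest = (Long) samples[samples.length - 1].getValue();
        if (newest > baseline) {
          return newest;
        }
      }
      Thread.yield(); // same busy-wait style as the tests above
    }
  }
}
----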
if(cacheManager != null) { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java index c91de2737d..2d5bf24425 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java @@ -51,10 +51,10 @@ public static boolean isHistoryReady(AbstractStatisticHistory counterHistory) { return false; } - public static boolean isHistoryReady(AbstractStatisticHistory counterHistory, Long defaultValue) { - if(counterHistory.getValue().length > 0) { - int mostRecentIndex = counterHistory.getValue().length - 1; - if((Long)counterHistory.getValue()[mostRecentIndex].getValue() > defaultValue) { + public static boolean isHistoryReady(AbstractStatisticHistory history, Long defaultValue) { + if(history.getValue().length > 0) { + int mostRecentIndex = history.getValue().length - 1; + if((Long)history.getValue()[mostRecentIndex].getValue() > defaultValue) { return true; } } diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index d4f4aeac40..625def7b91 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -531,7 +531,7 @@ public static void loadStatsUtil() throws ClassNotFoundException { ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyAverage" , StatisticType.AVERAGE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMinimum" , StatisticType.DURATION_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedBytesCount" , StatisticType.COUNTER_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedByteSize" , StatisticType.SIZE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRate" , StatisticType.RATE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyAverage" , StatisticType.AVERAGE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , StatisticType.COUNTER_HISTORY)); @@ -545,10 +545,10 @@ public static void loadStatsUtil() throws ClassNotFoundException { OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRate", StatisticType.RATE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedBytesCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedByteSize", StatisticType.SIZE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new 
StatisticDescriptor("OffHeap:AllocatedByteSize", StatisticType.SIZE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); @@ -567,13 +567,13 @@ public static void loadStatsUtil() throws ClassNotFoundException { DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MaxMappingCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRate", StatisticType.RATE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedBytesCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedByteSize", StatisticType.SIZE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionRate", StatisticType.RATE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedBytesCount", StatisticType.COUNTER_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedByteSize", StatisticType.SIZE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); From cbb3b64ec6ab134eff54c5bbbfc0af33f3fa1daa Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Thu, 20 Oct 2016 14:16:06 -0400 Subject: [PATCH 078/218] Log in info and with a logger that matches the output shown in example.adoc (#1534) --- demos/00-NoCache/src/main/resources/logback.xml | 11 +++++++++++ demos/01-CacheAside/src/main/resources/logback.xml | 11 +++++++++++ 2 files changed, 22 insertions(+) create mode 100644 demos/00-NoCache/src/main/resources/logback.xml create mode 100644 demos/01-CacheAside/src/main/resources/logback.xml diff --git a/demos/00-NoCache/src/main/resources/logback.xml b/demos/00-NoCache/src/main/resources/logback.xml new file mode 100644 index 0000000000..4ea574f5f5 --- /dev/null +++ b/demos/00-NoCache/src/main/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + diff --git a/demos/01-CacheAside/src/main/resources/logback.xml b/demos/01-CacheAside/src/main/resources/logback.xml new file mode 100644 index 0000000000..4ea574f5f5 --- /dev/null +++ b/demos/01-CacheAside/src/main/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + From 33b3c341df11f407638edeaaebbd270089fb43fa Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Thu, 20 Oct 2016 14:17:22 -0400 Subject: [PATCH 079/218] Do not log the stacktrace, only the message since it's in debug (close #1534) --- .../builders/UserManagedCacheBuilder.java | 53 +++++++++---------- 1 file 
changed, 26 insertions(+), 27 deletions(-) diff --git a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java b/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java index 8682da4e33..1f60dbf2fc 100644 --- a/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/UserManagedCacheBuilder.java @@ -17,54 +17,53 @@ package org.ehcache.config.builders; import org.ehcache.Cache; +import org.ehcache.CachePersistenceException; +import org.ehcache.UserManagedCache; import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.ResourceType; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.Ehcache; import org.ehcache.core.EhcacheWithLoaderWriter; import org.ehcache.core.InternalCache; import org.ehcache.core.PersistentUserManagedEhcache; -import org.ehcache.UserManagedCache; import org.ehcache.core.config.BaseCacheConfiguration; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.config.ResourceType; +import org.ehcache.core.events.CacheEventDispatcher; +import org.ehcache.core.events.CacheEventListenerConfiguration; +import org.ehcache.core.events.CacheEventListenerProvider; +import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.core.internal.store.StoreSupport; +import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.core.spi.LifeCycled; +import org.ehcache.core.spi.LifeCycledAdapter; import org.ehcache.core.spi.service.DiskResourceService; +import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.heap.SizeOfEngine; -import org.ehcache.impl.events.CacheEventDispatcherImpl; -import org.ehcache.core.internal.store.StoreSupport; +import org.ehcache.core.spi.store.heap.SizeOfEngineProvider; import org.ehcache.event.CacheEventListener; -import org.ehcache.core.events.CacheEventListenerConfiguration; -import org.ehcache.core.events.CacheEventListenerProvider; +import org.ehcache.expiry.Expirations; +import org.ehcache.expiry.Expiry; import org.ehcache.impl.config.copy.DefaultCopierConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.ehcache.core.events.CacheEventDispatcher; -import org.ehcache.impl.internal.events.DisabledCacheEventNotificationService; -import org.ehcache.CachePersistenceException; -import org.ehcache.expiry.Expirations; -import org.ehcache.expiry.Expiry; import org.ehcache.impl.copy.SerializingCopier; +import org.ehcache.impl.events.CacheEventDispatcherImpl; +import org.ehcache.impl.internal.events.DisabledCacheEventNotificationService; import org.ehcache.impl.internal.spi.event.DefaultCacheEventListenerProvider; -import org.ehcache.core.spi.LifeCycled; -import org.ehcache.core.spi.LifeCycledAdapter; -import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.Store; -import org.ehcache.spi.persistence.PersistableResourceService; -import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.copy.Copier; 
import org.ehcache.spi.loaderwriter.CacheLoaderWriter; +import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; -import org.ehcache.core.spi.store.heap.SizeOfEngineProvider; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; -import org.ehcache.spi.service.ServiceDependencies; -import org.ehcache.core.internal.util.ClassLoading; +import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -80,10 +79,10 @@ import static org.ehcache.config.ResourceType.Core.OFFHEAP; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT; -import static org.ehcache.core.internal.service.ServiceLocator.findSingletonAmongst; /** * The {@code UserManagedCacheBuilder} enables building {@link UserManagedCache}s using a fluent style. @@ -268,7 +267,7 @@ public void close() throws Exception { if (resources.contains(OFFHEAP) || resources.contains(DISK)) { throw new RuntimeException(e); } else { - LOGGER.debug("Could not create serializers for user managed cache {}", id, e); + LOGGER.debug("Serializers for cache '{}' failed creation ({}). 
However, depending on the configuration, they might not be needed", id, e.getMessage()); } } } From 7b8de7ab1a428e2e55c641f1a9c399510b856754 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Fri, 7 Oct 2016 17:10:51 +0530 Subject: [PATCH 080/218] Closes #1479 Active-Passive data sync --- .../client/ActivePassiveSyncTest.java | 158 ------------------ .../common/internal/messages/ChainCodec.java | 2 +- .../messages/ClientIDTrackerMessage.java | 4 +- .../common/internal/messages/CodecUtil.java | 2 +- .../messages/ConcurrentEntityMessage.java | 2 +- .../messages/ServerStoreOpMessage.java | 13 +- .../messages/ServerStoreOpMessageTest.java | 30 ++-- .../clustered/sync/PassiveSyncTest.java | 65 ++++++- .../server/ConcurrencyStrategies.java | 27 +-- .../clustered/server/EhcacheActiveEntity.java | 24 ++- .../server/EhcachePassiveEntity.java | 40 +++-- .../server/EhcacheServerEntityService.java | 11 +- .../server/EhcacheStateServiceImpl.java | 10 +- .../clustered/server/KeySegmentMapper.java | 37 ++++ .../clustered/server/ServerStoreImpl.java | 12 +- .../messages/EhcacheSyncMessageCodec.java | 94 +++++++++++ .../messages/EntityDataSyncMessage.java | 52 ++++++ .../messages/EntityStateSyncMessage.java | 32 +--- .../internal/messages/EntitySyncMessage.java | 85 ++++++++++ .../messages/EhcacheSyncMessageCodec.java | 56 ------- .../server/offheap/OffHeapChainMap.java | 15 +- .../server/offheap/OffHeapServerStore.java | 15 +- .../state/EhcacheStateServiceProvider.java | 3 +- .../config/EhcacheStateServiceConfig.java | 11 +- .../DefaultConcurrencyStrategyTest.java | 31 +++- .../server/EhcacheActiveEntityTest.java | 135 ++++++++------- .../server/EhcachePassiveEntityTest.java | 29 ++-- .../messages/EhcacheSyncMessageCodecTest.java | 18 +- .../offheap/OffHeapServerStoreTest.java | 7 +- .../EhcacheStateServiceProviderTest.java | 19 ++- 30 files changed, 610 insertions(+), 429 deletions(-) delete mode 100644 clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityDataSyncMessage.java rename clustered/server/src/main/java/org/ehcache/clustered/server/{ => internal}/messages/EntityStateSyncMessage.java (73%) create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntitySyncMessage.java delete mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodec.java rename clustered/server/src/test/java/org/ehcache/clustered/server/{ => internal}/messages/EhcacheSyncMessageCodecTest.java (87%) diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java deleted file mode 100644 index 79725ef33e..0000000000 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/ActivePassiveSyncTest.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
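The UserManagedCacheBuilder change in the preceding patch hinges on how SLF4J treats a trailing Throwable: when the last argument is an exception with no matching placeholder, the full stack trace is appended to the log entry, whereas passing e.getMessage() interpolates only the message text. A minimal sketch of that difference (the class name, cache id, and exception below are illustrative, not part of the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {
  private static final Logger LOGGER = LoggerFactory.getLogger(LoggingSketch.class);

  public static void main(String[] args) {
    Exception e = new IllegalStateException("no serializer found");
    String id = "my-cache";

    // Trailing Throwable with no placeholder: SLF4J appends the full stack trace.
    LOGGER.debug("Could not create serializers for user managed cache {}", id, e);

    // Only the message is interpolated: no stack trace in the output.
    LOGGER.debug("Serializers for cache '{}' failed creation ({}).", id, e.getMessage());
  }
}
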
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.client; - -import org.ehcache.clustered.client.config.ClusteredResourcePool; -import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.clustered.client.internal.EhcacheClientEntity; -import org.ehcache.clustered.client.internal.EhcacheClientEntityService; -import org.ehcache.clustered.client.internal.UnitTestConnectionService; -import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; -import org.ehcache.clustered.client.internal.service.ClusteredTierManagerValidationException; -import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; -import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.exceptions.LifecycleException; -import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; -import org.ehcache.clustered.server.EhcacheServerEntityService; -import org.ehcache.impl.serialization.CompactJavaSerializer; -import org.hamcrest.Matchers; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; -import org.terracotta.offheapresource.OffHeapResourcesProvider; -import org.terracotta.offheapresource.config.MemoryUnit; -import org.terracotta.passthrough.PassthroughClusterControl; -import org.terracotta.passthrough.PassthroughServer; -import org.terracotta.passthrough.PassthroughTestHelpers; - -import java.lang.reflect.Field; -import java.net.URI; -import java.util.concurrent.TimeoutException; - -import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; -import static org.ehcache.config.units.MemoryUnit.MB; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; - -public class ActivePassiveSyncTest { - - private PassthroughClusterControl clusterControl; - private static String STRIPENAME = "stripe"; - private static String STRIPE_URI = "passthrough://" + STRIPENAME; - - @Before - public void setUp() throws Exception { - this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, - new PassthroughTestHelpers.ServerInitializer() { - @Override - public void registerServicesForServer(PassthroughServer server) { - server.registerServerEntityService(new EhcacheServerEntityService()); - server.registerClientEntityService(new EhcacheClientEntityService()); - server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); - server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - server.registerServiceProvider(new OffHeapResourcesProvider(), - new 
OffHeapResourcesConfiguration(getOffheapResourcesType("test", 32, MemoryUnit.MB))); - - UnitTestConnectionService.addServerToStripe(STRIPENAME, server); - } - } - ); - - clusterControl.waitForActive(); - clusterControl.waitForRunningPassivesInStandby(); - } - - @After - public void tearDown() throws Exception { - UnitTestConnectionService.removeStripe(STRIPENAME); - clusterControl.tearDown(); - } - - @Test - public void testTierManagerStateSync() throws Exception { - clusterControl.terminateOnePassive(); - - ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) - .autoCreate() - .defaultServerResource("test") - .resourcePool("foo", 8L, MB) - .build(); - - ClusteringService service = new ClusteringServiceFactory().create(configuration); - service.start(null); - EhcacheClientEntity clientEntity = getEntity(service); - - clusterControl.startOneServer(); - clusterControl.waitForRunningPassivesInStandby(); - clusterControl.terminateActive(); - - try { - clientEntity.validate(configuration.getServerConfiguration()); - fail("ClusteredTierManagerValidationException Expected."); - } catch (ClusteredTierManagerValidationException e) { - assertThat(e.getCause(), instanceOf(LifecycleException.class)); - assertThat(e.getCause().getMessage(), containsString("is already being tracked with Client Id")); - } - service.stop(); - } - - @Test - public void testServerStoreStateSync() throws Exception { - clusterControl.terminateOnePassive(); - - ClusteringServiceConfiguration configuration = - ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) - .autoCreate() - .defaultServerResource("test") - .build(); - - ClusteringService service = new ClusteringServiceFactory().create(configuration); - service.start(null); - EhcacheClientEntity clientEntity = getEntity(service); - clientEntity.createCache("foo", getServerStoreConfiguration("test")); - - clusterControl.startOneServer(); - clusterControl.waitForRunningPassivesInStandby(); - clusterControl.terminateActive(); - - clientEntity.validateCache("foo", getServerStoreConfiguration("test")); - - service.stop(); - } - - private static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { - Field entity = clusteringService.getClass().getDeclaredField("entity"); - entity.setAccessible(true); - return (EhcacheClientEntity)entity.get(clusteringService); - } - - private static ServerStoreConfiguration getServerStoreConfiguration(String resourceName) { - ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(resourceName, 8, MB); - return new ServerStoreConfiguration(resourcePool.getPoolAllocation(), - String.class.getName(), String.class.getName(), null, null, CompactJavaSerializer.class.getName(), CompactJavaSerializer.class - .getName(), Consistency.STRONG); - } -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java index b21d9a8a5a..66f8815972 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java @@ -27,7 +27,7 @@ import static org.ehcache.clustered.common.internal.store.Util.getElement; import static org.ehcache.clustered.common.internal.store.Util.getChain; -class ChainCodec { +public class 
ChainCodec { private static final byte NON_SEQUENCED_CHAIN = 0; private static final byte SEQUENCED_CHAIN = 1; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java index b1cb45b4a7..6941205ac4 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java @@ -119,8 +119,8 @@ public ReplicationOp operation() { } @Override - public int concurrencyKey() { - return (int) (this.cacheId.hashCode() + key); + public long concurrencyKey() { + return (this.cacheId.hashCode() + key); } } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java index 9e80bdd9b8..f066309c18 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CodecUtil.java @@ -18,7 +18,7 @@ import java.nio.ByteBuffer; -final class CodecUtil { +public final class CodecUtil { private CodecUtil() { } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java index add54e0ecc..8f339a59c8 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConcurrentEntityMessage.java @@ -30,6 +30,6 @@ public interface ConcurrentEntityMessage extends EntityMessage { * @see org.terracotta.entity.ConcurrencyStrategy#concurrencyKey(EntityMessage) * @return the concurrency key */ - int concurrencyKey(); + long concurrencyKey(); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java index 2604fd284c..4d8fbcad78 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java @@ -21,7 +21,7 @@ import java.nio.ByteBuffer; import java.util.UUID; -public abstract class ServerStoreOpMessage extends EhcacheEntityMessage implements ConcurrentEntityMessage { +public abstract class ServerStoreOpMessage extends EhcacheEntityMessage { public enum ServerStoreOp { GET_AND_APPEND((byte) 11), @@ -94,11 +94,6 @@ public String getCacheId() { return cacheId; } - @Override - public int concurrencyKey() { - return cacheId.hashCode(); - } - @Override public Type getType() { return Type.SERVER_STORE_OP; @@ -116,7 +111,7 @@ public String toString() { return getType() + "#" + operation(); } - public static abstract class KeyBasedServerStoreOpMessage extends ServerStoreOpMessage { + public static abstract class KeyBasedServerStoreOpMessage extends ServerStoreOpMessage implements ConcurrentEntityMessage { private final long key; @@ -130,8 +125,8 @@ public long getKey() { } @Override - public int concurrencyKey() { - return (int) 
(super.concurrencyKey() + key); + public long concurrencyKey() { + return key; } } diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java index 7890e99b34..3d972313cc 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessageTest.java @@ -32,39 +32,29 @@ public class ServerStoreOpMessageTest { private static final UUID CLIENT_ID = UUID.randomUUID(); - @Test - public void testConcurrencyKeysEqualForSameCache() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.ClearMessage("cache1", CLIENT_ID); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.ClientInvalidationAck("cache1", 1); - - assertThat(m1.concurrencyKey(), is(m2.concurrencyKey())); - } - @Test public void testConcurrencyKeysEqualForSameCacheAndKey() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); - ServerStoreOpMessage m3 = new ServerStoreOpMessage.GetMessage("cache1", 1L); - ServerStoreOpMessage m4 = new ServerStoreOpMessage.ReplaceAtHeadMessage("cache1", 1L, getChain(Collections.emptyList()), getChain(Collections.emptyList()), CLIENT_ID); + ConcurrentEntityMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ConcurrentEntityMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ConcurrentEntityMessage m3 = new ServerStoreOpMessage.ReplaceAtHeadMessage("cache1", 1L, getChain(Collections.emptyList()), getChain(Collections.emptyList()), CLIENT_ID); assertThat(m1.concurrencyKey(), is(m2.concurrencyKey())); assertThat(m2.concurrencyKey(), is(m3.concurrencyKey())); - assertThat(m3.concurrencyKey(), is(m4.concurrencyKey())); } @Test - public void testConcurrencyKeysNotEqualForDifferentCaches() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 1L, createPayload(1L), CLIENT_ID); + public void testConcurrencyKeysEqualForDifferentCachesSameKey() throws Exception { + ConcurrentEntityMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ConcurrentEntityMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 1L, createPayload(1L), CLIENT_ID); - assertThat(m1.concurrencyKey(), not(m2.concurrencyKey())); + assertThat(m1.concurrencyKey(), is(m2.concurrencyKey())); } @Test public void testConcurrencyKeysNotEqualForDifferentCachesAndKeys() throws Exception { - ServerStoreOpMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); - ServerStoreOpMessage m2 = new ServerStoreOpMessage.GetAndAppendMessage("cache2", 1L, createPayload(1L), CLIENT_ID); - ServerStoreOpMessage m3 = new ServerStoreOpMessage.AppendMessage("cache1", 2L, createPayload(1L), CLIENT_ID); + ConcurrentEntityMessage m1 = new ServerStoreOpMessage.AppendMessage("cache1", 1L, createPayload(1L), CLIENT_ID); + ConcurrentEntityMessage m2 = new 
ServerStoreOpMessage.GetAndAppendMessage("cache2", 2L, createPayload(1L), CLIENT_ID); + ConcurrentEntityMessage m3 = new ServerStoreOpMessage.AppendMessage("cache1", 3L, createPayload(1L), CLIENT_ID); assertThat(m1.concurrencyKey(), not(m2.concurrencyKey())); assertThat(m1.concurrencyKey(), not(m3.concurrencyKey())); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java index d2e21f8d6e..d5a36fb44b 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java @@ -28,12 +28,15 @@ import org.junit.After; import org.junit.Before; import org.junit.ClassRule; +import org.junit.Ignore; import org.junit.Test; import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; import java.io.File; import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; @@ -81,11 +84,71 @@ public void testSync() throws Exception { Cache cache = cacheManager.createCache("clustered-cache", config); + for (long i = -5; i < 5; i++) { + cache.put(i, "value" + i); + } + + CLUSTER.getClusterControl().startOneServer(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + CLUSTER.getClusterControl().terminateActive(); + + for (long i = -5; i < 5; i++) { + assertThat(cache.get(i), equalTo("value" + i)); + } + } finally { + cacheManager.close(); + } + } + + @Ignore + @Test + public void testLifeCycleOperationsOnSync() throws Exception { + CLUSTER.getClusterControl().terminateOnePassive(); + + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); + + try { + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 1, MemoryUnit.MB))).build(); + + final Cache cache = cacheManager.createCache("clustered-cache", config); + + for (long i = 0; i < 100; i++) { + cache.put(i, "value" + i); + } + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicBoolean complete = new AtomicBoolean(false); + Thread lifeCycleThread = new Thread(new Runnable() { + @Override + public void run() { + while (!complete.get()) { + try { + latch.await(); + clusteredCacheManagerBuilder.build(true); + Thread.sleep(200); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + } + }); + lifeCycleThread.start(); CLUSTER.getClusterControl().startOneServer(); + latch.countDown(); CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); + complete.set(true); - cache.put(1L, "The one"); //If this doesn't throw it means that the state replication worked + for (long i = 0; i < 100; i++) { + assertThat(cache.get(i), equalTo("value" + i)); + } } finally { cacheManager.close(); } diff --git 
a/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java index 3b246948d7..b591337b59 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ConcurrencyStrategies.java @@ -16,10 +16,11 @@ package org.ehcache.clustered.server; import java.util.Collections; -import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.Set; import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.terracotta.entity.ConcurrencyStrategy; import org.terracotta.entity.EntityMessage; @@ -29,32 +30,36 @@ public final class ConcurrencyStrategies { private ConcurrencyStrategies() { } - public static final ConcurrencyStrategy defaultConcurrency(int bucketCount) { - return new DefaultConcurrencyStrategy(bucketCount); + public static final ConcurrencyStrategy defaultConcurrency(KeySegmentMapper mapper) { + return new DefaultConcurrencyStrategy(mapper); } public static class DefaultConcurrencyStrategy implements ConcurrencyStrategy { public static final int DEFAULT_KEY = 1; + public static final int DATA_CONCURRENCY_KEY_OFFSET = DEFAULT_KEY + 1; - private final int bucketCount; + private final KeySegmentMapper mapper; - public DefaultConcurrencyStrategy(int bucketCount) { - this.bucketCount = bucketCount; + public DefaultConcurrencyStrategy(KeySegmentMapper mapper) { + this.mapper = mapper; } @Override public int concurrencyKey(EntityMessage entityMessage) { - if (entityMessage instanceof ConcurrentEntityMessage) { + if (entityMessage instanceof ServerStoreOpMessage.GetMessage) { + return UNIVERSAL_KEY; + } else if (entityMessage instanceof ConcurrentEntityMessage) { ConcurrentEntityMessage concurrentEntityMessage = (ConcurrentEntityMessage) entityMessage; - return DEFAULT_KEY + Math.abs(concurrentEntityMessage.concurrencyKey() % bucketCount); + return DATA_CONCURRENCY_KEY_OFFSET + mapper.getSegmentForKey(concurrentEntityMessage.concurrencyKey()); + } else { + return DEFAULT_KEY; } - return DEFAULT_KEY; } @Override public Set getKeysForSynchronization() { - Set result = new HashSet(); - for (int i = 0; i < bucketCount; i++) { + Set result = new LinkedHashSet<>(); + for (int i = 0; i <= mapper.getSegments(); i++) { result.add(DEFAULT_KEY + i); } return Collections.unmodifiableSet(result); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 7036d8a319..fa4392ba89 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -54,7 +54,8 @@ import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.ServerStore; -import org.ehcache.clustered.server.messages.EntityStateSyncMessage; +import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage; +import org.ehcache.clustered.server.internal.messages.EntityStateSyncMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; import 
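The DefaultConcurrencyStrategy above now routes GetMessage to UNIVERSAL_KEY, non-concurrent messages to DEFAULT_KEY, and key-based store operations to DATA_CONCURRENCY_KEY_OFFSET plus the key's segment, so the same key always lands on the same concurrency key regardless of which cache it belongs to. A standalone sketch of just that arithmetic, assuming the 16-segment DEFAULT_MAPPER configured in EhcacheServerEntityService (the wrapper class here is illustrative; the constants and the modulo computation are copied from the diff):

// Mirrors KeySegmentMapper.getSegmentForKey and DefaultConcurrencyStrategy.concurrencyKey
// for key-based store operations, with 16 segments.
public class ConcurrencyKeySketch {

  private static final int DEFAULT_KEY = 1;
  private static final int DATA_CONCURRENCY_KEY_OFFSET = DEFAULT_KEY + 1;
  private static final int SEGMENTS = 16;

  static int segmentForKey(long key) {
    return Math.abs((int) (key % SEGMENTS));
  }

  static int concurrencyKeyForStoreOperation(long key) {
    return DATA_CONCURRENCY_KEY_OFFSET + segmentForKey(key);
  }

  public static void main(String[] args) {
    // Same key, different caches -> same concurrency key (matches the updated test expectations).
    System.out.println(concurrencyKeyForStoreOperation(1L));   // 3
    System.out.println(concurrencyKeyForStoreOperation(17L));  // 3 (17 % 16 == 1)
    System.out.println(concurrencyKeyForStoreOperation(-5L));  // 7
  }
}

Because the same KeySegmentMapper is handed to both the concurrency strategy and ServerStoreImpl, these concurrency keys line up with the off-heap chain segments, which is what lets synchronizeKeyToPassive stream one store segment per concurrency key.
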
org.slf4j.Logger; @@ -83,6 +84,8 @@ import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.ReleaseServerStore; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateServerStore; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; +import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.DATA_CONCURRENCY_KEY_OFFSET; +import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.DEFAULT_KEY; // TODO: Provide some mechanism to report on storage utilization -- PageSource provides little visibility // TODO: Ensure proper operations for concurrent requests @@ -151,7 +154,7 @@ public Class getServiceType() { } - EhcacheActiveEntity(ServiceRegistry services, byte[] config) { + EhcacheActiveEntity(ServiceRegistry services, byte[] config, final KeySegmentMapper mapper) { this.identity = ClusteredEhcacheIdentity.deserialize(config); this.responseFactory = new EhcacheEntityResponseFactory(); this.clientCommunicator = services.getService(new CommunicatorServiceConfiguration()); @@ -161,7 +164,7 @@ public Class getServiceType() { } else { this.offHeapResourceIdentifiers = offHeapResources.getAllIdentifiers(); } - ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, this.offHeapResourceIdentifiers)); + ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, this.offHeapResourceIdentifiers, mapper)); if (ehcacheStateService == null) { throw new AssertionError("Server failed to retrieve EhcacheStateService."); } @@ -306,7 +309,8 @@ public void onEviction(long key) { @Override public void synchronizeKeyToPassive(PassiveSynchronizationChannel syncChannel, int concurrencyKey) { - if (concurrencyKey == ConcurrencyStrategies.DefaultConcurrencyStrategy.DEFAULT_KEY) { + LOGGER.info("Sync started for concurrency key {}.", concurrencyKey); + if (concurrencyKey == DEFAULT_KEY) { ServerSideConfiguration configuration = new ServerSideConfiguration(ehcacheStateService.getDefaultServerResource(), ehcacheStateService.getSharedResourcePools()); @@ -317,7 +321,17 @@ public void synchronizeKeyToPassive(PassiveSynchronizationChannel { + ServerStoreImpl store = ehcacheStateService.getStore(name); + store.getSegments().get(concurrencyKey - DATA_CONCURRENCY_KEY_OFFSET).keySet().stream() + .forEach(key -> { + syncChannel.synchronizeToPassive(new EntityDataSyncMessage(name, key, store.get(key))); + }); + }); + } + LOGGER.info("Sync complete for concurrency key {}.", concurrencyKey); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 737e84a7c6..497e6d8c55 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -33,7 +33,9 @@ import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; -import org.ehcache.clustered.server.messages.EntityStateSyncMessage; +import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage; +import org.ehcache.clustered.server.internal.messages.EntityStateSyncMessage; +import 
org.ehcache.clustered.server.internal.messages.EntitySyncMessage; import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; @@ -77,7 +79,7 @@ public void invoke(EhcacheEntityMessage message) { ehcacheStateService.getStateRepositoryManager().invoke((StateRepositoryOpMessage)message); break; case SYNC_OP: - invokeSyncOperation((EntityStateSyncMessage) message); + invokeSyncOperation((EntitySyncMessage) message); break; case REPLICATION_OP: invokeRetirementMessages((ClientIDTrackerMessage)message); @@ -91,7 +93,7 @@ public void invoke(EhcacheEntityMessage message) { } - EhcachePassiveEntity(ServiceRegistry services, byte[] config) { + EhcachePassiveEntity(ServiceRegistry services, byte[] config, final KeySegmentMapper mapper) { this.identity = ClusteredEhcacheIdentity.deserialize(config); OffHeapResources offHeapResources = services.getService(new BasicServiceConfiguration(OffHeapResources.class)); if (offHeapResources == null) { @@ -99,7 +101,7 @@ public void invoke(EhcacheEntityMessage message) { } else { this.offHeapResourceIdentifiers = offHeapResources.getAllIdentifiers(); } - ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, this.offHeapResourceIdentifiers)); + ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, this.offHeapResourceIdentifiers, mapper)); if (ehcacheStateService == null) { throw new AssertionError("Server failed to retrieve EhcacheStateService."); } @@ -153,12 +155,24 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu } } - private void invokeSyncOperation(EntityStateSyncMessage message) throws ClusterException { - ehcacheStateService.configure(message.getConfiguration()); - for (Map.Entry entry : message.getStoreConfigs().entrySet()) { - ehcacheStateService.createStore(entry.getKey(), entry.getValue()); + private void invokeSyncOperation(EntitySyncMessage message) throws ClusterException { + switch (message.operation()) { + case STATE: + EntityStateSyncMessage stateSyncMessage = (EntityStateSyncMessage) message; + + ehcacheStateService.configure(stateSyncMessage.getConfiguration()); + for (Map.Entry entry : stateSyncMessage.getStoreConfigs().entrySet()) { + ehcacheStateService.createStore(entry.getKey(), entry.getValue()); + } + stateSyncMessage.getTrackedClients().stream().forEach(id -> ehcacheStateService.getClientMessageTracker().add(id)); + break; + case DATA: + EntityDataSyncMessage dataSyncMessage = (EntityDataSyncMessage) message; + ehcacheStateService.getStore(dataSyncMessage.getCacheId()).put(dataSyncMessage.getKey(), dataSyncMessage.getChain()); + break; + default: + throw new IllegalMessageException("Unknown Sync operation " + message.operation()); } - message.getTrackedClients().stream().forEach(id -> ehcacheStateService.getClientMessageTracker().add(id)); } private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterException { @@ -228,22 +242,22 @@ private void destroyServerStore(DestroyServerStore destroyServerStore) throws Cl @Override public void startSyncEntity() { - + LOGGER.info("Sync started."); } @Override public void endSyncEntity() { - + LOGGER.info("Sync completed."); } @Override public void startSyncConcurrencyKey(int concurrencyKey) { - + LOGGER.info("Sync started for concurrency key {}.", concurrencyKey); } @Override public void endSyncConcurrencyKey(int concurrencyKey) { - + 
LOGGER.info("Sync complete for concurrency key {}.", concurrencyKey); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java index 9fa365ef3b..c4a6e254af 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java @@ -18,7 +18,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheCodec; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.clustered.server.messages.EhcacheSyncMessageCodec; +import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessageCodec; import org.terracotta.entity.ConcurrencyStrategy; import org.terracotta.entity.EntityServerService; import org.terracotta.entity.MessageCodec; @@ -31,7 +31,8 @@ public class EhcacheServerEntityService implements EntityServerService { private static final long ENTITY_VERSION = 1L; - private static final int DEFAULT_CONCURRENCY = 1024; + private static final int DEFAULT_CONCURRENCY = 16; + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(DEFAULT_CONCURRENCY); @Override public long getVersion() { @@ -45,17 +46,17 @@ public boolean handlesEntityType(String typeName) { @Override public EhcacheActiveEntity createActiveEntity(ServiceRegistry registry, byte[] configuration) { - return new EhcacheActiveEntity(registry, configuration); + return new EhcacheActiveEntity(registry, configuration, DEFAULT_MAPPER); } @Override public PassiveServerEntity createPassiveEntity(ServiceRegistry registry, byte[] configuration) { - return new EhcachePassiveEntity(registry, configuration); + return new EhcachePassiveEntity(registry, configuration, DEFAULT_MAPPER); } @Override public ConcurrencyStrategy getConcurrencyStrategy(byte[] config) { - return defaultConcurrency(DEFAULT_CONCURRENCY); + return defaultConcurrency(DEFAULT_MAPPER); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index f0da1589ee..3d51f2d335 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -31,8 +31,6 @@ import org.ehcache.clustered.common.internal.exceptions.InvalidStoreManagerException; import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.exceptions.ResourceConfigurationException; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; import org.terracotta.entity.ServiceRegistry; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; @@ -85,12 +83,14 @@ public class EhcacheStateServiceImpl implements EhcacheStateService { private Map stores = Collections.emptyMap(); private final ClientMessageTracker messageTracker = new ClientMessageTracker(); - private final StateRepositoryManager stateRepositoryManager; + private final KeySegmentMapper mapper; + - public 
EhcacheStateServiceImpl(ServiceRegistry services, Set offHeapResourceIdentifiers) { + public EhcacheStateServiceImpl(ServiceRegistry services, Set offHeapResourceIdentifiers, final KeySegmentMapper mapper) { this.services = services; this.offHeapResourceIdentifiers = offHeapResourceIdentifiers; + this.mapper = mapper; this.stateRepositoryManager = new StateRepositoryManager(); } @@ -300,7 +300,7 @@ public ServerStoreImpl createStore(String name, ServerStoreConfiguration serverS } PageSource resourcePageSource = getPageSource(name, serverStoreConfiguration.getPoolAllocation()); - ServerStoreImpl serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource); + ServerStoreImpl serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource, mapper); stores.put(name, serverStore); return serverStore; } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java b/clustered/server/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java new file mode 100644 index 0000000000..138360f263 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/KeySegmentMapper.java @@ -0,0 +1,37 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server; + +import com.tc.classloader.CommonComponent; + +@CommonComponent +public class KeySegmentMapper { + + private final int segments; + + public KeySegmentMapper(final int segments) { + this.segments = segments; + } + + public int getSegmentForKey(long key) { + return Math.abs((int) (key % segments)); + } + + public int getSegments() { + return segments; + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java index 2f2724e395..491da91c3e 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java @@ -19,26 +19,26 @@ import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.ServerStore; +import org.ehcache.clustered.server.offheap.OffHeapChainMap; import org.ehcache.clustered.server.offheap.OffHeapServerStore; import org.terracotta.offheapstore.paging.PageSource; import com.tc.classloader.CommonComponent; import java.nio.ByteBuffer; +import java.util.List; @CommonComponent public class ServerStoreImpl implements ServerStore { - private static final int OFFHEAP_CHAIN_SEGMENTS = 16; - private final ServerStoreConfiguration storeConfiguration; private final PageSource pageSource; private final OffHeapServerStore store; - public ServerStoreImpl(ServerStoreConfiguration storeConfiguration, PageSource pageSource) { + public ServerStoreImpl(ServerStoreConfiguration storeConfiguration, PageSource pageSource, KeySegmentMapper mapper) { this.storeConfiguration = storeConfiguration; this.pageSource = pageSource; - this.store = new OffHeapServerStore(pageSource, OFFHEAP_CHAIN_SEGMENTS); + this.store = new OffHeapServerStore(pageSource, mapper); } public void setEvictionListener(ServerStoreEvictionListener listener) { @@ -90,4 +90,8 @@ public void clear() { public void close() { store.close(); } + + public List> getSegments() { + return store.getSegments(); + } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java new file mode 100644 index 0000000000..c8080ff882 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java @@ -0,0 +1,94 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server.internal.messages; + +import org.ehcache.clustered.common.internal.messages.ChainCodec; +import org.ehcache.clustered.common.internal.messages.CodecUtil; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Util; +import org.terracotta.entity.MessageCodecException; +import org.terracotta.entity.SyncMessageCodec; + +import java.nio.ByteBuffer; + +public class EhcacheSyncMessageCodec implements SyncMessageCodec { + + private static final byte OPCODE_SIZE = 1; + private static final byte KEY_SIZE = 8; + private static final byte CACHE_ID_LEN_SIZE = 4; + + private final ChainCodec chainCodec = new ChainCodec(); + + @Override + public byte[] encode(final int concurrencyKey, final EhcacheEntityMessage message) throws MessageCodecException { + if(message.getType() == EhcacheEntityMessage.Type.SYNC_OP) { + EntitySyncMessage syncMessage = (EntitySyncMessage)message; + switch (syncMessage.operation()) { + case STATE: { + byte[] encodedMsg = Util.marshall(syncMessage); + ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + encodedMsg.length); + buffer.put(syncMessage.getOpCode()); + buffer.put(encodedMsg); + return buffer.array(); + } + case DATA: { + EntityDataSyncMessage dataSyncMessage = (EntityDataSyncMessage)message; + String cacheId = dataSyncMessage.getCacheId(); + byte[] encodedChain = chainCodec.encode(dataSyncMessage.getChain()); + ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + KEY_SIZE + CACHE_ID_LEN_SIZE + + 2 * cacheId.length() + encodedChain.length); + buffer.put(dataSyncMessage.getOpCode()); + buffer.putLong(dataSyncMessage.getKey()); + buffer.putInt(cacheId.length()); + CodecUtil.putStringAsCharArray(buffer, cacheId); + buffer.put(encodedChain); + return buffer.array(); + } + default: + throw new IllegalArgumentException(this.getClass().getName() + " can not encode " + syncMessage.operation()); + } + } else { + throw new IllegalArgumentException(this.getClass().getName() + " can not encode " + message + " which is not a " + EntityStateSyncMessage.class); + } + } + + @Override + public EntitySyncMessage decode(final int concurrencyKey, final byte[] payload) throws MessageCodecException { + ByteBuffer message = ByteBuffer.wrap(payload); + EntitySyncMessage.SyncOp syncOp = EntitySyncMessage.SyncOp.getSyncOp(message.get()); + switch (syncOp) { + case STATE: { + byte[] encodedMsg = new byte[message.capacity() - OPCODE_SIZE]; + message.get(encodedMsg, 0, encodedMsg.length); + return (EntityStateSyncMessage) Util.unmarshall(encodedMsg); + } + case DATA: { + long key = message.getLong(); + int cacheIdLength = message.getInt(); + String cacheId = CodecUtil.getStringFromBuffer(message, cacheIdLength); + int chainPayloadSize = message.remaining(); + byte[] chainPayload = new byte[chainPayloadSize]; + message.get(chainPayload); + Chain chain = chainCodec.decode(chainPayload); + return new EntityDataSyncMessage(cacheId, key, chain); + } + default: + throw new IllegalArgumentException("EntityStateSyncMessage operation not defined for : " + syncOp); + } + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityDataSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityDataSyncMessage.java new file mode 100644 index 0000000000..24e5bf2649 --- /dev/null +++ 
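For reference, the EhcacheSyncMessageCodec shown here frames a DATA sync record as [1-byte opcode][8-byte key][4-byte cacheId length][2 bytes per cacheId char][chain bytes]. The sketch below reproduces that layout with plain ByteBuffer calls; the class name and the empty chain payload are placeholders, and the 2-bytes-per-char encoding is an assumption inferred from the 2 * cacheId.length() allocation in encode():

import java.nio.ByteBuffer;

public class DataSyncFramingSketch {
  public static void main(String[] args) {
    String cacheId = "clustered-cache";
    long key = 42L;
    byte[] encodedChain = new byte[0];   // stands in for ChainCodec.encode(chain)

    ByteBuffer buffer = ByteBuffer.allocate(1 + 8 + 4 + 2 * cacheId.length() + encodedChain.length);
    buffer.put((byte) 32);               // SyncOp.DATA opcode, as defined in EntitySyncMessage
    buffer.putLong(key);
    buffer.putInt(cacheId.length());
    for (char c : cacheId.toCharArray()) {
      buffer.putChar(c);                 // presumed equivalent of CodecUtil.putStringAsCharArray
    }
    buffer.put(encodedChain);
    System.out.println("frame size = " + buffer.capacity());
  }
}
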
b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityDataSyncMessage.java @@ -0,0 +1,52 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server.internal.messages; + +import org.ehcache.clustered.common.internal.store.Chain; + +import com.tc.classloader.CommonComponent; + +@CommonComponent +public class EntityDataSyncMessage extends EntitySyncMessage { + + private final String cacheId; + private final long key; + private final Chain chain; + + public EntityDataSyncMessage(final String cacheId, final long key, final Chain chain) { + this.cacheId = cacheId; + this.key = key; + this.chain = chain; + } + + @Override + public SyncOp operation() { + return SyncOp.DATA; + } + + public String getCacheId() { + return cacheId; + } + + public long getKey() { + return key; + } + + public Chain getChain() { + return chain; + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityStateSyncMessage.java similarity index 73% rename from clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java rename to clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityStateSyncMessage.java index 67537a8e6d..0b854acd1d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EntityStateSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityStateSyncMessage.java @@ -14,11 +14,12 @@ * limitations under the License. 
*/ -package org.ehcache.clustered.server.messages; +package org.ehcache.clustered.server.internal.messages; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; + +import com.tc.classloader.CommonComponent; import com.tc.classloader.CommonComponent; @@ -28,7 +29,7 @@ import java.util.UUID; @CommonComponent -public class EntityStateSyncMessage extends EhcacheEntityMessage implements Serializable { +public class EntityStateSyncMessage extends EntitySyncMessage implements Serializable { private final ServerSideConfiguration configuration; private final Map storeConfigs; @@ -55,28 +56,7 @@ public Set getTrackedClients() { } @Override - public Type getType() { - return Type.SYNC_OP; - } - - @Override - public byte getOpCode() { - return getType().getCode(); - } - - @Override - public void setId(final long id) { - throw new UnsupportedOperationException(); + public SyncOp operation() { + return SyncOp.STATE; } - - @Override - public long getId() { - throw new UnsupportedOperationException(); - } - - @Override - public UUID getClientId() { - throw new UnsupportedOperationException(); - } - } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntitySyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntitySyncMessage.java new file mode 100644 index 0000000000..e3922203c1 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntitySyncMessage.java @@ -0,0 +1,85 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.ehcache.clustered.server.internal.messages;
+
+import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage;
+
+import com.tc.classloader.CommonComponent;
+
+import java.util.UUID;
+
+@CommonComponent
+public abstract class EntitySyncMessage extends EhcacheEntityMessage {
+
+  @CommonComponent
+  public enum SyncOp {
+
+    STATE((byte) 31),
+    DATA((byte) 32),
+    ;
+
+    private final byte syncOpCode;
+
+    SyncOp(byte syncOpCode) {
+      this.syncOpCode = syncOpCode;
+    }
+
+    public byte getOpCode() {
+      return this.syncOpCode;
+    }
+
+    public static SyncOp getSyncOp(byte syncOpCode) {
+      switch (syncOpCode) {
+        case 31:
+          return STATE;
+        case 32:
+          return DATA;
+        default:
+          throw new IllegalArgumentException("Sync operation not defined for : " + syncOpCode);
+      }
+    }
+
+  }
+
+  @Override
+  public Type getType() {
+    return Type.SYNC_OP;
+  }
+
+  public abstract SyncOp operation();
+
+  @Override
+  public byte getOpCode() {
+    return operation().getOpCode();
+  }
+
+  @Override
+  public void setId(final long id) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public long getId() {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public UUID getClientId() {
+    throw new UnsupportedOperationException();
+  }
+
+}
diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodec.java
deleted file mode 100644
index 12e51ff7f3..0000000000
--- a/clustered/server/src/main/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodec.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright Terracotta, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
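The EhcacheSyncMessageCodec deleted below framed a sync message as a single op-code byte followed by the Java-serialized message, and could only handle EntityStateSyncMessage. With the SyncOp codes above (31 for STATE, 32 for DATA) a replacement codec, which is not part of these hunks, can dispatch on that byte for both message types. A standalone sketch of that framing, with the actual marshalling left abstract:

    import org.ehcache.clustered.server.internal.messages.EntitySyncMessage.SyncOp;

    import java.nio.ByteBuffer;

    final class SyncFramingSketch {

      // Encode: one op-code byte, then the already-serialized message body.
      static byte[] frame(SyncOp op, byte[] body) {
        ByteBuffer buffer = ByteBuffer.allocate(1 + body.length);
        buffer.put(op.getOpCode());
        buffer.put(body);
        return buffer.array();
      }

      // Decode: the leading byte selects the message type (31 -> STATE, 32 -> DATA).
      static SyncOp opOf(byte[] frame) {
        return SyncOp.getSyncOp(ByteBuffer.wrap(frame).get());
      }
    }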
- */ - -package org.ehcache.clustered.server.messages; - -import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; -import org.ehcache.clustered.common.internal.store.Util; -import org.terracotta.entity.MessageCodecException; -import org.terracotta.entity.SyncMessageCodec; - -import java.nio.ByteBuffer; - -public class EhcacheSyncMessageCodec implements SyncMessageCodec { - - private static final byte OPCODE_SIZE = 1; - - @Override - public byte[] encode(final int concurrencyKey, final EhcacheEntityMessage message) throws MessageCodecException { - if (message instanceof EntityStateSyncMessage) { - byte[] encodedMsg = Util.marshall(message); - ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + encodedMsg.length); - buffer.put(message.getOpCode()); - buffer.put(encodedMsg); - return buffer.array(); - } else { - throw new IllegalArgumentException(this.getClass().getName() + " can not encode " + message + " which is not a " + EntityStateSyncMessage.class); - } - } - - @Override - public EhcacheEntityMessage decode(final int concurrencyKey, final byte[] payload) throws MessageCodecException { - ByteBuffer message = ByteBuffer.wrap(payload); - byte[] encodedMsg = new byte[message.capacity() - OPCODE_SIZE]; - byte opCode = message.get(); - if (opCode == EhcacheEntityMessage.Type.SYNC_OP.getCode()) { - message.get(encodedMsg, 0, encodedMsg.length); - EntityStateSyncMessage entityMessage = (EntityStateSyncMessage) Util.unmarshall(encodedMsg); - return entityMessage; - } else { - throw new IllegalArgumentException("EntityStateSyncMessage operation not defined for : " + opCode); - } - } -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java index 37e9f04af5..c68824e16a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java @@ -21,6 +21,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.locks.Lock; @@ -36,7 +37,10 @@ import org.terracotta.offheapstore.paging.PageSource; import org.terracotta.offheapstore.storage.portability.Portability; -class OffHeapChainMap implements MapInternals { +import com.tc.classloader.CommonComponent; + +@CommonComponent +public class OffHeapChainMap implements MapInternals { interface ChainMapEvictionListener { void onEviction(K key); @@ -203,6 +207,15 @@ public void clear() { } } + public Set keySet() { + heads.writeLock().lock(); + try { + return heads.keySet(); + } finally { + heads.writeLock().unlock(); + } + } + private void evict() { int evictionIndex = heads.getEvictionIndex(); if (evictionIndex < 0) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java index 4d173de3f2..42f8ca4b22 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java @@ -21,6 +21,7 @@ import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.ServerStore; +import org.ehcache.clustered.server.KeySegmentMapper; import 
org.ehcache.clustered.server.ServerStoreEvictionListener; import org.terracotta.offheapstore.exceptions.OversizeMappingException; import org.terracotta.offheapstore.paging.PageSource; @@ -31,14 +32,20 @@ public class OffHeapServerStore implements ServerStore { private final List> segments; + private final KeySegmentMapper mapper; - public OffHeapServerStore(PageSource source, int concurrency) { - segments = new ArrayList>(concurrency); - for (int i = 0; i < concurrency; i++) { + public OffHeapServerStore(PageSource source, KeySegmentMapper mapper) { + this.mapper = mapper; + segments = new ArrayList>(mapper.getSegments()); + for (int i = 0; i < mapper.getSegments(); i++) { segments.add(new OffHeapChainMap(source, LongPortability.INSTANCE, KILOBYTES.toBytes(4), MEGABYTES.toBytes(8), false)); } } + public List> getSegments() { + return segments; + } + public void setEvictionListener(final ServerStoreEvictionListener listener) { OffHeapChainMap.ChainMapEvictionListener chainMapEvictionListener = new OffHeapChainMap.ChainMapEvictionListener() { @Override @@ -184,7 +191,7 @@ public void clear() { } OffHeapChainMap segmentFor(long key) { - return segments.get(Math.abs((int) (key % segments.size()))); + return segments.get(mapper.getSegmentForKey(key)); } private void writeLockAll() { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java index a3147975a8..452b6a42d2 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java @@ -49,7 +49,8 @@ public boolean initialize(ServiceProviderConfiguration configuration, PlatformCo public T getService(long consumerID, ServiceConfiguration configuration) { if (configuration != null && configuration.getServiceType().equals(EhcacheStateService.class)) { EhcacheStateServiceConfig stateServiceConfig = (EhcacheStateServiceConfig) configuration; - EhcacheStateService storeManagerService = new EhcacheStateServiceImpl(stateServiceConfig.getServiceRegistry(), stateServiceConfig.getOffHeapResourceIdentifiers()); + EhcacheStateService storeManagerService = new EhcacheStateServiceImpl( + stateServiceConfig.getServiceRegistry(), stateServiceConfig.getOffHeapResourceIdentifiers(), stateServiceConfig.getMapper()); EhcacheStateService result = serviceMap.putIfAbsent(consumerID, storeManagerService); if (result == null) { result = storeManagerService; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java index 8eb057fc8f..fef84320a9 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.server.state.config; +import org.ehcache.clustered.server.KeySegmentMapper; import org.ehcache.clustered.server.state.EhcacheStateService; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; @@ -29,10 +30,14 @@ public class EhcacheStateServiceConfig implements ServiceConfiguration offHeapResourceIdentifiers; + private final KeySegmentMapper mapper; - 
public EhcacheStateServiceConfig(ServiceRegistry serviceRegistry, Set offHeapResourceIdentifiers) { + + public EhcacheStateServiceConfig(ServiceRegistry serviceRegistry, Set offHeapResourceIdentifiers, + final KeySegmentMapper mapper) { this.serviceRegistry = serviceRegistry; this.offHeapResourceIdentifiers = offHeapResourceIdentifiers; + this.mapper = mapper; } @Override @@ -48,4 +53,8 @@ public Set getOffHeapResourceIdentifiers() { return this.offHeapResourceIdentifiers; } + public KeySegmentMapper getMapper() { + return mapper; + } + } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java index 2664204755..ae51a54962 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/DefaultConcurrencyStrategyTest.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.server; import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.hamcrest.Matcher; import org.junit.Test; import org.terracotta.entity.ConcurrencyStrategy; @@ -24,44 +25,56 @@ import java.util.HashSet; import java.util.Set; +import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.DATA_CONCURRENCY_KEY_OFFSET; import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.DEFAULT_KEY; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; +import static org.terracotta.entity.ConcurrencyStrategy.UNIVERSAL_KEY; /** * @author Ludovic Orban */ public class DefaultConcurrencyStrategyTest { + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + @Test public void testConcurrencyKey() throws Exception { final int concurrency = 107; - ConcurrencyStrategy strategy = ConcurrencyStrategies.defaultConcurrency(concurrency); + ConcurrencyStrategy strategy = ConcurrencyStrategies.defaultConcurrency(DEFAULT_MAPPER); assertThat(strategy.concurrencyKey(new NonConcurrentTestEntityMessage()), is(DEFAULT_KEY)); for (int i = -1024; i < 1024; i++) { - assertThat(strategy.concurrencyKey(new ConcurrentTestEntityMessage(i)), withinRange(DEFAULT_KEY, DEFAULT_KEY + concurrency)); + assertThat(strategy.concurrencyKey(new ConcurrentTestEntityMessage(i)), withinRange(DATA_CONCURRENCY_KEY_OFFSET, DATA_CONCURRENCY_KEY_OFFSET + concurrency)); } } + @Test + public void testConcurrencyKeyForServerStoreGetOperation() throws Exception { + ConcurrencyStrategy strategy = ConcurrencyStrategies.defaultConcurrency(DEFAULT_MAPPER); + ServerStoreOpMessage.GetMessage getMessage = mock(ServerStoreOpMessage.GetMessage.class); + assertThat(strategy.concurrencyKey(getMessage), is(UNIVERSAL_KEY)); + } + @Test public void testKeysForSynchronization() throws Exception { final int concurrency = 111; - ConcurrencyStrategy strategy = ConcurrencyStrategies.defaultConcurrency(concurrency); - - assertThat(strategy.concurrencyKey(new NonConcurrentTestEntityMessage()), is(DEFAULT_KEY)); + ConcurrencyStrategy strategy = ConcurrencyStrategies.defaultConcurrency(DEFAULT_MAPPER); - Set visitedConcurrencyKeys = 
new HashSet(); + Set visitedConcurrencyKeys = new HashSet<>(); for (int i = -1024; i < 1024; i++) { int concurrencyKey = strategy.concurrencyKey(new ConcurrentTestEntityMessage(i)); - assertThat(concurrencyKey, withinRange(DEFAULT_KEY, DEFAULT_KEY + concurrency)); + assertThat(concurrencyKey, withinRange(DATA_CONCURRENCY_KEY_OFFSET, DATA_CONCURRENCY_KEY_OFFSET + concurrency)); visitedConcurrencyKeys.add(concurrencyKey); } - assertThat(strategy.getKeysForSynchronization().containsAll(visitedConcurrencyKeys), is(true)); + Set keysForSynchronization = strategy.getKeysForSynchronization(); + assertThat(keysForSynchronization.contains(DEFAULT_KEY), is(true)); + assertThat(keysForSynchronization.containsAll(visitedConcurrencyKeys), is(true)); } private static Matcher withinRange(int greaterThanOrEqualTo, int lessThan) { @@ -80,7 +93,7 @@ public ConcurrentTestEntityMessage(int key) { } @Override - public int concurrencyKey() { + public long concurrencyKey() { return key; } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 908475bd1d..b2e62d2051 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -35,7 +35,7 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; -import org.ehcache.clustered.server.messages.EntityStateSyncMessage; +import org.ehcache.clustered.server.internal.messages.EntityStateSyncMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; import org.junit.Before; @@ -67,7 +67,6 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -76,13 +75,13 @@ import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; public class EhcacheActiveEntityTest { private static final byte[] ENTITY_ID = ClusteredEhcacheIdentity.serialize(UUID.randomUUID()); private static final LifeCycleMessageFactory MESSAGE_FACTORY = new LifeCycleMessageFactory(); private static final UUID CLIENT_ID = UUID.randomUUID(); + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); @Before public void setClientId() { @@ -92,7 +91,7 @@ public void setClientId() { @Test public void testConfigTooShort() { try { - new EhcacheActiveEntity(null, new byte[ENTITY_ID.length - 1]); + new EhcacheActiveEntity(null, new byte[ENTITY_ID.length - 1], DEFAULT_MAPPER); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { //expected @@ -102,7 +101,7 @@ public void testConfigTooShort() { @Test public void testConfigTooLong() { try { - new EhcacheActiveEntity(null, new byte[ENTITY_ID.length + 1]); + new EhcacheActiveEntity(null, new byte[ENTITY_ID.length + 1], DEFAULT_MAPPER); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { //expected @@ -112,7 +111,7 @@ public 
void testConfigTooLong() { @Test public void testConfigNull() { try { - new EhcacheActiveEntity(null, null); + new EhcacheActiveEntity(null, null, DEFAULT_MAPPER); fail("Expected NullPointerException"); } catch (NullPointerException e) { //expected @@ -124,7 +123,7 @@ public void testConfigNull() { */ @Test public void testConnected() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -138,7 +137,7 @@ public void testConnected() throws Exception { @Test public void testConnectedAgain() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -155,7 +154,7 @@ public void testConnectedAgain() throws Exception { @Test public void testConnectedSecond() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -177,7 +176,7 @@ public void testConnectedSecond() throws Exception { */ @Test public void testDisconnectedNotConnected() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.disconnected(client); @@ -189,7 +188,7 @@ public void testDisconnectedNotConnected() throws Exception { */ @Test public void testDisconnected() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -206,7 +205,7 @@ public void testDisconnected() throws Exception { */ @Test public void testDisconnectedSecond() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry(), ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -225,7 +224,7 @@ public void testDisconnectedSecond() throws Exception { @Test public void testInteractionWithServerWithoutResources() throws Exception { OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -246,7 +245,7 @@ public void testConfigure() throws Exception { 
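The DefaultConcurrencyStrategyTest changes above pin down the new concurrency layout: lifecycle messages stay on DEFAULT_KEY, read-only ServerStore GETs are routed to UNIVERSAL_KEY, and mutative store messages land on DATA_CONCURRENCY_KEY_OFFSET plus the segment chosen by the shared KeySegmentMapper, which is why one mapper instance is now threaded through the entities, the state service and the strategy. A standalone sketch of that layout; the constants and the modulo mapping are stand-ins, since KeySegmentMapper's real hashing is not shown in these hunks:

    // Illustrative only: mirrors the layout exercised by DefaultConcurrencyStrategyTest.
    final class ConcurrencyLayoutSketch {

      static final int DEFAULT_KEY = 1;                              // value assumed for the sketch
      static final int DATA_CONCURRENCY_KEY_OFFSET = DEFAULT_KEY + 1;

      private final int segments;

      ConcurrencyLayoutSketch(int segments) {                        // e.g. 16, like DEFAULT_MAPPER
        this.segments = segments;
      }

      // Stand-in for KeySegmentMapper.getSegmentForKey(long): deterministic, 0..segments-1.
      int segmentForKey(long key) {
        return (int) Math.abs(key % segments);
      }

      // Mutations on the same key always share a concurrency key, so active and passive
      // replicas apply them in the same order; GETs would use ConcurrencyStrategy.UNIVERSAL_KEY.
      int concurrencyKeyFor(long storeKey) {
        return DATA_CONCURRENCY_KEY_OFFSET + segmentForKey(storeKey);
      }
    }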
registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -276,7 +275,7 @@ public void testNoAttachementFailsToInvokeServerStoreOperation() throws Exceptio registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -317,7 +316,7 @@ public void testAppendInvalidationAcksTakenIntoAccount() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -416,7 +415,7 @@ public void testClearInvalidationAcksTakenIntoAccount() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -515,7 +514,7 @@ public void testAppendInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcc registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -610,7 +609,7 @@ public void testClearInvalidationDisconnectionOfInvalidatingClientsTakenIntoAcco registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -705,7 +704,7 @@ public void testAppendInvalidationDisconnectionOfBlockingClientTakenIntoAccount( registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); 
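OffHeapServerStore.getSegments() and OffHeapChainMap.keySet(), added earlier in this patch, give the server a way to enumerate every mapping a store holds, presumably so the active entity can stream them out during passive synchronization. The actual sync code is not part of these hunks; a sketch of how such a pass could be written against the new accessors, with the sink standing in for whatever channel the entity really uses:

    import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage;
    import org.ehcache.clustered.server.offheap.OffHeapChainMap;
    import org.ehcache.clustered.server.offheap.OffHeapServerStore;

    import java.util.function.Consumer;

    final class DataSyncPassSketch {

      // Walk every segment and key of a store and hand one EntityDataSyncMessage per
      // mapping to the caller.
      static void syncStore(String cacheId, OffHeapServerStore store,
                            Consumer<EntityDataSyncMessage> sink) {
        for (OffHeapChainMap<Long> segment : store.getSegments()) {
          for (Long key : segment.keySet()) {
            sink.accept(new EntityDataSyncMessage(cacheId, key, store.get(key)));
          }
        }
      }
    }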
registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -790,7 +789,7 @@ public void testClearInvalidationDisconnectionOfBlockingClientTakenIntoAccount() registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); ClientDescriptor client2 = new TestClientDescriptor(); ClientDescriptor client3 = new TestClientDescriptor(); @@ -875,7 +874,7 @@ public void testAttachedClientButNotStoreFailsInvokingServerStoreOperation() thr registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -924,7 +923,7 @@ public void testWithAttachmentSucceedsInvokingServerStoreOperation() throws Exce registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -977,7 +976,7 @@ public void testConfigureBeforeConnect() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); assertFailure(activeEntity.invoke(client, @@ -996,7 +995,7 @@ public void testConfigureAfterConfigure() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1043,7 +1042,7 @@ public void testConfigureMissingPoolResource() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("defaultServerResource", 64, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final 
EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1076,7 +1075,7 @@ public void testConfigureMissingDefaultResource() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1107,7 +1106,7 @@ public void testConfigureLargeSharedPool() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1145,7 +1144,7 @@ public void testValidate2Clients() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1176,7 +1175,7 @@ public void testValidate1Client() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1208,7 +1207,7 @@ public void testValidateAfterConfigure() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1228,7 +1227,7 @@ public void testValidateExtraResource() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new 
TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1256,7 +1255,7 @@ public void testValidateNoDefaultResource() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1281,7 +1280,7 @@ public void testCreateDedicatedServerStoreBeforeConfigure() throws Exception { final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(32, MemoryUnit.MEGABYTES); registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1300,7 +1299,7 @@ public void testCreateDedicatedServerStoreBeforeValidate() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1332,7 +1331,7 @@ public void testCreateDedicatedServerStore() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1382,7 +1381,7 @@ public void testCreateDedicatedServerStoreAfterValidate() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1421,7 +1420,7 @@ public void testCreateDedicatedServerStoreExisting() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); 
assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1462,7 +1461,7 @@ public void testCreateReleaseDedicatedServerStoreMultiple() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1532,7 +1531,7 @@ public void testValidateDedicatedServerStore() throws Exception { .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1573,7 +1572,7 @@ public void testValidateDedicatedServerStoreBad() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1622,7 +1621,7 @@ public void testValidateDedicatedServerStoreBeforeCreate() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1660,7 +1659,7 @@ public void testCreateSharedServerStore() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1707,7 +1706,7 @@ public void testCreateSharedServerStoreExisting() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1755,7 +1754,7 @@ public void testValidateSharedServerStore() throws Exception { .shared("primary") .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final 
EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -1805,7 +1804,7 @@ public void testValidateServerStore_DedicatedStoresDifferentSizes() throws Excep .dedicated("serverResource1", 2, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); activeEntity.connected(client1); assertThat(activeEntity.getConnectedClients().keySet(), contains(client1)); @@ -1850,7 +1849,7 @@ public void testValidateServerStore_DedicatedStoresSameSizes() throws Exception .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); activeEntity.connected(client1); assertThat(activeEntity.getConnectedClients().keySet(), contains(client1)); @@ -1888,7 +1887,7 @@ public void testValidateServerStore_DedicatedStoreResourceNamesDifferent() throw .dedicated("serverResource2", 4, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); activeEntity.connected(client1); assertThat(activeEntity.getConnectedClients().keySet(), contains(client1)); @@ -1932,7 +1931,7 @@ public void testValidateServerStore_DedicatedCacheNameDifferent() throws Excepti .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client1 = new TestClientDescriptor(); activeEntity.connected(client1); assertThat(activeEntity.getConnectedClients().keySet(), contains(client1)); @@ -1965,7 +1964,7 @@ public void testServerStoreSameNameInDifferentSharedPools() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2010,7 +2009,7 @@ public void testValidateSharedServerStoreBad() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2054,7 +2053,7 @@ public void testReleaseServerStoreBeforeAttach() throws Exception { registry.addResource("serverResource1", 8, 
MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2076,7 +2075,7 @@ public void testReleaseServerStoreAfterRelease() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2113,7 +2112,7 @@ public void testDestroyServerStore() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2186,7 +2185,7 @@ public void testDestroyServerStoreBeforeAttach() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2213,7 +2212,7 @@ public void testDestroyServerStoreInUse() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2273,7 +2272,7 @@ public void testSharedPoolCacheNameCollision() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2336,7 +2335,7 @@ public void testSharedPoolCacheNameCollision() throws Exception { @Test public void testDestroyEmpty() throws Exception { - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(new OffHeapIdentifierRegistry() , ENTITY_ID); + final EhcacheActiveEntity activeEntity 
= new EhcacheActiveEntity(new OffHeapIdentifierRegistry() , ENTITY_ID, DEFAULT_MAPPER); activeEntity.destroy(); } @@ -2346,7 +2345,7 @@ public void testDestroyWithStores() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2420,7 +2419,7 @@ public void testValidateIdenticalConfiguration() { registry.addResource("primary-server-resource", 16, MemoryUnit.MEGABYTES); registry.addResource("secondary-server-resource", 16, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); activeEntity.invoke(configurer, MESSAGE_FACTORY.configureStoreManager(configureConfig)); @@ -2438,7 +2437,7 @@ public void testValidateSharedPoolNamesDifferent() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); @@ -2466,7 +2465,7 @@ public void testValidateDefaultResourceNameDifferent() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); @@ -2494,7 +2493,7 @@ public void testValidateClientSharedPoolSizeTooBig() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); @@ -2522,7 +2521,7 @@ public void testValidateSecondClientInheritsFirstClientConfig() throws Exception registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); @@ -2543,7 +2542,7 @@ public void testValidateNonExistentSharedPool() throws Exception { OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(32, MemoryUnit.MEGABYTES); registry.addResource("defaultServerResource", 8, 
MemoryUnit.MEGABYTES); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor configurer = new TestClientDescriptor(); activeEntity.connected(configurer); @@ -2573,7 +2572,7 @@ public void testCreateServerStoreWithUnknownPool() throws Exception { .unknown() .build(); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); assertThat(activeEntity.getConnectedClients().keySet(), contains(client)); @@ -2596,7 +2595,7 @@ public void testSyncToPassive() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID); + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); ClientDescriptor client = new TestClientDescriptor(); activeEntity.connected(client); @@ -2839,7 +2838,7 @@ public Set getAllIdentifiers() { }; } else if (serviceConfiguration.getServiceType().equals(EhcacheStateService.class)) { if (storeManagerService == null) { - this.storeManagerService = new EhcacheStateServiceImpl(this, getIdentifiers(pools.keySet())); + this.storeManagerService = new EhcacheStateServiceImpl(this, getIdentifiers(pools.keySet()), DEFAULT_MAPPER); } return (T) (this.storeManagerService); } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index 8637564321..683c8b320d 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -56,6 +56,7 @@ public class EhcachePassiveEntityTest { private static final byte[] ENTITY_ID = ClusteredEhcacheIdentity.serialize(UUID.randomUUID()); private static final LifeCycleMessageFactory MESSAGE_FACTORY = new LifeCycleMessageFactory(); private static final UUID CLIENT_ID = UUID.randomUUID(); + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); @Before public void setClientId() { @@ -65,7 +66,7 @@ public void setClientId() { @Test public void testConfigTooShort() { try { - new EhcachePassiveEntity(null, new byte[ENTITY_ID.length - 1]); + new EhcachePassiveEntity(null, new byte[ENTITY_ID.length - 1], DEFAULT_MAPPER); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { //expected @@ -75,7 +76,7 @@ public void testConfigTooShort() { @Test public void testConfigTooLong() { try { - new EhcachePassiveEntity(null, new byte[ENTITY_ID.length + 1]); + new EhcachePassiveEntity(null, new byte[ENTITY_ID.length + 1], DEFAULT_MAPPER); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { //expected @@ -85,7 +86,7 @@ public void testConfigTooLong() { @Test public void testConfigNull() { try { - new EhcachePassiveEntity(null, null); + new EhcachePassiveEntity(null, null, DEFAULT_MAPPER); fail("Expected NullPointerException"); } catch (NullPointerException e) { 
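Both entity tests now build everything around a single KeySegmentMapper(16): it goes into the active and passive entities and into EhcacheStateServiceImpl. The reason to share one mapper is that replicated store operations are grouped by segment, so both sides must derive the same segment for a given key. A quick standalone check of that property, assuming (as the tests implicitly do) that the mapping depends only on the segment count:

    import org.ehcache.clustered.server.KeySegmentMapper;

    final class MapperAgreementCheck {
      public static void main(String[] args) {
        KeySegmentMapper active = new KeySegmentMapper(16);
        KeySegmentMapper passive = new KeySegmentMapper(16);
        for (long key = -1024; key < 1024; key++) {
          // Same segment count => same segment on both sides for every key.
          if (active.getSegmentForKey(key) != passive.getSegmentForKey(key)) {
            throw new AssertionError("segment mismatch for key " + key);
          }
        }
        System.out.println("active and passive agree across " + active.getSegments() + " segments");
      }
    }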
//expected @@ -102,7 +103,7 @@ public void testConfigure() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() .defaultResource("defaultServerResource") @@ -127,7 +128,7 @@ public void testConfigureAfterConfigure() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() .defaultResource("defaultServerResource") @@ -163,7 +164,7 @@ public void testConfigureMissingPoolResource() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("defaultServerResource", 64, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() .defaultResource("defaultServerResource") @@ -189,7 +190,7 @@ public void testConfigureMissingDefaultResource() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() .defaultResource("defaultServerResource") @@ -213,7 +214,7 @@ public void testConfigureLargeSharedPool() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() .defaultResource("defaultServerResource") @@ -238,7 +239,7 @@ public void testCreateDedicatedServerStore() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -274,7 +275,7 @@ public void testCreateSharedServerStore() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final 
EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke( MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() @@ -309,7 +310,7 @@ public void testDestroyServerStore() throws Exception { registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 8, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke( MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() @@ -370,7 +371,7 @@ public void testSharedPoolCacheNameCollision() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke( MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() @@ -440,7 +441,7 @@ public void testDestroyWithStores() throws Exception { registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); - final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID); + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) @@ -652,7 +653,7 @@ public Set getAllIdentifiers() { }; } else if (serviceConfiguration.getServiceType().equals(EhcacheStateService.class)) { if (storeManagerService == null) { - this.storeManagerService = new EhcacheStateServiceImpl(this, getIdentifiers(pools.keySet())); + this.storeManagerService = new EhcacheStateServiceImpl(this, getIdentifiers(pools.keySet()), DEFAULT_MAPPER); } return (T) (this.storeManagerService); } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java similarity index 87% rename from clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java rename to clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java index 9847ed3aa7..27a140905d 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/messages/EhcacheSyncMessageCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
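The renamed EhcacheSyncMessageCodecTest below adds a round trip for the new data message alongside the existing state-message test. Condensed, the flow it exercises looks like this (values copied from the test; the new codec implementation itself is added elsewhere in this patch):

    import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessageCodec;
    import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage;

    import static org.ehcache.clustered.common.internal.store.Util.chainsEqual;
    import static org.ehcache.clustered.common.internal.store.Util.createPayload;
    import static org.ehcache.clustered.common.internal.store.Util.getChain;

    final class SyncCodecRoundTrip {
      public static void main(String[] args) throws Exception {
        EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec();

        EntityDataSyncMessage message = new EntityDataSyncMessage("foo", 123L,
            getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L)));

        // encode/decode take a concurrency key as their first argument; the test passes 0.
        EntityDataSyncMessage decoded = (EntityDataSyncMessage) codec.decode(0, codec.encode(0, message));

        assert decoded.getCacheId().equals(message.getCacheId());
        assert decoded.getKey() == message.getKey();
        assert chainsEqual(decoded.getChain(), message.getChain());
      }
    }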
*/ -package org.ehcache.clustered.server.messages; +package org.ehcache.clustered.server.internal.messages; import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.PoolAllocation; @@ -27,6 +27,9 @@ import java.util.Set; import java.util.UUID; +import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; +import static org.ehcache.clustered.common.internal.store.Util.createPayload; +import static org.ehcache.clustered.common.internal.store.Util.getChain; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -35,7 +38,7 @@ public class EhcacheSyncMessageCodecTest { @Test - public void testEncodeDecode() throws Exception { + public void testStateSyncMessageEncodeDecode() throws Exception { Map sharedPools = new HashMap<>(); ServerSideConfiguration.Pool pool1 = new ServerSideConfiguration.Pool(1, "foo1"); ServerSideConfiguration.Pool pool2 = new ServerSideConfiguration.Pool(2, "foo2"); @@ -113,4 +116,15 @@ public void testEncodeDecode() throws Exception { assertThat(serverStoreConfiguration.getValueSerializerType(), is("valueSerializerType3")); assertThat(serverStoreConfiguration.getConsistency(), is(Consistency.STRONG)); } + + @Test + public void testDataSyncMessageEncodeDecode() throws Exception { + EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(); + EntityDataSyncMessage message = new EntityDataSyncMessage("foo", 123L, + getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L))); + EntityDataSyncMessage decoded = (EntityDataSyncMessage) codec.decode(0, codec.encode(0, message)); + assertThat(decoded.getCacheId(), is(message.getCacheId())); + assertThat(decoded.getKey(), is(message.getKey())); + assertThat(chainsEqual(decoded.getChain(), message.getChain()), is(true)); + } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java index 19f4a34760..befa6ce0e2 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java @@ -20,6 +20,7 @@ import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.Element; +import org.ehcache.clustered.server.KeySegmentMapper; import org.ehcache.clustered.server.store.ChainBuilder; import org.ehcache.clustered.server.store.ElementBuilder; import org.ehcache.clustered.common.internal.store.ServerStore; @@ -45,6 +46,8 @@ public class OffHeapServerStoreTest extends ServerStoreTest { + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + @SuppressWarnings("unchecked") private OffHeapChainMap getOffHeapChainMapMock() { return mock(OffHeapChainMap.class); @@ -52,7 +55,7 @@ private OffHeapChainMap getOffHeapChainMapMock() { @Override public ServerStore newStore() { - return new OffHeapServerStore(new UnlimitedPageSource(new OffHeapBufferSource()), 16); + return new OffHeapServerStore(new UnlimitedPageSource(new OffHeapBufferSource()), DEFAULT_MAPPER); } @Override @@ -173,7 +176,7 @@ public void testCrossSegmentShrinking() { long seed = System.nanoTime(); Random random = new Random(seed); try { - OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), MEGABYTES.toBytes(1L), 
MEGABYTES.toBytes(1)), 16); + OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), MEGABYTES.toBytes(1L), MEGABYTES.toBytes(1)), DEFAULT_MAPPER); ByteBuffer smallValue = ByteBuffer.allocate(1024); for (int i = 0; i < 10000; i++) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java index da28b24b00..1a3545d12c 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java @@ -16,13 +16,12 @@ package org.ehcache.clustered.server.state; +import org.ehcache.clustered.server.KeySegmentMapper; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; import org.junit.Test; -import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceProviderCleanupException; import org.terracotta.entity.ServiceProviderConfiguration; -import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; @@ -33,6 +32,8 @@ public class EhcacheStateServiceProviderTest { + private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + @Test public void testInitialize() { EhcacheStateServiceProvider serviceProvider = new EhcacheStateServiceProvider(); @@ -46,15 +47,15 @@ public void testInitialize() { public void testGetService() { EhcacheStateServiceProvider serviceProvider = new EhcacheStateServiceProvider(); - EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null)); + EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); assertNotNull(ehcacheStateService); - EhcacheStateService sameStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null)); + EhcacheStateService sameStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); assertSame(ehcacheStateService, sameStateService); - EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null)); + EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); assertNotNull(anotherStateService); assertNotSame(ehcacheStateService, anotherStateService); @@ -65,13 +66,13 @@ public void testGetService() { public void testClear() throws ServiceProviderCleanupException { EhcacheStateServiceProvider serviceProvider = new EhcacheStateServiceProvider(); - EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null)); - EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null)); + EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); + EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); serviceProvider.clear(); - EhcacheStateService ehcacheStateServiceAfterClear = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, 
null)); - EhcacheStateService anotherStateServiceAfterClear = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null)); + EhcacheStateService ehcacheStateServiceAfterClear = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); + EhcacheStateService anotherStateServiceAfterClear = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); assertNotSame(ehcacheStateService, ehcacheStateServiceAfterClear); assertNotSame(anotherStateService, anotherStateServiceAfterClear); From 2e4d283b6381b44df9b2e4c9ab9fc19ad436ebec Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Tue, 25 Oct 2016 10:43:10 +0200 Subject: [PATCH 081/218] :construction: Fix #1559 Implement ExecutionStrategy * Bump to latest terracotta .9-beta(2) * ExecutionStrategy first cut * Make reconfigure invalid for Ehcache --- build.gradle | 8 +-- .../messages/ServerStoreOpMessage.java | 2 +- .../server/EhcacheExecutionStrategy.java | 64 +++++++++++++++++++ .../server/EhcacheServerEntityService.java | 12 ++++ 4 files changed, 81 insertions(+), 5 deletions(-) create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java diff --git a/build.gradle b/build.gradle index e38d92d174..f26ed9f42b 100644 --- a/build.gradle +++ b/build.gradle @@ -30,13 +30,13 @@ ext { // Clustered terracottaPlatformVersion = '5.0.8.beta2' managementVersion = terracottaPlatformVersion - terracottaApisVersion = '1.0.8.beta' - terracottaCoreVersion = '5.0.8-beta' + terracottaApisVersion = '1.0.9.beta' + terracottaCoreVersion = '5.0.9-beta2' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.8.beta' + terracottaPassthroughTestingVersion = '1.0.9.beta2' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.8-beta' + galvanVersion = '1.0.9-beta2' // Tools findbugsVersion = '3.0.1' diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java index 4d8fbcad78..82e6a4b146 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java @@ -229,7 +229,7 @@ public ServerStoreOp operation() { } } - static class ClearMessage extends ServerStoreOpMessage { + public static class ClearMessage extends ServerStoreOpMessage { ClearMessage(String cacheId, UUID clientId) { super(cacheId); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java new file mode 100644 index 0000000000..c6dfa6b7fc --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java @@ -0,0 +1,64 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.server; + +import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; +import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.ehcache.clustered.server.internal.messages.EntitySyncMessage; +import org.terracotta.entity.ExecutionStrategy; + +/** + * EhcacheExecutionStrategy + */ +class EhcacheExecutionStrategy implements ExecutionStrategy { + @Override + public Location getExecutionLocation(EhcacheEntityMessage message) { + if (message instanceof ServerStoreOpMessage.ReplaceAtHeadMessage || message instanceof ServerStoreOpMessage.ClearMessage) { + // ServerStoreOp needing replication + return Location.BOTH; + } else if (message instanceof ServerStoreOpMessage) { + // ServerStoreOp not needing replication + return Location.ACTIVE; + } else if (message instanceof LifecycleMessage.ConfigureStoreManager) { + return Location.BOTH; + } else if (message instanceof LifecycleMessage.ValidateStoreManager) { + return Location.ACTIVE; + } else if (message instanceof LifecycleMessage.CreateServerStore) { + return Location.BOTH; + } else if (message instanceof LifecycleMessage.ValidateServerStore) { + return Location.ACTIVE; + } else if (message instanceof LifecycleMessage.ReleaseServerStore) { + return Location.ACTIVE; + } else if (message instanceof LifecycleMessage.DestroyServerStore) { + return Location.BOTH; + } else if (message instanceof StateRepositoryOpMessage.PutIfAbsentMessage) { + // StateRepositoryOp needing replication + return Location.BOTH; + } else if (message instanceof StateRepositoryOpMessage) { + // StateRepositoryOp not needing replication + return Location.ACTIVE; + } else if (message instanceof ClientIDTrackerMessage) { + return Location.PASSIVE; + } else if (message instanceof EntitySyncMessage) { + throw new AssertionError("Unexpected use of ExecutionStrategy for sync messages"); + } + throw new AssertionError("Unknown message type: " + message.getClass()); + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java index c4a6e254af..0aaa732db3 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java @@ -19,8 +19,10 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessageCodec; +import org.terracotta.entity.CommonServerEntity; import org.terracotta.entity.ConcurrencyStrategy; import org.terracotta.entity.EntityServerService; +import org.terracotta.entity.ExecutionStrategy; import 
org.terracotta.entity.MessageCodec; import org.terracotta.entity.PassiveServerEntity; import org.terracotta.entity.ServiceRegistry; @@ -68,4 +70,14 @@ public MessageCodec getMessageCodec public SyncMessageCodec getSyncMessageCodec() { return new EhcacheSyncMessageCodec(); } + + @Override + public > AP reconfigureEntity(ServiceRegistry registry, AP oldEntity, byte[] configuration) { + throw new UnsupportedOperationException("Reconfigure not supported in Ehcache"); + } + + @Override + public ExecutionStrategy getExecutionStrategy(byte[] configuration) { + return new EhcacheExecutionStrategy(); + } } From 76d867d1c2da80d5f3c1bc01b0f0cf471c065294 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Fri, 7 Oct 2016 09:09:18 +0530 Subject: [PATCH 082/218] ReconnectListeners for reconnect data #1211 --- .../client/internal/EhcacheClientEntity.java | 13 +++++++++- .../store/StrongServerStoreProxy.java | 13 +++++++--- .../internal/messages/ReconnectData.java | 24 ++++++++++++++++--- .../messages/ReconnectDataCodecTest.java | 23 ++++++++++++++++++ .../server/EhcachePassiveEntity.java | 6 +++-- .../server/state/EhcacheStateService.java | 1 + 6 files changed, 71 insertions(+), 9 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index ebb8f81923..33f8f1430a 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -78,12 +78,17 @@ public interface DisconnectionListener { void onDisconnection(); } + public interface ReconnectListener { + void onHandleReconnect(ReconnectData reconnectData); + } + private final AtomicLong sequenceGenerator = new AtomicLong(0L); private final EntityClientEndpoint endpoint; private final LifeCycleMessageFactory messageFactory; private final Map, List>> responseListeners = new ConcurrentHashMap, List>>(); private final List disconnectionListeners = new CopyOnWriteArrayList(); + private final List reconnectListeners = new CopyOnWriteArrayList(); private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); private volatile boolean connected = true; private final ReconnectData reconnectData = new ReconnectData(); @@ -104,6 +109,9 @@ public void handleMessage(EntityResponse messageFromServer) { @Override public byte[] createExtendedReconnectData() { + for (ReconnectListener reconnectListener : reconnectListeners) { + reconnectListener.onHandleReconnect(reconnectData); + } return reconnectDataCodec.encode(reconnectData); } @@ -155,6 +163,10 @@ public void addDisconnectionListener(DisconnectionListener listener) { disconnectionListeners.add(listener); } + public void addReconnectListener(ReconnectListener listener) { + reconnectListeners.add(listener); + } + public void addResponseListener(Class responseType, ResponseListener responseListener) { List> responseListeners = this.responseListeners.get(responseType); if (responseListeners == null) { @@ -293,7 +305,6 @@ private EhcacheEntityResponse invokeInternal(TimeoutDuration timeLimit, EhcacheE public InvokeFuture invokeAsync(EhcacheEntityMessage message, boolean replicate) throws MessageCodecException { - InvokeFuture invoke; getClientId(); if (replicate) { message.setId(sequenceGenerator.getAndIncrement()); diff --git 
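The ReconnectListener hook introduced above gives client-side components a callback just before the platform serializes the extended reconnect payload, so each one can fold its in-flight state into the shared ReconnectData. As a rough sketch only (the helper method name, cacheId and pendingHashes parameters are illustrative, not part of the patch; the real registration is the StrongServerStoreProxy constructor in the next hunk), a dependent component would register itself roughly like this:

    // Illustrative sketch, assuming java.util.Set/HashSet and the client entity classes are imported:
    // re-advertise pending hash invalidations for a (hypothetical) cache when the connection is rebuilt.
    static void trackPendingInvalidations(EhcacheClientEntity entity, final String cacheId,
                                          final Set<Long> pendingHashes) {
      entity.addReconnectListener(new EhcacheClientEntity.ReconnectListener() {
        @Override
        public void onHandleReconnect(ReconnectData reconnectData) {
          // snapshot the hashes still awaiting invalidation acks and hand them to the reconnect payload
          reconnectData.addInvalidationsInProgress(cacheId, new HashSet<Long>(pendingHashes));
        }
      });
    }
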
a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java index cf06ad66b6..baaea69b22 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java @@ -17,14 +17,17 @@ import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.ReconnectData; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.common.internal.store.Chain; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; @@ -34,9 +37,6 @@ import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -/** - * @author Ludovic Orban - */ public class StrongServerStoreProxy implements ServerStoreProxy { private static final Logger LOGGER = LoggerFactory.getLogger(StrongServerStoreProxy.class); @@ -51,6 +51,13 @@ public class StrongServerStoreProxy implements ServerStoreProxy { public StrongServerStoreProxy(final ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { this.delegate = new NoInvalidationServerStoreProxy(messageFactory, entity); this.entity = entity; + entity.addReconnectListener(new EhcacheClientEntity.ReconnectListener() { + @Override + public void onHandleReconnect(ReconnectData reconnectData) { + Set inflightInvalidations = new HashSet(hashInvalidationsInProgress.keySet()); + reconnectData.addInvalidationsInProgress(delegate.getCacheId(), inflightInvalidations); + } + }); entity.addResponseListener(EhcacheEntityResponse.HashInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { @Override public void onResponse(EhcacheEntityResponse.HashInvalidationDone response) { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java index a28fdb049f..cd46dd71e5 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java @@ -17,6 +17,8 @@ package org.ehcache.clustered.common.internal.messages; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; @@ -26,10 +28,12 @@ public class ReconnectData { private static final byte CLIENT_ID_SIZE = 16; private static final byte ENTRY_SIZE = 4; + private static final byte HASH_SIZE = 8; private volatile UUID clientId; private final Set reconnectData = Collections.newSetFromMap(new ConcurrentHashMap()); private final AtomicInteger reconnectDatalen = new AtomicInteger(CLIENT_ID_SIZE); + private final ConcurrentHashMap> hashInvalidationsInProgressPerCache = new ConcurrentHashMap>(); public UUID getClientId() { if (clientId == null) { @@ -44,13 +48,13 @@ public 
void setClientId(UUID clientId) { public void add(String name) { reconnectData.add(name); - reconnectDatalen.addAndGet(2 * name.length() + ENTRY_SIZE); + reconnectDatalen.addAndGet(2 * name.length() + 2 * ENTRY_SIZE); } public void remove(String name) { if (!reconnectData.contains(name)) { reconnectData.remove(name); - reconnectDatalen.addAndGet(-(2 * name.length() + ENTRY_SIZE)); + reconnectDatalen.addAndGet(-(2 * name.length() + 2 * ENTRY_SIZE)); } } @@ -58,8 +62,22 @@ public Set getAllCaches() { return Collections.unmodifiableSet(reconnectData); } - public int getDataLength() { + int getDataLength() { return reconnectDatalen.get(); } + public void addInvalidationsInProgress(String cacheId, Set hashInvalidationsInProgress) { + hashInvalidationsInProgressPerCache.put(cacheId, hashInvalidationsInProgress); + reconnectDatalen.addAndGet(hashInvalidationsInProgress.size() * HASH_SIZE); + } + + public Set removeInvalidationsInProgress(String cacheId) { + Set hashToInvalidate = hashInvalidationsInProgressPerCache.remove(cacheId); + if (hashToInvalidate != null) { //TODO: while handling eventual + reconnectDatalen.addAndGet(-(hashToInvalidate.size() * HASH_SIZE)); + return hashToInvalidate; + } + return Collections.EMPTY_SET; + } + } diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java index 922487a8ed..ac4bd5383b 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java @@ -16,11 +16,16 @@ package org.ehcache.clustered.common.internal.messages; +import org.hamcrest.Matcher; import org.junit.Test; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; import java.util.UUID; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertThat; @@ -36,6 +41,20 @@ public void testCodec() { reconnectData.setClientId(UUID.randomUUID()); + Set firstSetToInvalidate = new HashSet(); + firstSetToInvalidate.add(1L); + firstSetToInvalidate.add(11L); + firstSetToInvalidate.add(111L); + + Set secondSetToInvalidate = new HashSet(); + secondSetToInvalidate.add(2L); + secondSetToInvalidate.add(22L); + secondSetToInvalidate.add(222L); + secondSetToInvalidate.add(2222L); + reconnectData.addInvalidationsInProgress("test", firstSetToInvalidate); + reconnectData.addInvalidationsInProgress("test1", Collections.EMPTY_SET); + reconnectData.addInvalidationsInProgress("test2", secondSetToInvalidate); + ReconnectDataCodec dataCodec = new ReconnectDataCodec(); ReconnectData decoded = dataCodec.decode(dataCodec.encode(reconnectData)); @@ -44,5 +63,9 @@ public void testCodec() { assertThat(decoded.getClientId(), is(reconnectData.getClientId())); assertThat(decoded.getAllCaches(), containsInAnyOrder("test", "test1", "test2")); + assertThat(decoded.removeInvalidationsInProgress("test"), containsInAnyOrder(firstSetToInvalidate.toArray())); + assertThat(decoded.removeInvalidationsInProgress("test1").isEmpty(), is(true)); + assertThat(decoded.removeInvalidationsInProgress("test2"), containsInAnyOrder(secondSetToInvalidate.toArray())); + } } diff --git 
a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 497e6d8c55..e676be94cc 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -111,6 +111,7 @@ private void invokeRetirementMessages(ClientIDTrackerMessage message) throws Clu switch (message.operation()) { case CHAIN_REPLICATION_OP: + LOGGER.debug("Chain Replication message for msgId {} & client Id {}", message.getId(), message.getClientId()); ChainReplicationMessage retirementMessage = (ChainReplicationMessage)message; ServerStoreImpl cacheStore = ehcacheStateService.getStore(retirementMessage.getCacheId()); if (cacheStore == null) { @@ -121,6 +122,7 @@ private void invokeRetirementMessages(ClientIDTrackerMessage message) throws Clu ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); break; case CLIENTID_TRACK_OP: + LOGGER.debug("ClientIDTrackerMessage message for msgId {} & client Id {}", message.getId(), message.getClientId()); ehcacheStateService.getClientMessageTracker().add(message.getClientId()); break; default: @@ -138,11 +140,12 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu switch (message.operation()) { case APPEND: case GET_AND_APPEND: { + LOGGER.debug("ServerStore append/getAndAppend message for msgId {} & client Id {}", message.getId(), message.getClientId()); ehcacheStateService.getClientMessageTracker().track(message.getId(), message.getClientId()); break; } case REPLACE: { - ServerStoreOpMessage.ReplaceAtHeadMessage replaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage) message; + ServerStoreOpMessage.ReplaceAtHeadMessage replaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage)message; cacheStore.replaceAtHead(replaceAtHeadMessage.getKey(), replaceAtHeadMessage.getExpect(), replaceAtHeadMessage.getUpdate()); break; } @@ -224,7 +227,6 @@ private void createServerStore(CreateServerStore createServerStore) throws Clust ServerStoreConfiguration storeConfiguration = createServerStore.getStoreConfiguration(); ehcacheStateService.createStore(name, storeConfiguration); - } private void destroyServerStore(DestroyServerStore destroyServerStore) throws ClusterException { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index b7ecbe8a67..3185436974 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -53,4 +53,5 @@ public interface EhcacheStateService { StateRepositoryManager getStateRepositoryManager() throws ClusterException; ClientMessageTracker getClientMessageTracker(); + } From 988ae1ac7592387c57f7d0e2aceb447600a8c6ab Mon Sep 17 00:00:00 2001 From: Abhilash Date: Fri, 7 Oct 2016 15:16:29 +0530 Subject: [PATCH 083/218] Inflight invalidations for strong caches #1211 --- .../store/StrongServerStoreProxy.java | 5 +- .../internal/messages/ReconnectData.java | 19 +- .../internal/messages/ReconnectDataCodec.java | 26 +- .../messages/ReconnectDataCodecTest.java | 2 - clustered/integration-test/build.gradle | 7 + ...asicClusteredCacheCRUDReplicationTest.java | 183 ------------ 
...dCacheOpsReplicationMultiThreadedTest.java | 263 ++++++++++++++++++ ...BasicClusteredCacheOpsReplicationTest.java | 212 ++++++++++++++ ...OpsReplicationWithMulitpleClientsTest.java | 210 ++++++++++++++ .../clustered/server/EhcacheActiveEntity.java | 106 +++++-- .../server/EhcachePassiveEntity.java | 2 +- 11 files changed, 818 insertions(+), 217 deletions(-) delete mode 100644 clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheCRUDReplicationTest.java create mode 100644 clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java create mode 100644 clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java create mode 100644 clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java index baaea69b22..7b1056033e 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java @@ -44,7 +44,7 @@ public class StrongServerStoreProxy implements ServerStoreProxy { private final ServerStoreProxy delegate; private final ConcurrentMap hashInvalidationsInProgress = new ConcurrentHashMap(); private final Lock invalidateAllLock = new ReentrantLock(); - private CountDownLatch invalidateAllLatch; + private volatile CountDownLatch invalidateAllLatch; private final List invalidationListeners = new CopyOnWriteArrayList(); private final EhcacheClientEntity entity; @@ -56,6 +56,9 @@ public StrongServerStoreProxy(final ServerStoreMessageFactory messageFactory, fi public void onHandleReconnect(ReconnectData reconnectData) { Set inflightInvalidations = new HashSet(hashInvalidationsInProgress.keySet()); reconnectData.addInvalidationsInProgress(delegate.getCacheId(), inflightInvalidations); + if (invalidateAllLatch != null) { + reconnectData.addClearInProgress(delegate.getCacheId()); + } } }); entity.addResponseListener(EhcacheEntityResponse.HashInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java index cd46dd71e5..b0061b5f92 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java @@ -17,8 +17,7 @@ package org.ehcache.clustered.common.internal.messages; import java.util.Collections; -import java.util.HashMap; -import java.util.Map; +import java.util.HashSet; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; @@ -29,11 +28,13 @@ public class ReconnectData { private static final byte CLIENT_ID_SIZE = 16; private static final byte ENTRY_SIZE = 4; private static final byte HASH_SIZE = 8; + private static final byte CLEAR_IN_PROGRESS_STATUS_SIZE = 1; private volatile UUID clientId; private final Set reconnectData = Collections.newSetFromMap(new ConcurrentHashMap()); private final 
AtomicInteger reconnectDatalen = new AtomicInteger(CLIENT_ID_SIZE); private final ConcurrentHashMap> hashInvalidationsInProgressPerCache = new ConcurrentHashMap>(); + private final Set cachesWithClearInProgress = Collections.newSetFromMap(new ConcurrentHashMap()); public UUID getClientId() { if (clientId == null) { @@ -48,13 +49,13 @@ public void setClientId(UUID clientId) { public void add(String name) { reconnectData.add(name); - reconnectDatalen.addAndGet(2 * name.length() + 2 * ENTRY_SIZE); + reconnectDatalen.addAndGet(2 * name.length() + 2 * ENTRY_SIZE + CLEAR_IN_PROGRESS_STATUS_SIZE); } public void remove(String name) { if (!reconnectData.contains(name)) { reconnectData.remove(name); - reconnectDatalen.addAndGet(-(2 * name.length() + 2 * ENTRY_SIZE)); + reconnectDatalen.addAndGet(-(2 * name.length() + 2 * ENTRY_SIZE + CLEAR_IN_PROGRESS_STATUS_SIZE)); } } @@ -80,4 +81,14 @@ public Set removeInvalidationsInProgress(String cacheId) { return Collections.EMPTY_SET; } + public void addClearInProgress(String cacheId) { + cachesWithClearInProgress.add(cacheId); + } + + public Set getClearInProgressCaches() { + Set caches = new HashSet(cachesWithClearInProgress); + cachesWithClearInProgress.clear(); + return caches; + } + } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java index 7a27b712a3..9822128b5a 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java @@ -20,6 +20,8 @@ import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import java.nio.ByteBuffer; +import java.util.HashSet; +import java.util.Set; import java.util.UUID; public class ReconnectDataCodec { @@ -27,9 +29,20 @@ public class ReconnectDataCodec { public byte[] encode(ReconnectData reconnectData) { ByteBuffer encodedMsg = ByteBuffer.allocate(reconnectData.getDataLength()); encodedMsg.put(ClusteredEhcacheIdentity.serialize(reconnectData.getClientId())); + Set clearInProgress = reconnectData.getClearInProgressCaches(); for (String cacheId : reconnectData.getAllCaches()) { encodedMsg.putInt(cacheId.length()); CodecUtil.putStringAsCharArray(encodedMsg, cacheId); + if (clearInProgress.contains(cacheId)) { + encodedMsg.put((byte)1); + } else { + encodedMsg.put((byte)0); + } + Set hashToInvalidate = reconnectData.removeInvalidationsInProgress(cacheId); + encodedMsg.putInt(hashToInvalidate.size()); + for (Long hash : hashToInvalidate) { + encodedMsg.putLong(hash); + } } return encodedMsg.array(); @@ -44,7 +57,18 @@ public ReconnectData decode(byte[] payload) { while (byteBuffer.hasRemaining()) { int cacheIdSize = byteBuffer.getInt(); - reconnectData.add(CodecUtil.getStringFromBuffer(byteBuffer, cacheIdSize)); + String cacheId = CodecUtil.getStringFromBuffer(byteBuffer, cacheIdSize); + reconnectData.add(cacheId); + byte clearInProgress = byteBuffer.get(); + if (clearInProgress == 1) { + reconnectData.addClearInProgress(cacheId); + } + Set hashToInvalidate = new HashSet(); + int numOfHash = byteBuffer.getInt(); + for (int i = 0; i < numOfHash; i++) { + hashToInvalidate.add(byteBuffer.getLong()); + } + reconnectData.addInvalidationsInProgress(cacheId, hashToInvalidate); } return reconnectData; } diff --git 
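Putting the codec and the size bookkeeping together, the encoded reconnect payload is: a 16-byte client id, then for every cache an int name length, the UTF-16 characters of the name, one clear-in-progress flag byte, an int hash count, and 8 bytes per in-flight hash. A minimal worked example, assuming a hypothetical cache named "pets" with two pending hash invalidations (the example class and values are illustrative only, not part of the patch):

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.UUID;
    import org.ehcache.clustered.common.internal.messages.ReconnectData;
    import org.ehcache.clustered.common.internal.messages.ReconnectDataCodec;

    public class ReconnectPayloadSizeExample {
      public static void main(String[] args) {
        ReconnectData data = new ReconnectData();
        data.setClientId(UUID.randomUUID());                 // 16 bytes of client id
        data.add("pets");                                    // 4 (name length) + 8 (UTF-16 chars) + 1 (clear flag) + 4 (hash count)
        data.addInvalidationsInProgress("pets",
            new HashSet<Long>(Arrays.asList(5L, 42L)));      // 2 * 8 bytes of in-flight hashes
        byte[] encoded = new ReconnectDataCodec().encode(data);
        System.out.println(encoded.length);                  // 16 + 4 + 8 + 1 + 4 + 16 = 49
      }
    }
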
a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java index ac4bd5383b..cbbf23733e 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java @@ -16,7 +16,6 @@ package org.ehcache.clustered.common.internal.messages; -import org.hamcrest.Matcher; import org.junit.Test; import java.util.Collections; @@ -25,7 +24,6 @@ import java.util.UUID; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertThat; diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index 2501d2d035..4037c76dfd 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -67,6 +67,13 @@ compileTestJava { options.forkOptions.executable = Jvm.current().javacExecutable } +sourceCompatibility = 1.8 +targetCompatibility = 1.8 + +checkstyle { + toolVersion = '5.9' +} + test { dependsOn unzipKit executable = Jvm.current().javaExecutable diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheCRUDReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheCRUDReplicationTest.java deleted file mode 100644 index 93281289c4..0000000000 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheCRUDReplicationTest.java +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.ehcache.clustered.replication; - -import org.ehcache.Cache; -import org.ehcache.PersistentCacheManager; -import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; -import org.ehcache.config.builders.ResourcePoolsBuilder; -import org.ehcache.config.units.EntryUnit; -import org.ehcache.config.units.MemoryUnit; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.terracotta.testing.rules.BasicExternalCluster; -import org.terracotta.testing.rules.Cluster; - -import java.io.File; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; -import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; -import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; - -public class BasicClusteredCacheCRUDReplicationTest { - - private static final String RESOURCE_CONFIG = - "" - + "" - + "16" - + "" + - "\n"; - - @ClassRule - public static Cluster CLUSTER = - new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); - - @Before - public void startServers() throws Exception { - CLUSTER.getClusterControl().waitForActive(); - CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); - } - - @After - public void tearDown() throws Exception { - CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().startAllServers(); - } - - @Test - public void testCRUD() throws Exception { - final CacheManagerBuilder clusteredCacheManagerBuilder - = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm-replication")) - .autoCreate() - .defaultServerResource("primary-server-resource")); - final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true); - - try { - CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))).build(); - - Cache cache = cacheManager.createCache("clustered-cache", config); - cache.put(1L, "The one"); - cache.put(2L, "The two"); - cache.put(1L, "Another one"); - cache.put(3L, "The three"); - cache.put(4L, "The four"); - assertThat(cache.get(1L), equalTo("Another one")); - assertThat(cache.get(2L), equalTo("The two")); - assertThat(cache.get(3L), equalTo("The three")); - cache.remove(4L); - - CLUSTER.getClusterControl().terminateActive(); - - assertThat(cache.get(1L), equalTo("Another one")); - assertThat(cache.get(2L), equalTo("The two")); - assertThat(cache.get(3L), equalTo("The three")); - assertThat(cache.get(4L), 
nullValue()); - - } finally { - cacheManager.close(); - } - } - - @Test - public void testBulkOps() throws Exception { - final CacheManagerBuilder clusteredCacheManagerBuilder = - newCacheManagerBuilder() - .with(cluster(CLUSTER.getConnectionURI().resolve("/bulk-cm-replication")).autoCreate()) - .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))); - - final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); - try { - final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); - - Map entriesMap = new HashMap(); - entriesMap.put(1L, "one"); - entriesMap.put(2L, "two"); - entriesMap.put(3L, "three"); - entriesMap.put(4L, "four"); - entriesMap.put(5L, "five"); - entriesMap.put(6L, "six"); - cache1.putAll(entriesMap); - - CLUSTER.getClusterControl().terminateActive(); - - Set keySet = entriesMap.keySet(); - Map all = cache1.getAll(keySet); - assertThat(all.get(1L), is("one")); - assertThat(all.get(2L), is("two")); - assertThat(all.get(3L), is("three")); - assertThat(all.get(4L), is("four")); - assertThat(all.get(5L), is("five")); - assertThat(all.get(6L), is("six")); - - } finally { - cacheManager1.close(); - } - } - - @Test - public void testCAS() throws Exception { - final CacheManagerBuilder clusteredCacheManagerBuilder = - newCacheManagerBuilder() - .with(cluster(CLUSTER.getConnectionURI().resolve("/cas-cm-replication")).autoCreate()) - .withCache("clustered-cache", newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))) - .add(new ClusteredStoreConfiguration(Consistency.STRONG))); - - final PersistentCacheManager cacheManager1 = clusteredCacheManagerBuilder.build(true); - try { - final Cache cache1 = cacheManager1.getCache("clustered-cache", Long.class, String.class); - - assertThat(cache1.putIfAbsent(1L, "one"), nullValue()); - assertThat(cache1.putIfAbsent(2L, "two"), nullValue()); - assertThat(cache1.putIfAbsent(3L, "three"), nullValue()); - assertThat(cache1.replace(3L, "another one", "yet another one"), is(false)); - - CLUSTER.getClusterControl().terminateActive(); - - assertThat(cache1.putIfAbsent(1L, "another one"), is("one")); - assertThat(cache1.remove(2L, "not two"), is(false)); - assertThat(cache1.replace(3L, "three", "another three"), is(true)); - assertThat(cache1.replace(2L, "new two"), is("two")); - } finally { - cacheManager1.close(); - } - } - -} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java new file mode 100644 index 0000000000..708e1646c9 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java @@ -0,0 +1,263 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.replication; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThat; + +/** + * This test asserts Active-Passive fail-over with + * multi-threaded/multi-client scenarios. + * Note that fail-over is happening while client threads are still writing + * Finally the same key set correctness is asserted. 
+ */ +public class BasicClusteredCacheOpsReplicationMultiThreadedTest { + + private static final int NUM_OF_THREADS = 10; + private static final int JOB_SIZE = 100; + private static final String RESOURCE_CONFIG = + "" + + "" + + "16" + + "" + + "\n"; + + private static CacheManager CACHE_MANAGER1; + private static CacheManager CACHE_MANAGER2; + private static Cache CACHE1; + private static Cache CACHE2; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @Before + public void startServers() throws Exception { + CLUSTER.getClusterControl().waitForActive(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm-replication")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + CACHE_MANAGER1 = clusteredCacheManagerBuilder.build(true); + CACHE_MANAGER2 = clusteredCacheManagerBuilder.build(true); + CacheConfiguration config = CacheConfigurationBuilder + .newCacheConfigurationBuilder(Long.class, BlobValue.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(500, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .build(); + + CACHE1 = CACHE_MANAGER1.createCache("clustered-cache", config); + CACHE2 = CACHE_MANAGER2.createCache("clustered-cache", config); + } + + @After + public void tearDown() throws Exception { + CACHE_MANAGER1.close(); + CACHE_MANAGER2.close(); + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().startAllServers(); + } + + @Test + public void testCRUD() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + Random random = new Random(); + Set universalSet = Collections.newSetFromMap(new ConcurrentHashMap<>()); + + ExecutorService executorService = Executors.newWorkStealingPool(NUM_OF_THREADS); + + List futures = new ArrayList<>(); + + caches.forEach(cache -> { + for (int i = 0; i < NUM_OF_THREADS; i++) { + futures.add(executorService.submit(() -> random.longs().limit(JOB_SIZE).forEach(x -> { + cache.put(x, new BlobValue()); + universalSet.add(x); + }))); + } + }); + + //This step is to add values in local tier randomly to test invalidations happen correctly + futures.add(executorService.submit(() -> universalSet.forEach(x -> { + CACHE1.get(x); + CACHE2.get(x); + }))); + + CLUSTER.getClusterControl().terminateActive(); + + for (Future f : futures ) { + f.get(); + } + + Set readKeysByCache1AfterFailOver = new HashSet<>(); + Set readKeysByCache2AfterFailOver = new HashSet<>(); + universalSet.forEach(x -> { + if (CACHE1.get(x) != null) { + readKeysByCache1AfterFailOver.add(x); + } + if (CACHE2.get(x) != null) { + readKeysByCache2AfterFailOver.add(x); + } + }); + + assertThat(readKeysByCache2AfterFailOver.size(), equalTo(readKeysByCache1AfterFailOver.size())); + + readKeysByCache2AfterFailOver.stream().forEach(y -> assertThat(readKeysByCache1AfterFailOver.contains(y), is(true))); + + } + + @Test + public void testBulkOps() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + Random random = new Random(); + Set universalSet = Collections.newSetFromMap(new 
ConcurrentHashMap<>()); + + ExecutorService executorService = Executors.newWorkStealingPool(NUM_OF_THREADS); + + List futures = new ArrayList<>(); + + caches.forEach(cache -> { + for (int i = 0; i < NUM_OF_THREADS; i++) { + Map map = random.longs().limit(JOB_SIZE).collect(HashMap::new, (hashMap, x) -> hashMap.put(x, new BlobValue()), HashMap::putAll); + futures.add(executorService.submit(() -> { + cache.putAll(map); + universalSet.addAll(map.keySet()); + })); + } + }); + + //This step is to add values in local tier randomly to test invalidations happen correctly + futures.add(executorService.submit(() -> { + universalSet.forEach(x -> { + CACHE1.get(x); + CACHE2.get(x); + }); + })); + + CLUSTER.getClusterControl().terminateActive(); + + for (Future f : futures ) { + f.get(); + } + + Set readKeysByCache1AfterFailOver = new HashSet<>(); + Set readKeysByCache2AfterFailOver = new HashSet<>(); + universalSet.forEach(x -> { + if (CACHE1.get(x) != null) { + readKeysByCache1AfterFailOver.add(x); + } + if (CACHE2.get(x) != null) { + readKeysByCache2AfterFailOver.add(x); + } + }); + + assertThat(readKeysByCache2AfterFailOver.size(), equalTo(readKeysByCache1AfterFailOver.size())); + + readKeysByCache2AfterFailOver.stream().forEach(y -> assertThat(readKeysByCache1AfterFailOver.contains(y), is(true))); + + } + + @Test + public void testClear() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + Random random = new Random(); + Set universalSet = Collections.newSetFromMap(new ConcurrentHashMap<>()); + + ExecutorService executorService = Executors.newWorkStealingPool(NUM_OF_THREADS); + + List futures = new ArrayList<>(); + + caches.forEach(cache -> { + for (int i = 0; i < NUM_OF_THREADS; i++) { + Map map = random.longs().limit(JOB_SIZE).collect(HashMap::new, (hashMap, x) -> hashMap.put(x, new BlobValue()), HashMap::putAll); + futures.add(executorService.submit(() -> { + cache.putAll(map); + universalSet.addAll(map.keySet()); + })); + } + }); + + for (Future f : futures ) { + f.get(); + } + + universalSet.forEach(x -> { + CACHE1.get(x); + CACHE2.get(x); + }); + + Future clearFuture = executorService.submit(() -> CACHE1.clear()); + + CLUSTER.getClusterControl().terminateActive(); + + clearFuture.get(); + + universalSet.forEach(x -> assertThat(CACHE2.get(x), nullValue())); + + } + + private static class BlobValue implements Serializable { + private final byte[] data = new byte[10 * 1024]; + } + +} \ No newline at end of file diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java new file mode 100644 index 0000000000..b6d49ac753 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java @@ -0,0 +1,212 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.replication; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThat; + +public class BasicClusteredCacheOpsReplicationTest { + + private static final String RESOURCE_CONFIG = + "" + + "" + + "16" + + "" + + "\n"; + + private static CacheManager CACHE_MANAGER; + private static Cache CACHE1; + private static Cache CACHE2; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @Before + public void startServers() throws Exception { + CLUSTER.getClusterControl().waitForActive(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/cm-replication")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + CACHE_MANAGER = clusteredCacheManagerBuilder.build(true); + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .build(); + + CACHE1 = CACHE_MANAGER.createCache("clustered-cache", config); + CACHE2 = CACHE_MANAGER.createCache("another-cache", config); + } + + @After + public void tearDown() throws Exception { + CACHE_MANAGER.close(); + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().startAllServers(); + } + + @Test + public void testCRUD() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + caches.forEach(x -> { + x.put(1L, "The one"); + x.put(2L, "The two"); + x.put(1L, "Another one"); + x.put(3L, "The three"); + x.put(4L, "The four"); + assertThat(x.get(1L), equalTo("Another one")); + assertThat(x.get(2L), equalTo("The two")); + assertThat(x.get(3L), equalTo("The three")); + x.remove(4L); + }); + + 
CLUSTER.getClusterControl().terminateActive(); + + caches.forEach(x -> { + assertThat(x.get(1L), equalTo("Another one")); + assertThat(x.get(2L), equalTo("The two")); + assertThat(x.get(3L), equalTo("The three")); + assertThat(x.get(4L), nullValue()); + }); + } + + @Test + public void testBulkOps() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + + Map entriesMap = new HashMap<>(); + entriesMap.put(1L, "one"); + entriesMap.put(2L, "two"); + entriesMap.put(3L, "three"); + entriesMap.put(4L, "four"); + entriesMap.put(5L, "five"); + entriesMap.put(6L, "six"); + caches.forEach(cache -> cache.putAll(entriesMap)); + + CLUSTER.getClusterControl().terminateActive(); + + Set keySet = entriesMap.keySet(); + caches.forEach(cache -> { + Map all = cache.getAll(keySet); + assertThat(all.get(1L), is("one")); + assertThat(all.get(2L), is("two")); + assertThat(all.get(3L), is("three")); + assertThat(all.get(4L), is("four")); + assertThat(all.get(5L), is("five")); + assertThat(all.get(6L), is("six")); + }); + + } + + @Test + public void testCAS() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + caches.forEach(cache -> { + assertThat(cache.putIfAbsent(1L, "one"), nullValue()); + assertThat(cache.putIfAbsent(2L, "two"), nullValue()); + assertThat(cache.putIfAbsent(3L, "three"), nullValue()); + assertThat(cache.replace(3L, "another one", "yet another one"), is(false)); + }); + + CLUSTER.getClusterControl().terminateActive(); + + caches.forEach(cache -> { + assertThat(cache.putIfAbsent(1L, "another one"), is("one")); + assertThat(cache.remove(2L, "not two"), is(false)); + assertThat(cache.replace(3L, "three", "another three"), is(true)); + assertThat(cache.replace(2L, "new two"), is("two")); + }); + } + + @Test + public void testClear() throws Exception { + + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + + Map entriesMap = new HashMap<>(); + entriesMap.put(1L, "one"); + entriesMap.put(2L, "two"); + entriesMap.put(3L, "three"); + entriesMap.put(4L, "four"); + entriesMap.put(5L, "five"); + entriesMap.put(6L, "six"); + caches.forEach(cache -> cache.putAll(entriesMap)); + + Set keySet = entriesMap.keySet(); + caches.forEach(cache -> { + Map all = cache.getAll(keySet); + assertThat(all.get(1L), is("one")); + assertThat(all.get(2L), is("two")); + assertThat(all.get(3L), is("three")); + assertThat(all.get(4L), is("four")); + assertThat(all.get(5L), is("five")); + assertThat(all.get(6L), is("six")); + }); + + CACHE1.clear(); + CACHE2.clear(); + + CLUSTER.getClusterControl().terminateActive(); + + keySet.forEach(x -> assertThat(CACHE1.get(x), nullValue())); + keySet.forEach(x -> assertThat(CACHE2.get(x), nullValue())); + + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java new file mode 100644 index 0000000000..4a53cddbba --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java @@ -0,0 +1,210 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.replication; + +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteredStoreConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThat; + +/** + * The point of this test is to assert proper data read after fail-over handling. 
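+ * Two cache managers (and therefore two clients) work against the same clustered cache: one client writes,
+ * the other reads part of the data before the active server is terminated, and after fail-over the entries
+ * still visible through one client must also be visible (or, once cleared, absent) through the other.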
+ */ +public class BasicClusteredCacheOpsReplicationWithMulitpleClientsTest { + + private static final String RESOURCE_CONFIG = + "" + + "" + + "16" + + "" + + "\n"; + + private static CacheManager CACHE_MANAGER1; + private static CacheManager CACHE_MANAGER2; + private static Cache CACHE1; + private static Cache CACHE2; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @Before + public void startServers() throws Exception { + CLUSTER.getClusterControl().waitForActive(); + CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); + final CacheManagerBuilder clusteredCacheManagerBuilder + = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm-replication")) + .autoCreate() + .defaultServerResource("primary-server-resource")); + CACHE_MANAGER1 = clusteredCacheManagerBuilder.build(true); + CACHE_MANAGER2 = clusteredCacheManagerBuilder.build(true); + CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, BlobValue.class, + ResourcePoolsBuilder.newResourcePoolsBuilder().heap(500, EntryUnit.ENTRIES) + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .build(); + + CACHE1 = CACHE_MANAGER1.createCache("clustered-cache", config); + CACHE2 = CACHE_MANAGER2.createCache("clustered-cache", config); + } + + @After + public void tearDown() throws Exception { + CACHE_MANAGER1.close(); + CACHE_MANAGER2.close(); + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().startAllServers(); + } + + @Test + public void testCRUD() throws Exception { + Random random = new Random(); + LongStream longStream = random.longs(1000); + Set added = new HashSet<>(); + longStream.forEach(x -> { + CACHE1.put(x, new BlobValue()); + added.add(x); + }); + + Set readKeysByCache2BeforeFailOver = new HashSet<>(); + added.forEach(x -> { + if (CACHE2.get(x) != null) { + readKeysByCache2BeforeFailOver.add(x); + } + }); + + CLUSTER.getClusterControl().terminateActive(); + + Set readKeysByCache1AfterFailOver = new HashSet<>(); + added.forEach(x -> { + if (CACHE1.get(x) != null) { + readKeysByCache1AfterFailOver.add(x); + } + }); + + assertThat(readKeysByCache2BeforeFailOver.size(), greaterThanOrEqualTo(readKeysByCache1AfterFailOver.size())); + + readKeysByCache1AfterFailOver.stream().filter(readKeysByCache2BeforeFailOver::contains).forEach(y -> assertThat(CACHE2.get(y), notNullValue())); + + } + + @Test + public void testBulkOps() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + + Map entriesMap = new HashMap<>(); + + Random random = new Random(); + LongStream longStream = random.longs(1000); + + longStream.forEach(x -> entriesMap.put(x, new BlobValue())); + caches.forEach(cache -> cache.putAll(entriesMap)); + + Set keySet = entriesMap.keySet(); + + Set readKeysByCache2BeforeFailOver = new HashSet<>(); + keySet.forEach(x -> { + if (CACHE2.get(x) != null) { + readKeysByCache2BeforeFailOver.add(x); + } + }); + + CLUSTER.getClusterControl().terminateActive(); + + Set readKeysByCache1AfterFailOver = new HashSet<>(); + keySet.forEach(x -> { + if (CACHE1.get(x) != null) { + readKeysByCache1AfterFailOver.add(x); + } + }); + + assertThat(readKeysByCache2BeforeFailOver.size(), 
greaterThanOrEqualTo(readKeysByCache1AfterFailOver.size())); + + readKeysByCache1AfterFailOver.stream().filter(readKeysByCache2BeforeFailOver::contains).forEach(y -> assertThat(CACHE2.get(y), notNullValue())); + + } + + @Test + public void testClear() throws Exception { + List> caches = new ArrayList<>(); + caches.add(CACHE1); + caches.add(CACHE2); + + Map entriesMap = new HashMap<>(); + + Random random = new Random(); + LongStream longStream = random.longs(1000); + + longStream.forEach(x -> entriesMap.put(x, new BlobValue())); + caches.forEach(cache -> cache.putAll(entriesMap)); + + Set keySet = entriesMap.keySet(); + + Set readKeysByCache2BeforeFailOver = new HashSet<>(); + keySet.forEach(x -> { + if (CACHE2.get(x) != null) { + readKeysByCache2BeforeFailOver.add(x); + } + }); + + CACHE1.clear(); + + CLUSTER.getClusterControl().terminateActive(); + + readKeysByCache2BeforeFailOver.forEach(x -> assertThat(CACHE2.get(x), nullValue())); + + } + + private static class BlobValue implements Serializable { + private final byte[] data = new byte[10 * 1024]; + } +} \ No newline at end of file diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index fa4392ba89..fc9f2b4338 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -15,10 +15,12 @@ */ package org.ehcache.clustered.server; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; @@ -101,21 +103,22 @@ class EhcacheActiveEntity implements ActiveServerEntity clientStateMap = new HashMap(); + private final Map clientStateMap = new HashMap<>(); private final ConcurrentHashMap> storeClientMap = - new ConcurrentHashMap>(); + new ConcurrentHashMap<>(); private final ConcurrentHashMap clientIdMap = new ConcurrentHashMap<>(); private final Set trackedClients = Collections.newSetFromMap(new ConcurrentHashMap<>()); private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); private final ServerStoreCompatibility storeCompatibility = new ServerStoreCompatibility(); private final EhcacheEntityResponseFactory responseFactory; - private final ConcurrentMap clientsWaitingForInvalidation = new ConcurrentHashMap(); + private final ConcurrentMap clientsWaitingForInvalidation = new ConcurrentHashMap<>(); private final AtomicInteger invalidationIdGenerator = new AtomicInteger(); private final ClientCommunicator clientCommunicator; private final EhcacheStateService ehcacheStateService; private final IEntityMessenger entityMessenger; + private volatile ConcurrentHashMap> inflightInvalidations; static class InvalidationHolder { final ClientDescriptor clientDescriptorWaitingForInvalidation; @@ -182,7 +185,7 @@ public Class getServiceType() { */ // This method is intended for unit test use; modifications are likely needed for other (monitoring) purposes Map> getConnectedClients() { - final HashMap> clientMap = new HashMap>(); + final HashMap> clientMap = new HashMap<>(); for (Entry entry : clientStateMap.entrySet()) { clientMap.put(entry.getKey(), entry.getValue().getAttachedStores()); } @@ -197,9 +200,9 @@ Map> getConnectedClients() { */ // This method is intended for unit test use; modifications are 
likely needed for other (monitoring) purposes Map> getInUseStores() { - final HashMap> storeMap = new HashMap>(); + final HashMap> storeMap = new HashMap<>(); for (Map.Entry> entry : storeClientMap.entrySet()) { - storeMap.put(entry.getKey(), Collections.unmodifiableSet(new HashSet(entry.getValue()))); + storeMap.put(entry.getKey(), Collections.unmodifiableSet(new HashSet<>(entry.getValue()))); } return Collections.unmodifiableMap(storeMap); } @@ -279,6 +282,9 @@ public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEn @Override public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedReconnectData) { + if (inflightInvalidations == null) { + throw new AssertionError("Load existing was not invoked before handleReconnect"); + } ClientState clientState = this.clientStateMap.get(clientDescriptor); if (clientState == null) { throw new AssertionError("Client "+ clientDescriptor +" trying to reconnect is not connected to entity"); @@ -287,6 +293,7 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe ReconnectData reconnectData = reconnectDataCodec.decode(extendedReconnectData); addClientId(clientDescriptor, reconnectData.getClientId()); Set cacheIds = reconnectData.getAllCaches(); + Set clearInProgressCaches = reconnectData.getClearInProgressCaches(); for (final String cacheId : cacheIds) { ServerStoreImpl serverStore = ehcacheStateService.getStore(cacheId); if (serverStore == null) { @@ -295,12 +302,20 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe LOGGER.warn("ServerStore '{}' does not exist as expected by Client '{}'.", cacheId, clientDescriptor); continue; } - serverStore.setEvictionListener(new ServerStoreEvictionListener() { - @Override - public void onEviction(long key) { - invalidateHashAfterEviction(cacheId, key); - } - }); + if (serverStore.getStoreConfiguration().getConsistency().equals(Consistency.STRONG)) { + Set invalidationsInProgress = reconnectData.removeInvalidationsInProgress(cacheId); + LOGGER.debug("Number of Inflight Invalidations from client ID {} for cache {} is {}.", reconnectData.getClientId(), cacheId, invalidationsInProgress + .size()); + inflightInvalidations.compute(cacheId, (s, tuples) -> { + if (tuples == null) { + tuples = new ArrayList<>(); + } + tuples.add(new InvalidationTuple(clientDescriptor, invalidationsInProgress, clearInProgressCaches.contains(cacheId))); + return tuples; + }); + } + + serverStore.setEvictionListener(key -> invalidateHashAfterEviction(cacheId, key)); attachStore(clientDescriptor, cacheId); } LOGGER.info("Client '{}' successfully reconnected to newly promoted ACTIVE after failover.", clientDescriptor); @@ -341,7 +356,8 @@ public void createNew() { @Override public void loadExisting() { - //nothing to do + LOGGER.debug("Preparing for handling Inflight Invalidations and independent Passive Evictions in loadExisting"); + inflightInvalidations = new ConcurrentHashMap<>(); } private void validateClientConnected(ClientDescriptor clientDescriptor) throws ClusterException { @@ -405,6 +421,22 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client throw new LifecycleException("Client not attached to clustered tier '" + message.getCacheId() + "'"); } + // This logic totally counts on the fact that invokes will only happen + // after all handleReconnects are done, else this is flawed. 
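+ // handleReconnect collects, per cache, the invalidations (and any clear) that each reconnecting client still
+ // had in flight, e.g. inflightInvalidations = { "clustered-cache" -> [ (client, {hash1, hash2}, clearInProgress) ] }.
+ // The first server store operation seen for a cache re-fires those invalidations and then removes the entry,
+ // so subsequent operations on that cache skip this block entirely.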
+ if (inflightInvalidations != null && inflightInvalidations.containsKey(message.getCacheId())) { + inflightInvalidations.computeIfPresent(message.getCacheId(), (cacheId, tuples) -> { + LOGGER.debug("Stalling all operations for cache {} for firing inflight invalidations again.", cacheId); + tuples.forEach(invalidationState -> { + if (invalidationState.isClearInProgress()) { + invalidateAll(invalidationState.getClientDescriptor(), cacheId); + } + invalidationState.getInvalidationsInProgress() + .forEach(hashInvalidationToBeResent -> invalidateHashForClient(invalidationState.getClientDescriptor(), cacheId, hashInvalidationToBeResent)); + }); + return null; + }); + } + switch (message.operation()) { case GET: { ServerStoreOpMessage.GetMessage getMessage = (ServerStoreOpMessage.GetMessage) message; @@ -412,7 +444,7 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client } case APPEND: { ServerStoreOpMessage.AppendMessage appendMessage = (ServerStoreOpMessage.AppendMessage)message; - cacheStore.getAndAppend(appendMessage.getKey(), appendMessage.getPayload()); + cacheStore.append(appendMessage.getKey(), appendMessage.getPayload()); sendMessageToSelfAndDeferRetirement(appendMessage, cacheStore.get(appendMessage.getKey())); invalidateHashForClient(clientDescriptor, appendMessage.getCacheId(), appendMessage.getKey()); return responseFactory.success(); @@ -536,6 +568,11 @@ private void invalidateAll(ClientDescriptor originatingClientDescriptor, String private void clientInvalidated(ClientDescriptor clientDescriptor, int invalidationId) { InvalidationHolder invalidationHolder = clientsWaitingForInvalidation.get(invalidationId); + if (invalidationHolder == null) { // Happens when client is re-sending/sending invalidations for which server has lost track since fail-over happened. 
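+ // The newly promoted active only tracks invalidations it has issued itself, so an acknowledgement carrying
+ // an ID handed out by the previous active finds no holder and can simply be dropped.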
+ LOGGER.warn("Ignoring invalidation from client {} " + clientDescriptor); + return; + } + if (ehcacheStateService.getStore(invalidationHolder.cacheId).getStoreConfiguration().getConsistency() == Consistency.STRONG) { invalidationHolder.clientsHavingToInvalidate.remove(clientDescriptor); if (invalidationHolder.clientsHavingToInvalidate.isEmpty()) { @@ -671,12 +708,7 @@ private void createServerStore(ClientDescriptor clientDescriptor, CreateServerSt serverStore = ehcacheStateService.getStore(name); } - serverStore.setEvictionListener(new ServerStoreEvictionListener() { - @Override - public void onEviction(long key) { - invalidateHashAfterEviction(name, key); - } - }); + serverStore.setEvictionListener(key -> invalidateHashAfterEviction(name, key)); attachStore(clientDescriptor, name); } @@ -783,12 +815,12 @@ private void attachStore(ClientDescriptor clientDescriptor, String storeId) { Set clients = storeClientMap.get(storeId); Set newClients; if (clients == null) { - newClients = new HashSet(); + newClients = new HashSet<>(); newClients.add(clientDescriptor); updated = (storeClientMap.putIfAbsent(storeId, newClients) == null); } else if (!clients.contains(clientDescriptor)) { - newClients = new HashSet(clients); + newClients = new HashSet<>(clients); newClients.add(clientDescriptor); updated = storeClientMap.replace(storeId, clients, newClients); @@ -821,7 +853,7 @@ private boolean detachStore(ClientDescriptor clientDescriptor, String storeId) { Set clients = storeClientMap.get(storeId); if (clients != null && clients.contains(clientDescriptor)) { wasRegistered = true; - Set newClients = new HashSet(clients); + Set newClients = new HashSet<>(clients); newClients.remove(clientDescriptor); updated = storeClientMap.replace(storeId, clients, newClients); } else { @@ -852,7 +884,7 @@ private static class ClientState { /** * The set of stores to which the client has attached. 
*/ - private final Set attachedStores = new HashSet(); + private final Set attachedStores = new HashSet<>(); boolean isAttached() { return attached; @@ -871,7 +903,31 @@ boolean removeStore(String storeName) { } Set getAttachedStores() { - return Collections.unmodifiableSet(new HashSet(this.attachedStores)); + return Collections.unmodifiableSet(new HashSet<>(this.attachedStores)); + } + } + + private static class InvalidationTuple { + private final ClientDescriptor clientDescriptor; + private final Set invalidationsInProgress; + private final boolean isClearInProgress; + + InvalidationTuple(ClientDescriptor clientDescriptor, Set invalidationsInProgress, boolean isClearInProgress) { + this.clientDescriptor = clientDescriptor; + this.invalidationsInProgress = invalidationsInProgress; + this.isClearInProgress = isClearInProgress; + } + + ClientDescriptor getClientDescriptor() { + return clientDescriptor; + } + + Set getInvalidationsInProgress() { + return invalidationsInProgress; + } + + boolean isClearInProgress() { + return isClearInProgress; } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index e676be94cc..bff6443fc1 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -140,7 +140,7 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu switch (message.operation()) { case APPEND: case GET_AND_APPEND: { - LOGGER.debug("ServerStore append/getAndAppend message for msgId {} & client Id {}", message.getId(), message.getClientId()); + LOGGER.debug("ServerStore append/getAndAppend message for msgId {} & client Id {} is tracked now.", message.getId(), message.getClientId()); ehcacheStateService.getClientMessageTracker().track(message.getId(), message.getClientId()); break; } From ec3c5153166e558677c488b105d825f617b3fd64 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Thu, 13 Oct 2016 13:19:44 +0530 Subject: [PATCH 084/218] Active track invalidation for eventual cache #1211 --- .../clustered/server/EhcacheActiveEntity.java | 42 ++++++++----------- 1 file changed, 18 insertions(+), 24 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index fc9f2b4338..dc89536075 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -516,10 +516,8 @@ private void invalidateHashForClient(ClientDescriptor originatingClientDescripto clientsToInvalidate.remove(originatingClientDescriptor); InvalidationHolder invalidationHolder = null; - if (ehcacheStateService.getStore(cacheId).getStoreConfiguration().getConsistency() == Consistency.STRONG) { - invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId, key); - clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); - } + invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId, key); + clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); LOGGER.debug("SERVER: requesting {} client(s) invalidation of hash {} in cache {} (ID {})", clientsToInvalidate.size(), key, 
cacheId, invalidationId); for (ClientDescriptor clientDescriptorThatHasToInvalidate : clientsToInvalidate) { @@ -544,10 +542,8 @@ private void invalidateAll(ClientDescriptor originatingClientDescriptor, String clientsToInvalidate.remove(originatingClientDescriptor); InvalidationHolder invalidationHolder = null; - if (ehcacheStateService.getStore(cacheId).getStoreConfiguration().getConsistency() == Consistency.STRONG) { - invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId); - clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); - } + invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId); + clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); LOGGER.debug("SERVER: requesting {} client(s) invalidation of all in cache {} (ID {})", clientsToInvalidate.size(), cacheId, invalidationId); for (ClientDescriptor clientDescriptorThatHasToInvalidate : clientsToInvalidate) { @@ -573,23 +569,21 @@ private void clientInvalidated(ClientDescriptor clientDescriptor, int invalidati return; } - if (ehcacheStateService.getStore(invalidationHolder.cacheId).getStoreConfiguration().getConsistency() == Consistency.STRONG) { - invalidationHolder.clientsHavingToInvalidate.remove(clientDescriptor); - if (invalidationHolder.clientsHavingToInvalidate.isEmpty()) { - if (clientsWaitingForInvalidation.remove(invalidationId) != null) { - try { - Long key = invalidationHolder.key; - if (key == null) { - clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, allInvalidationDone(invalidationHolder.cacheId)); - LOGGER.debug("SERVER: notifying originating client that all other clients invalidated all in cache {} from {} (ID {})", invalidationHolder.cacheId, clientDescriptor, invalidationId); - } else { - clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, hashInvalidationDone(invalidationHolder.cacheId, key)); - LOGGER.debug("SERVER: notifying originating client that all other clients invalidated key {} in cache {} from {} (ID {})", key, invalidationHolder.cacheId, clientDescriptor, invalidationId); - } - } catch (MessageCodecException mce) { - //TODO: what should be done here? - LOGGER.error("Codec error", mce); + invalidationHolder.clientsHavingToInvalidate.remove(clientDescriptor); + if (invalidationHolder.clientsHavingToInvalidate.isEmpty()) { + if (clientsWaitingForInvalidation.remove(invalidationId) != null) { + try { + Long key = invalidationHolder.key; + if (key == null) { + clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, allInvalidationDone(invalidationHolder.cacheId)); + LOGGER.debug("SERVER: notifying originating client that all other clients invalidated all in cache {} from {} (ID {})", invalidationHolder.cacheId, clientDescriptor, invalidationId); + } else { + clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, hashInvalidationDone(invalidationHolder.cacheId, key)); + LOGGER.debug("SERVER: notifying originating client that all other clients invalidated key {} in cache {} from {} (ID {})", key, invalidationHolder.cacheId, clientDescriptor, invalidationId); } + } catch (MessageCodecException mce) { + //TODO: what should be done here? 
+ LOGGER.error("Codec error", mce); } } } From 2af01dd7dc8fa3520f41a195751256a1f6d35474 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Tue, 25 Oct 2016 20:01:21 +0530 Subject: [PATCH 085/218] Eventual invalidation tracking at passive #1211 --- .../client/internal/EhcacheClientEntity.java | 16 ++- .../store/StrongServerStoreProxy.java | 3 +- .../internal/messages/EhcacheCodec.java | 12 +-- ...ge.java => PassiveReplicationMessage.java} | 99 +++++++++++++++---- ...va => PassiveReplicationMessageCodec.java} | 44 +++++++-- .../internal/messages/ReconnectData.java | 5 +- .../internal/messages/EhcacheCodecTest.java | 25 ++--- ...> PassiveReplicationMessageCodecTest.java} | 43 ++++++-- clustered/integration-test/build.gradle | 4 - ...dCacheOpsReplicationMultiThreadedTest.java | 22 ++++- ...BasicClusteredCacheOpsReplicationTest.java | 16 ++- ...OpsReplicationWithMulitpleClientsTest.java | 22 ++++- .../clustered/server/EhcacheActiveEntity.java | 47 +++++++-- .../server/EhcachePassiveEntity.java | 39 +++++++- .../server/EhcacheStateServiceImpl.java | 25 ++++- .../server/state/EhcacheStateService.java | 2 + .../server/state/InvalidationTracker.java | 43 ++++++++ .../server/EhcachePassiveEntityTest.java | 3 +- 18 files changed, 377 insertions(+), 93 deletions(-) rename clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/{ClientIDTrackerMessage.java => PassiveReplicationMessage.java} (55%) rename clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/{ClientIDTrackerMessageCodec.java => PassiveReplicationMessageCodec.java} (62%) rename clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/{ClientIDTrackerMessageCodecTest.java => PassiveReplicationMessageCodecTest.java} (50%) create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index 33f8f1430a..a601c0567b 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -47,6 +47,7 @@ import org.terracotta.entity.MessageCodecException; import org.terracotta.exception.EntityException; +import java.util.ArrayList; import java.util.EnumSet; import java.util.List; import java.util.Map; @@ -88,10 +89,11 @@ public interface ReconnectListener { private final LifeCycleMessageFactory messageFactory; private final Map, List>> responseListeners = new ConcurrentHashMap, List>>(); private final List disconnectionListeners = new CopyOnWriteArrayList(); - private final List reconnectListeners = new CopyOnWriteArrayList(); + private final List reconnectListeners = new ArrayList(); private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); private volatile boolean connected = true; private final ReconnectData reconnectData = new ReconnectData(); + private final Object lock = new Object(); private volatile UUID clientId; private Timeouts timeouts = Timeouts.builder().build(); @@ -109,10 +111,12 @@ public void handleMessage(EntityResponse messageFromServer) { @Override public byte[] createExtendedReconnectData() { - for (ReconnectListener reconnectListener : reconnectListeners) { - reconnectListener.onHandleReconnect(reconnectData); + synchronized (lock) { + for (ReconnectListener 
reconnectListener : reconnectListeners) { + reconnectListener.onHandleReconnect(reconnectData); + } + return reconnectDataCodec.encode(reconnectData); } - return reconnectDataCodec.encode(reconnectData); } @Override @@ -164,7 +168,9 @@ public void addDisconnectionListener(DisconnectionListener listener) { } public void addReconnectListener(ReconnectListener listener) { - reconnectListeners.add(listener); + synchronized (lock) { + reconnectListeners.add(listener); + } } public void addResponseListener(Class responseType, ResponseListener responseListener) { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java index 7b1056033e..8dde6fb0a3 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java @@ -24,7 +24,6 @@ import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -54,7 +53,7 @@ public StrongServerStoreProxy(final ServerStoreMessageFactory messageFactory, fi entity.addReconnectListener(new EhcacheClientEntity.ReconnectListener() { @Override public void onHandleReconnect(ReconnectData reconnectData) { - Set inflightInvalidations = new HashSet(hashInvalidationsInProgress.keySet()); + Set inflightInvalidations = hashInvalidationsInProgress.keySet(); reconnectData.addInvalidationsInProgress(delegate.getCacheId(), inflightInvalidations); if (invalidateAllLatch != null) { reconnectData.addClearInProgress(delegate.getCacheId()); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java index c5d52fd90e..98e5d20e65 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java @@ -28,25 +28,25 @@ public class EhcacheCodec implements MessageCodec { private static final MessageCodec SERVER_INSTANCE = - new EhcacheCodec(new ServerStoreOpCodec(), new LifeCycleMessageCodec(), new StateRepositoryOpCodec(), new ResponseCodec(), new ClientIDTrackerMessageCodec()); + new EhcacheCodec(new ServerStoreOpCodec(), new LifeCycleMessageCodec(), new StateRepositoryOpCodec(), new ResponseCodec(), new PassiveReplicationMessageCodec()); private final ServerStoreOpCodec serverStoreOpCodec; private final LifeCycleMessageCodec lifeCycleMessageCodec; private final StateRepositoryOpCodec stateRepositoryOpCodec; private final ResponseCodec responseCodec; - private final ClientIDTrackerMessageCodec clientIDTrackerMessageCodec; + private final PassiveReplicationMessageCodec passiveReplicationMessageCodec; public static MessageCodec messageCodec() { return SERVER_INSTANCE; } EhcacheCodec(ServerStoreOpCodec serverStoreOpCodec, LifeCycleMessageCodec lifeCycleMessageCodec, - StateRepositoryOpCodec stateRepositoryOpCodec, ResponseCodec responseCodec, ClientIDTrackerMessageCodec clientIDTrackerMessageCodec) { + StateRepositoryOpCodec stateRepositoryOpCodec, ResponseCodec responseCodec, PassiveReplicationMessageCodec passiveReplicationMessageCodec) { this.serverStoreOpCodec = serverStoreOpCodec; 
this.lifeCycleMessageCodec = lifeCycleMessageCodec; this.stateRepositoryOpCodec = stateRepositoryOpCodec; this.responseCodec = responseCodec; - this.clientIDTrackerMessageCodec = clientIDTrackerMessageCodec; + this.passiveReplicationMessageCodec = passiveReplicationMessageCodec; } @Override @@ -59,7 +59,7 @@ public byte[] encodeMessage(EhcacheEntityMessage message) { case STATE_REPO_OP: return stateRepositoryOpCodec.encode((StateRepositoryOpMessage) message); case REPLICATION_OP: - return clientIDTrackerMessageCodec.encode((ClientIDTrackerMessage)message); + return passiveReplicationMessageCodec.encode((PassiveReplicationMessage)message); default: throw new IllegalArgumentException("Undefined message type: " + message.getType()); } @@ -75,7 +75,7 @@ public EhcacheEntityMessage decodeMessage(byte[] payload) throws MessageCodecExc } else if (opCode <= STATE_REPO_OP.getCode()) { return stateRepositoryOpCodec.decode(payload); } else if (opCode > SYNC_OP.getCode() && opCode <= REPLICATION_OP.getCode()) { - return clientIDTrackerMessageCodec.decode(payload); + return passiveReplicationMessageCodec.decode(payload); } else { throw new UnsupportedOperationException("Undefined message code: " + opCode); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java similarity index 55% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java rename to clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java index 6941205ac4..578e3bd511 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java @@ -23,11 +23,13 @@ /** * This message is sent by the Active Entity to Passive Entity. 
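 * It covers client ID tracking, chain replication and the clear/hash invalidation-completion notifications
 * that let the passive track invalidations (see {@link ReplicationOp}).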
*/ -public class ClientIDTrackerMessage extends EhcacheEntityMessage { +public abstract class PassiveReplicationMessage extends EhcacheEntityMessage { public enum ReplicationOp { CHAIN_REPLICATION_OP((byte) 41), - CLIENTID_TRACK_OP((byte) 42) + CLIENTID_TRACK_OP((byte) 42), + CLEAR_INVALIDATION_COMPLETE((byte) 43), + INVALIDATION_COMPLETE((byte) 44) ; private final byte replicationOpCode; @@ -47,20 +49,16 @@ public static ReplicationOp getReplicationOp(byte replicationOpCode) { return CHAIN_REPLICATION_OP; case 42: return CLIENTID_TRACK_OP; + case 43: + return CLEAR_INVALIDATION_COMPLETE; + case 44: + return INVALIDATION_COMPLETE; default: throw new IllegalArgumentException("Replication operation not defined for : " + replicationOpCode); } } } - private final UUID clientId; - private final long msgId; - - public ClientIDTrackerMessage(long msgId, UUID clientId) { - this.msgId = msgId; - this.clientId = clientId; - } - @Override public Type getType() { return Type.REPLICATION_OP; @@ -76,16 +74,27 @@ public void setId(long id) { throw new UnsupportedOperationException("This method is not supported on replication message"); } - public ReplicationOp operation() { - return ReplicationOp.CLIENTID_TRACK_OP; - } + public abstract ReplicationOp operation(); - public long getId() { - return msgId; - } + public static class ClientIDTrackerMessage extends PassiveReplicationMessage { + private final UUID clientId; + private final long msgId; + + public ClientIDTrackerMessage(long msgId, UUID clientId) { + this.msgId = msgId; + this.clientId = clientId; + } - public UUID getClientId() { - return clientId; + public ReplicationOp operation() { + return ReplicationOp.CLIENTID_TRACK_OP; + } + public long getId() { + return msgId; + } + + public UUID getClientId() { + return clientId; + } } public static class ChainReplicationMessage extends ClientIDTrackerMessage implements ConcurrentEntityMessage { @@ -123,4 +132,58 @@ public long concurrencyKey() { return (this.cacheId.hashCode() + key); } } + + public static class ClearInvalidationCompleteMessage extends PassiveReplicationMessage implements ConcurrentEntityMessage { + private final String cacheId; + + public ClearInvalidationCompleteMessage(String cacheId) { + this.cacheId = cacheId; + } + + @Override + public long concurrencyKey() { + return this.cacheId.hashCode(); + } + + @Override + public long getId() { + throw new UnsupportedOperationException("Not supported for ClearInvalidationCompleteMessage"); + } + + @Override + public UUID getClientId() { + throw new UnsupportedOperationException("Not supported for ClearInvalidationCompleteMessage"); + } + + public ReplicationOp operation() { + return ReplicationOp.CLEAR_INVALIDATION_COMPLETE; + } + + public String getCacheId() { + return cacheId; + } + } + + public static class InvalidationCompleteMessage extends ClearInvalidationCompleteMessage { + + private final long key; + + public InvalidationCompleteMessage(String cacheId, long key) { + super(cacheId); + this.key = key; + } + + @Override + public long concurrencyKey() { + return (getCacheId().hashCode() + key); + } + + public ReplicationOp operation() { + return ReplicationOp.INVALIDATION_COMPLETE; + } + + public long getKey() { + return key; + } + } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java similarity index 62% rename from 
clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodec.java rename to clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java index 325c43fa61..c600883335 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java @@ -17,14 +17,16 @@ package org.ehcache.clustered.common.internal.messages; import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; -import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ReplicationOp; -import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ChainReplicationMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ReplicationOp; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.store.Chain; import java.nio.ByteBuffer; import java.util.UUID; -class ClientIDTrackerMessageCodec { +import static org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.*; + +class PassiveReplicationMessageCodec { private static final byte OP_CODE_SIZE = 1; private static final byte CACHE_ID_LEN_SIZE = 4; @@ -33,7 +35,7 @@ class ClientIDTrackerMessageCodec { private ChainCodec chainCodec = new ChainCodec(); - public byte[] encode(ClientIDTrackerMessage message) { + public byte[] encode(PassiveReplicationMessage message) { ByteBuffer encodedMsg; switch (message.operation()) { @@ -56,6 +58,19 @@ public byte[] encode(ClientIDTrackerMessage message) { encodedMsg.putLong(chainReplicationMessage.getKey()); encodedMsg.put(encodedChain); return encodedMsg.array(); + case CLEAR_INVALIDATION_COMPLETE: + ClearInvalidationCompleteMessage clearInvalidationCompleteMessage = (ClearInvalidationCompleteMessage)message; + encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + 2 * clearInvalidationCompleteMessage.getCacheId().length()); + encodedMsg.put(message.getOpCode()); + CodecUtil.putStringAsCharArray(encodedMsg, clearInvalidationCompleteMessage.getCacheId()); + return encodedMsg.array(); + case INVALIDATION_COMPLETE: + InvalidationCompleteMessage invalidationCompleteMessage = (InvalidationCompleteMessage)message; + encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + KEY_SIZE + 2 * invalidationCompleteMessage.getCacheId().length()); + encodedMsg.put(message.getOpCode()); + encodedMsg.putLong(invalidationCompleteMessage.getKey()); + CodecUtil.putStringAsCharArray(encodedMsg, invalidationCompleteMessage.getCacheId()); + return encodedMsg.array(); default: throw new UnsupportedOperationException("This operation is not supported : " + message.operation()); } @@ -65,19 +80,32 @@ public byte[] encode(ClientIDTrackerMessage message) { public EhcacheEntityMessage decode(byte[] payload) { ByteBuffer byteBuffer = ByteBuffer.wrap(payload); ReplicationOp replicationOp = ReplicationOp.getReplicationOp(byteBuffer.get()); - UUID clientId = getClientId(byteBuffer); - long msgId = byteBuffer.getLong(); + UUID clientId; + long msgId; + String cacheId; + long key; switch (replicationOp) { case CHAIN_REPLICATION_OP: + clientId = getClientId(byteBuffer); + msgId = byteBuffer.getLong(); int length = byteBuffer.getInt(); - String cacheId = CodecUtil.getStringFromBuffer(byteBuffer, length); - long key = byteBuffer.getLong(); + cacheId = 
CodecUtil.getStringFromBuffer(byteBuffer, length); + key = byteBuffer.getLong(); byte[] encodedChain = new byte[byteBuffer.remaining()]; byteBuffer.get(encodedChain); Chain chain = chainCodec.decode(encodedChain); return new ChainReplicationMessage(cacheId, key, chain, msgId, clientId); case CLIENTID_TRACK_OP: + clientId = getClientId(byteBuffer); + msgId = byteBuffer.getLong(); return new ClientIDTrackerMessage(msgId, clientId); + case CLEAR_INVALIDATION_COMPLETE: + cacheId = CodecUtil.getStringFromBuffer(byteBuffer, byteBuffer.remaining()/2); + return new ClearInvalidationCompleteMessage(cacheId); + case INVALIDATION_COMPLETE: + key = byteBuffer.getLong(); + cacheId = CodecUtil.getStringFromBuffer(byteBuffer, byteBuffer.remaining()/2); + return new InvalidationCompleteMessage(cacheId, key); default: throw new UnsupportedOperationException("This operation code is not supported : " + replicationOp); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java index b0061b5f92..ca5b85479c 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java @@ -53,8 +53,7 @@ public void add(String name) { } public void remove(String name) { - if (!reconnectData.contains(name)) { - reconnectData.remove(name); + if (reconnectData.remove(name)) { reconnectDatalen.addAndGet(-(2 * name.length() + 2 * ENTRY_SIZE + CLEAR_IN_PROGRESS_STATUS_SIZE)); } } @@ -74,7 +73,7 @@ public void addInvalidationsInProgress(String cacheId, Set hashInvalidatio public Set removeInvalidationsInProgress(String cacheId) { Set hashToInvalidate = hashInvalidationsInProgressPerCache.remove(cacheId); - if (hashToInvalidate != null) { //TODO: while handling eventual + if (hashToInvalidate != null) { reconnectDatalen.addAndGet(-(hashToInvalidate.size() * HASH_SIZE)); return hashToInvalidate; } diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java index 926a5db284..7c7dc563a1 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.common.internal.messages; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.junit.Test; import java.util.UUID; @@ -36,36 +37,36 @@ public void encodeMessage() throws Exception { ServerStoreOpCodec serverStoreOpCodec = mock(ServerStoreOpCodec.class); LifeCycleMessageCodec lifeCycleMessageCodec = mock(LifeCycleMessageCodec.class); StateRepositoryOpCodec stateRepositoryOpCodec = mock(StateRepositoryOpCodec.class); - ClientIDTrackerMessageCodec clientIDTrackerMessageCodec = mock(ClientIDTrackerMessageCodec.class); - EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null, clientIDTrackerMessageCodec); + PassiveReplicationMessageCodec passiveReplicationMessageCodec = mock(PassiveReplicationMessageCodec.class); + EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null, 
passiveReplicationMessageCodec); LifecycleMessage.DestroyServerStore lifecycleMessage = new LifecycleMessage.DestroyServerStore("foo", CLIENT_ID); codec.encodeMessage(lifecycleMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, never()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, never()).encode(any(StateRepositoryOpMessage.class)); - verify(clientIDTrackerMessageCodec, never()).encode(any(ClientIDTrackerMessage.class)); + verify(passiveReplicationMessageCodec, never()).encode(any(PassiveReplicationMessage.class)); ServerStoreOpMessage.ClearMessage serverStoreOpMessage = new ServerStoreOpMessage.ClearMessage("foo", CLIENT_ID); codec.encodeMessage(serverStoreOpMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, never()).encode(any(StateRepositoryOpMessage.class)); - verify(clientIDTrackerMessageCodec, never()).encode(any(ClientIDTrackerMessage.class)); + verify(passiveReplicationMessageCodec, never()).encode(any(PassiveReplicationMessage.class)); StateRepositoryOpMessage.EntrySetMessage stateRepositoryOpMessage = new StateRepositoryOpMessage.EntrySetMessage("foo", "bar", CLIENT_ID); codec.encodeMessage(stateRepositoryOpMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, only()).encode(any(StateRepositoryOpMessage.class)); - verify(clientIDTrackerMessageCodec, never()).encode(any(ClientIDTrackerMessage.class)); + verify(passiveReplicationMessageCodec, never()).encode(any(PassiveReplicationMessage.class)); ClientIDTrackerMessage clientIDTrackerMessage = new ClientIDTrackerMessage(20L, CLIENT_ID); codec.encodeMessage(clientIDTrackerMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, only()).encode(any(StateRepositoryOpMessage.class)); - verify(clientIDTrackerMessageCodec, only()).encode(any(ClientIDTrackerMessage.class)); + verify(passiveReplicationMessageCodec, only()).encode(any(PassiveReplicationMessage.class)); } @@ -74,8 +75,8 @@ public void decodeMessage() throws Exception { ServerStoreOpCodec serverStoreOpCodec = mock(ServerStoreOpCodec.class); LifeCycleMessageCodec lifeCycleMessageCodec = mock(LifeCycleMessageCodec.class); StateRepositoryOpCodec stateRepositoryOpCodec = mock(StateRepositoryOpCodec.class); - ClientIDTrackerMessageCodec clientIDTrackerMessageCodec = mock(ClientIDTrackerMessageCodec.class); - EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null, clientIDTrackerMessageCodec); + PassiveReplicationMessageCodec passiveReplicationMessageCodec = mock(PassiveReplicationMessageCodec.class); + EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null, passiveReplicationMessageCodec); byte[] payload = new byte[1]; @@ -86,7 +87,7 @@ public void decodeMessage() throws Exception { verify(lifeCycleMessageCodec, times(10)).decode(payload); verify(serverStoreOpCodec, never()).decode(payload); verify(stateRepositoryOpCodec, never()).decode(payload); - verify(clientIDTrackerMessageCodec, never()).decode(payload); + verify(passiveReplicationMessageCodec, never()).decode(payload); for 
(byte i = 11; i <= EhcacheEntityMessage.Type.SERVER_STORE_OP.getCode(); i++) { payload[0] = i; @@ -95,7 +96,7 @@ public void decodeMessage() throws Exception { verify(lifeCycleMessageCodec, times(10)).decode(payload); verify(serverStoreOpCodec, times(10)).decode(payload); verify(stateRepositoryOpCodec, never()).decode(payload); - verify(clientIDTrackerMessageCodec, never()).decode(payload); + verify(passiveReplicationMessageCodec, never()).decode(payload); for (byte i = 21; i <= EhcacheEntityMessage.Type.STATE_REPO_OP.getCode(); i++) { payload[0] = i; @@ -104,7 +105,7 @@ public void decodeMessage() throws Exception { verify(lifeCycleMessageCodec, times(10)).decode(payload); verify(serverStoreOpCodec, times(10)).decode(payload); verify(stateRepositoryOpCodec, times(10)).decode(payload); - verify(clientIDTrackerMessageCodec, never()).decode(payload); + verify(passiveReplicationMessageCodec, never()).decode(payload); for (byte i = 41; i <= EhcacheEntityMessage.Type.REPLICATION_OP.getCode(); i++) { payload[0] = i; @@ -113,7 +114,7 @@ public void decodeMessage() throws Exception { verify(lifeCycleMessageCodec, times(10)).decode(payload); verify(serverStoreOpCodec, times(10)).decode(payload); verify(stateRepositoryOpCodec, times(10)).decode(payload); - verify(clientIDTrackerMessageCodec, times(10)).decode(payload); + verify(passiveReplicationMessageCodec, times(10)).decode(payload); } } \ No newline at end of file diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodecTest.java similarity index 50% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodecTest.java rename to clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodecTest.java index 339610af6d..900c3f7c89 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ClientIDTrackerMessageCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodecTest.java @@ -16,7 +16,10 @@ package org.ehcache.clustered.common.internal.messages; -import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ChainReplicationMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; import org.ehcache.clustered.common.internal.store.Chain; import org.junit.Test; @@ -25,20 +28,21 @@ import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -public class ClientIDTrackerMessageCodecTest { +public class PassiveReplicationMessageCodecTest { @Test public void testClientIDTrackerMessageCodec() { ClientIDTrackerMessage clientIDTrackerMessage = 
new ClientIDTrackerMessage(200L, UUID.randomUUID()); - ClientIDTrackerMessageCodec clientIDTrackerMessageCodec = new ClientIDTrackerMessageCodec(); + PassiveReplicationMessageCodec passiveReplicationMessageCodec = new PassiveReplicationMessageCodec(); - ClientIDTrackerMessage decodedMsg = (ClientIDTrackerMessage)clientIDTrackerMessageCodec.decode(clientIDTrackerMessageCodec + PassiveReplicationMessage decodedMsg = (PassiveReplicationMessage)passiveReplicationMessageCodec.decode(passiveReplicationMessageCodec .encode(clientIDTrackerMessage)); assertThat(decodedMsg.getClientId(), is(clientIDTrackerMessage.getClientId())); @@ -51,9 +55,9 @@ public void testChainReplicationMessageCodec() { Chain chain = getChain(false, createPayload(2L), createPayload(20L)); ChainReplicationMessage chainReplicationMessage = new ChainReplicationMessage("test", 2L, chain, 200L, UUID.randomUUID()); - ClientIDTrackerMessageCodec clientIDTrackerMessageCodec = new ClientIDTrackerMessageCodec(); + PassiveReplicationMessageCodec passiveReplicationMessageCodec = new PassiveReplicationMessageCodec(); - ChainReplicationMessage decodedMsg = (ChainReplicationMessage)clientIDTrackerMessageCodec.decode(clientIDTrackerMessageCodec + ChainReplicationMessage decodedMsg = (ChainReplicationMessage)passiveReplicationMessageCodec.decode(passiveReplicationMessageCodec .encode(chainReplicationMessage)); assertThat(decodedMsg.getCacheId(), is(chainReplicationMessage.getCacheId())); @@ -64,4 +68,31 @@ public void testChainReplicationMessageCodec() { } + @Test + public void testClearInvalidationCompleteMessage() { + ClearInvalidationCompleteMessage clearInvalidationCompleteMessage = new ClearInvalidationCompleteMessage("test"); + + PassiveReplicationMessageCodec messageCodec = new PassiveReplicationMessageCodec(); + + ClearInvalidationCompleteMessage decoded = (ClearInvalidationCompleteMessage)messageCodec.decode(messageCodec.encode(clearInvalidationCompleteMessage)); + + assertThat(decoded.getOpCode(), equalTo(clearInvalidationCompleteMessage.getOpCode())); + assertThat(decoded.getCacheId(), equalTo(clearInvalidationCompleteMessage.getCacheId())); + + } + + @Test + public void testInvalidationCompleteMessage() { + + InvalidationCompleteMessage invalidationCompleteMessage = new InvalidationCompleteMessage("test", 20L); + + PassiveReplicationMessageCodec messageCodec = new PassiveReplicationMessageCodec(); + + InvalidationCompleteMessage decoded = (InvalidationCompleteMessage)messageCodec.decode(messageCodec.encode(invalidationCompleteMessage)); + + assertThat(decoded.getOpCode(), equalTo(invalidationCompleteMessage.getOpCode())); + assertThat(decoded.getCacheId(), equalTo(invalidationCompleteMessage.getCacheId())); + assertThat(decoded.getKey(), equalTo(invalidationCompleteMessage.getKey())); + } + } diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index 4037c76dfd..e8d6c2ebd9 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -70,10 +70,6 @@ compileTestJava { sourceCompatibility = 1.8 targetCompatibility = 1.8 -checkstyle { - toolVersion = '5.9' -} - test { dependsOn unzipKit executable = Jvm.current().javaExecutable diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java index 708e1646c9..7626995512 100644 --- 
a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java @@ -33,6 +33,10 @@ import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; @@ -51,6 +55,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; +import static java.util.Arrays.asList; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -62,6 +67,7 @@ * Note that fail-over is happening while client threads are still writing * Finally the same key set correctness is asserted. */ +@RunWith(Parameterized.class) public class BasicClusteredCacheOpsReplicationMultiThreadedTest { private static final int NUM_OF_THREADS = 10; @@ -78,6 +84,14 @@ public class BasicClusteredCacheOpsReplicationMultiThreadedTest { private static Cache CACHE1; private static Cache CACHE2; + @Parameters(name = "consistency={0}") + public static Consistency[] data() { + return Consistency.values(); + } + + @Parameter + public Consistency cacheConsistency; + @ClassRule public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); @@ -97,7 +111,7 @@ public void startServers() throws Exception { .newCacheConfigurationBuilder(Long.class, BlobValue.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(500, EntryUnit.ENTRIES) .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .add(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) .build(); CACHE1 = CACHE_MANAGER1.createCache("clustered-cache", config); @@ -112,7 +126,7 @@ public void tearDown() throws Exception { CLUSTER.getClusterControl().startAllServers(); } - @Test + @Test(timeout=180000) public void testCRUD() throws Exception { List> caches = new ArrayList<>(); caches.add(CACHE1); @@ -162,7 +176,7 @@ public void testCRUD() throws Exception { } - @Test + @Test(timeout=180000) public void testBulkOps() throws Exception { List> caches = new ArrayList<>(); caches.add(CACHE1); @@ -215,7 +229,7 @@ public void testBulkOps() throws Exception { } - @Test + @Test(timeout=180000) public void testClear() throws Exception { List> caches = new ArrayList<>(); caches.add(CACHE1); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java index b6d49ac753..4498f3a272 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java @@ -33,6 +33,10 @@ import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import 
org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; @@ -44,11 +48,13 @@ import java.util.Map; import java.util.Set; +import static java.util.Arrays.asList; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; +@RunWith(Parameterized.class) public class BasicClusteredCacheOpsReplicationTest { private static final String RESOURCE_CONFIG = @@ -62,6 +68,14 @@ public class BasicClusteredCacheOpsReplicationTest { private static Cache CACHE1; private static Cache CACHE2; + @Parameters(name = "consistency={0}") + public static Consistency[] data() { + return Consistency.values(); + } + + @Parameter + public Consistency cacheConsistency; + @ClassRule public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); @@ -79,7 +93,7 @@ public void startServers() throws Exception { CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(100, EntryUnit.ENTRIES) .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .add(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) .build(); CACHE1 = CACHE_MANAGER.createCache("clustered-cache", config); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java index 4a53cddbba..b29e662465 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java @@ -33,6 +33,10 @@ import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; @@ -48,6 +52,7 @@ import java.util.Set; import java.util.stream.LongStream; +import static java.util.Arrays.asList; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -56,6 +61,7 @@ /** * The point of this test is to assert proper data read after fail-over handling. 
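The three replication integration tests touched by this patch (BasicClusteredCacheOpsReplicationMultiThreadedTest, BasicClusteredCacheOpsReplicationTest and BasicClusteredCacheOpsReplicationWithMulitpleClientsTest) stop hard-coding Consistency.STRONG: each is converted to a JUnit Parameterized run over every Consistency value, and the test methods gain a 180-second timeout so a hung fail-over cannot stall the build. A minimal sketch of that pattern, assuming only JUnit 4 and the Consistency enum; the class name and assertion are illustrative, not taken from the patch:

    import org.ehcache.clustered.common.Consistency;
    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;
    import org.junit.runners.Parameterized.Parameter;
    import org.junit.runners.Parameterized.Parameters;

    import static org.junit.Assert.assertNotNull;

    @RunWith(Parameterized.class)
    public class ConsistencyParameterizedTestSketch {

      // One run of the class is made per value returned here, and that value
      // is injected into the @Parameter field below.
      @Parameters(name = "consistency={0}")
      public static Consistency[] data() {
        return Consistency.values();
      }

      @Parameter
      public Consistency cacheConsistency;

      // Each real test then configures its clustered cache with
      // ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)
      // instead of a fixed Consistency.STRONG.
      @Test(timeout = 180000)
      public void runsOncePerConsistency() {
        assertNotNull(cacheConsistency);
      }
    }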
*/ +@RunWith(Parameterized.class) public class BasicClusteredCacheOpsReplicationWithMulitpleClientsTest { private static final String RESOURCE_CONFIG = @@ -70,6 +76,14 @@ public class BasicClusteredCacheOpsReplicationWithMulitpleClientsTest { private static Cache CACHE1; private static Cache CACHE2; + @Parameters(name = "consistency={0}") + public static Consistency[] data() { + return Consistency.values(); + } + + @Parameter + public Consistency cacheConsistency; + @ClassRule public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 2, Collections.emptyList(), "", RESOURCE_CONFIG, ""); @@ -88,7 +102,7 @@ public void startServers() throws Exception { CacheConfiguration config = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, BlobValue.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(500, EntryUnit.ENTRIES) .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) - .add(ClusteredStoreConfigurationBuilder.withConsistency(Consistency.STRONG)) + .add(ClusteredStoreConfigurationBuilder.withConsistency(cacheConsistency)) .build(); CACHE1 = CACHE_MANAGER1.createCache("clustered-cache", config); @@ -103,7 +117,7 @@ public void tearDown() throws Exception { CLUSTER.getClusterControl().startAllServers(); } - @Test + @Test(timeout=180000) public void testCRUD() throws Exception { Random random = new Random(); LongStream longStream = random.longs(1000); @@ -135,7 +149,7 @@ public void testCRUD() throws Exception { } - @Test + @Test(timeout=180000) public void testBulkOps() throws Exception { List> caches = new ArrayList<>(); caches.add(CACHE1); @@ -173,7 +187,7 @@ public void testBulkOps() throws Exception { } - @Test + @Test(timeout=180000) public void testClear() throws Exception { List> caches = new ArrayList<>(); caches.add(CACHE1); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index dc89536075..e1766f274d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -47,10 +47,12 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponseFactory; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; import org.ehcache.clustered.common.internal.messages.ReconnectData; import org.ehcache.clustered.common.internal.messages.ReconnectDataCodec; -import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage; -import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ChainReplicationMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.KeyBasedServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; @@ -59,6 +61,7 @@ import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage; 
import org.ehcache.clustered.server.internal.messages.EntityStateSyncMessage; import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.InvalidationTracker; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -230,7 +233,8 @@ public void disconnected(ClientDescriptor clientDescriptor) { Iterator> it = clientsWaitingForInvalidation.entrySet().iterator(); while (it.hasNext()) { Entry next = it.next(); - if (next.getValue().clientDescriptorWaitingForInvalidation.equals(clientDescriptor)) { + ClientDescriptor clientDescriptorWaitingForInvalidation = next.getValue().clientDescriptorWaitingForInvalidation; + if (clientDescriptorWaitingForInvalidation != null && clientDescriptorWaitingForInvalidation.equals(clientDescriptor)) { it.remove(); } } @@ -358,6 +362,18 @@ public void createNew() { public void loadExisting() { LOGGER.debug("Preparing for handling Inflight Invalidations and independent Passive Evictions in loadExisting"); inflightInvalidations = new ConcurrentHashMap<>(); + Set caches = ehcacheStateService.getStores(); + caches.forEach(cacheId -> { + InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(cacheId); + inflightInvalidations.computeIfPresent(cacheId, (s, invalidationTuples) -> { + if (invalidationTuples == null) { + invalidationTuples = new ArrayList<>(); + } + invalidationTuples.add(new InvalidationTuple(null, invalidationTracker.getInvalidationMap() + .keySet(), invalidationTracker.isClearInProgress())); + return invalidationTuples; + }); + }); } private void validateClientConnected(ClientDescriptor clientDescriptor) throws ClusterException { @@ -513,7 +529,9 @@ private void invalidateHashForClient(ClientDescriptor originatingClientDescripto int invalidationId = invalidationIdGenerator.getAndIncrement(); Set clientsToInvalidate = Collections.newSetFromMap(new ConcurrentHashMap()); clientsToInvalidate.addAll(storeClientMap.get(cacheId)); - clientsToInvalidate.remove(originatingClientDescriptor); + if (originatingClientDescriptor != null) { + clientsToInvalidate.remove(originatingClientDescriptor); + } InvalidationHolder invalidationHolder = null; invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId, key); @@ -539,7 +557,9 @@ private void invalidateAll(ClientDescriptor originatingClientDescriptor, String int invalidationId = invalidationIdGenerator.getAndIncrement(); Set clientsToInvalidate = Collections.newSetFromMap(new ConcurrentHashMap()); clientsToInvalidate.addAll(storeClientMap.get(cacheId)); - clientsToInvalidate.remove(originatingClientDescriptor); + if (originatingClientDescriptor != null) { + clientsToInvalidate.remove(originatingClientDescriptor); + } InvalidationHolder invalidationHolder = null; invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId); @@ -574,12 +594,21 @@ private void clientInvalidated(ClientDescriptor clientDescriptor, int invalidati if (clientsWaitingForInvalidation.remove(invalidationId) != null) { try { Long key = invalidationHolder.key; + boolean isStrong = ehcacheStateService.getStore(invalidationHolder.cacheId).getStoreConfiguration().getConsistency() == Consistency.STRONG; if (key == null) { - clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, allInvalidationDone(invalidationHolder.cacheId)); - LOGGER.debug("SERVER: notifying originating 
client that all other clients invalidated all in cache {} from {} (ID {})", invalidationHolder.cacheId, clientDescriptor, invalidationId); + if (isStrong) { + clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, allInvalidationDone(invalidationHolder.cacheId)); + LOGGER.debug("SERVER: notifying originating client that all other clients invalidated all in cache {} from {} (ID {})", invalidationHolder.cacheId, clientDescriptor, invalidationId); + } else { + entityMessenger.messageSelf(new ClearInvalidationCompleteMessage(invalidationHolder.cacheId)); + } } else { - clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, hashInvalidationDone(invalidationHolder.cacheId, key)); - LOGGER.debug("SERVER: notifying originating client that all other clients invalidated key {} in cache {} from {} (ID {})", key, invalidationHolder.cacheId, clientDescriptor, invalidationId); + if (isStrong) { + clientCommunicator.sendNoResponse(invalidationHolder.clientDescriptorWaitingForInvalidation, hashInvalidationDone(invalidationHolder.cacheId, key)); + LOGGER.debug("SERVER: notifying originating client that all other clients invalidated key {} in cache {} from {} (ID {})", key, invalidationHolder.cacheId, clientDescriptor, invalidationId); + } else { + entityMessenger.messageSelf(new InvalidationCompleteMessage(invalidationHolder.cacheId, key)); + } } } catch (MessageCodecException mce) { //TODO: what should be done here? diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index bff6443fc1..d02ffc2ec9 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -29,8 +29,10 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; -import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage; -import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage.ChainReplicationMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage; @@ -38,6 +40,7 @@ import org.ehcache.clustered.server.internal.messages.EntitySyncMessage; import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.InvalidationTracker; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -82,7 +85,7 @@ public void invoke(EhcacheEntityMessage message) { invokeSyncOperation((EntitySyncMessage) 
message); break; case REPLICATION_OP: - invokeRetirementMessages((ClientIDTrackerMessage)message); + invokeRetirementMessages((PassiveReplicationMessage)message); break; default: throw new IllegalMessageException("Unknown message : " + message); @@ -107,7 +110,7 @@ public void invoke(EhcacheEntityMessage message) { } } - private void invokeRetirementMessages(ClientIDTrackerMessage message) throws ClusterException { + private void invokeRetirementMessages(PassiveReplicationMessage message) throws ClusterException { switch (message.operation()) { case CHAIN_REPLICATION_OP: @@ -120,11 +123,33 @@ private void invokeRetirementMessages(ClientIDTrackerMessage message) throws Clu } cacheStore.put(retirementMessage.getKey(), retirementMessage.getChain()); ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); + InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(retirementMessage.getCacheId()); + if (invalidationTracker != null) { + invalidationTracker.getInvalidationMap().computeIfPresent(retirementMessage.getKey(), (key, count) -> { + if (count == null) { + return count = 1; + } else { + return count++; + } + }); + } break; case CLIENTID_TRACK_OP: - LOGGER.debug("ClientIDTrackerMessage message for msgId {} & client Id {}", message.getId(), message.getClientId()); + LOGGER.debug("PassiveReplicationMessage message for msgId {} & client Id {}", message.getId(), message.getClientId()); ehcacheStateService.getClientMessageTracker().add(message.getClientId()); break; + case INVALIDATION_COMPLETE: + InvalidationCompleteMessage invalidationCompleteMessage = (InvalidationCompleteMessage)message; + ehcacheStateService.getInvalidationTracker(invalidationCompleteMessage.getCacheId()).getInvalidationMap().computeIfPresent(invalidationCompleteMessage.getKey(), (key, count) -> { + if (count == 1) { + return null; + } + return count--; + }); + break; + case CLEAR_INVALIDATION_COMPLETE: + ehcacheStateService.getInvalidationTracker(((ClearInvalidationCompleteMessage)message).getCacheId()).setClearInProgress(false); + break; default: throw new IllegalMessageException("Unknown Retirement Message : " + message); } @@ -151,6 +176,10 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu } case CLEAR: { cacheStore.clear(); + InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(message.getCacheId()); + if (invalidationTracker != null) { + invalidationTracker.setClearInProgress(true); + } break; } default: diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index 3d51f2d335..d655973853 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.server; +import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; @@ -23,6 +24,7 @@ import org.ehcache.clustered.server.repo.StateRepositoryManager; import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.InvalidationTracker; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; @@ -46,6 +48,8 @@ import java.util.Iterator; import java.util.Map; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; import static java.util.stream.Collectors.toMap; import static org.terracotta.offheapstore.util.MemoryUnit.GIGABYTES; @@ -74,7 +78,7 @@ public class EhcacheStateServiceImpl implements EhcacheStateService { * The clustered dedicated resource pools specified by caches defined in CacheManagers using this * {@code EhcacheActiveEntity}. The index is the cache identifier (alias). */ - private Map dedicatedResourcePools = new HashMap(); + private Map dedicatedResourcePools = new HashMap<>(); /** * The clustered stores representing the server-side of a {@code ClusterStore}. @@ -83,6 +87,7 @@ public class EhcacheStateServiceImpl implements EhcacheStateService { private Map stores = Collections.emptyMap(); private final ClientMessageTracker messageTracker = new ClientMessageTracker(); + private final ConcurrentMap invalidationMap = new ConcurrentHashMap<>(); private final StateRepositoryManager stateRepositoryManager; private final KeySegmentMapper mapper; @@ -103,7 +108,7 @@ public Set getStores() { } Set getSharedResourcePoolIds() { - return sharedResourcePools == null ? new HashSet() : Collections.unmodifiableSet(sharedResourcePools.keySet()); + return sharedResourcePools == null ? new HashSet<>() : Collections.unmodifiableSet(sharedResourcePools.keySet()); } Set getDedicatedResourcePoolIds() { @@ -158,7 +163,7 @@ private void checkConfigurationCompatibility(ServerSideConfiguration incomingCon } private static Map resolveResourcePools(ServerSideConfiguration configuration) throws InvalidServerSideConfigurationException { - Map pools = new HashMap(); + Map pools = new HashMap<>(); for (Map.Entry e : configuration.getResourcePools().entrySet()) { ServerSideConfiguration.Pool pool = e.getValue(); if (pool.getServerResource() == null) { @@ -187,7 +192,7 @@ public void configure(ServerSideConfiguration configuration) throws ClusterExcep } this.sharedResourcePools = createPools(resolveResourcePools(configuration)); - this.stores = new HashMap(); + this.stores = new HashMap<>(); } else { throw new InvalidStoreManagerException("Clustered Tier Manager already configured"); @@ -195,7 +200,7 @@ public void configure(ServerSideConfiguration configuration) throws ClusterExcep } private Map createPools(Map resourcePools) throws ResourceConfigurationException { - Map pools = new HashMap(); + Map pools = new HashMap<>(); try { for (Map.Entry e : resourcePools.entrySet()) { pools.put(e.getKey(), createPageSource(e.getKey(), e.getValue())); @@ -271,6 +276,7 @@ public void destroy() { releasePools("dedicated", this.dedicatedResourcePools); this.sharedResourcePools = null; + invalidationMap.clear(); } private void releasePools(String poolType, Map resourcePools) { @@ -302,6 +308,9 @@ public ServerStoreImpl createStore(String name, ServerStoreConfiguration serverS PageSource resourcePageSource = getPageSource(name, serverStoreConfiguration.getPoolAllocation()); ServerStoreImpl serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource, mapper); stores.put(name, serverStore); + if(serverStoreConfiguration.getConsistency() == Consistency.EVENTUAL) { + invalidationMap.put(name, new InvalidationTracker()); + } return serverStore; } @@ -314,6 +323,7 @@ public void destroyServerStore(String name) 
throws ClusterException { store.close(); } stateRepositoryManager.destroyStateRepository(name); + invalidationMap.remove(name); } private PageSource getPageSource(String name, PoolAllocation allocation) throws ClusterException { @@ -356,6 +366,11 @@ private PageSource getPageSource(String name, PoolAllocation allocation) throws } + @Override + public InvalidationTracker getInvalidationTracker(String cacheId) { + return this.invalidationMap.get(cacheId); + } + public boolean isConfigured() { return (sharedResourcePools != null); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index 3185436974..3497914a67 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -54,4 +54,6 @@ public interface EhcacheStateService { ClientMessageTracker getClientMessageTracker(); + InvalidationTracker getInvalidationTracker(String cacheId); + } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java new file mode 100644 index 0000000000..49177d2fe3 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/InvalidationTracker.java @@ -0,0 +1,43 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
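The InvalidationTracker whose source follows is the bookkeeping a passive keeps per eventually consistent cache: a map from key hash to the number of invalidations still outstanding, plus a flag recording whether a clear is in flight. In the hunks above the passive bumps a count when it applies a ChainReplicationMessage, decrements it on INVALIDATION_COMPLETE, and toggles the flag on CLEAR and CLEAR_INVALIDATION_COMPLETE, while EhcacheStateServiceImpl creates one tracker per EVENTUAL store. The sketch below shows the counting idea using ConcurrentMap.merge rather than the computeIfPresent lambdas in the patch; the difference is worth noting, since inside a remapping function the value that is returned is what gets stored, so an expression like `return count++` hands the pre-increment value back to the map. Class and method names here are ours:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    // Illustrative counter maintenance for a per-cache invalidation map.
    public class InvalidationCountingSketch {

      private final ConcurrentMap<Long, Integer> pending = new ConcurrentHashMap<>();

      // Called when a chain replication for `key` is applied on the passive:
      // one more invalidation that a new active must replay after fail-over.
      void recordPendingInvalidation(long key) {
        pending.merge(key, 1, Integer::sum); // inserts 1 or increments atomically
      }

      // Called when the active reports INVALIDATION_COMPLETE for `key`:
      // drop the count by one and remove the entry once it reaches zero.
      void recordInvalidationComplete(long key) {
        pending.computeIfPresent(key, (k, count) -> count <= 1 ? null : count - 1);
      }
    }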
+ */ + +package org.ehcache.clustered.server.state; + +import com.tc.classloader.CommonComponent; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicBoolean; + +@CommonComponent +public class InvalidationTracker { + + private final ConcurrentMap invalidationMap = new ConcurrentHashMap<>(); + private final AtomicBoolean isClearInProgress = new AtomicBoolean(false); + + public boolean isClearInProgress() { + return isClearInProgress.get(); + } + + public void setClearInProgress(boolean clearInProgress) { + isClearInProgress.getAndSet(clearInProgress); + } + + public ConcurrentMap getInvalidationMap() { + return invalidationMap; + } + +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index 683c8b320d..cfcb28e4e8 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -22,7 +22,8 @@ import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; -import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; import org.junit.Before; From 43618acad3fb3d342dcc0d3f7d415b7aa655fe55 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Wed, 26 Oct 2016 00:31:10 +0530 Subject: [PATCH 086/218] Refactor reconnect codec logic #1211 --- .../client/internal/EhcacheClientEntity.java | 26 ++--- .../store/StrongServerStoreProxy.java | 8 +- .../internal/messages/ReconnectData.java | 93 ----------------- .../internal/messages/ReconnectDataCodec.java | 76 -------------- .../internal/messages/ReconnectMessage.java | 66 +++++++++++++ .../messages/ReconnectMessageCodec.java | 99 +++++++++++++++++++ ...st.java => ReconnectMessageCodecTest.java} | 44 +++++---- .../clustered/server/EhcacheActiveEntity.java | 92 ++++++++--------- .../server/EhcachePassiveEntity.java | 41 ++++---- .../server/EhcacheStateServiceImpl.java | 6 ++ .../server/state/EhcacheStateService.java | 2 + 11 files changed, 285 insertions(+), 268 deletions(-) delete mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java delete mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java rename clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/{ReconnectDataCodecTest.java => ReconnectMessageCodecTest.java} (54%) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index a601c0567b..e6cd70ac2c 100644 --- 
a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -34,8 +34,8 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Failure; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Type; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; -import org.ehcache.clustered.common.internal.messages.ReconnectData; -import org.ehcache.clustered.common.internal.messages.ReconnectDataCodec; +import org.ehcache.clustered.common.internal.messages.ReconnectMessage; +import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -48,6 +48,7 @@ import org.terracotta.exception.EntityException; import java.util.ArrayList; +import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Map; @@ -80,7 +81,7 @@ public interface DisconnectionListener { } public interface ReconnectListener { - void onHandleReconnect(ReconnectData reconnectData); + void onHandleReconnect(ReconnectMessage reconnectMessage); } private final AtomicLong sequenceGenerator = new AtomicLong(0L); @@ -90,9 +91,9 @@ public interface ReconnectListener { private final Map, List>> responseListeners = new ConcurrentHashMap, List>>(); private final List disconnectionListeners = new CopyOnWriteArrayList(); private final List reconnectListeners = new ArrayList(); - private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); + private final ReconnectMessageCodec reconnectMessageCodec = new ReconnectMessageCodec(); private volatile boolean connected = true; - private final ReconnectData reconnectData = new ReconnectData(); + private final Set caches = Collections.newSetFromMap(new ConcurrentHashMap()); private final Object lock = new Object(); private volatile UUID clientId; @@ -112,10 +113,12 @@ public void handleMessage(EntityResponse messageFromServer) { @Override public byte[] createExtendedReconnectData() { synchronized (lock) { + ReconnectMessage reconnectMessage = new ReconnectMessage(clientId, caches); for (ReconnectListener reconnectListener : reconnectListeners) { - reconnectListener.onHandleReconnect(reconnectData); + reconnectListener.onHandleReconnect(reconnectMessage); } - return reconnectDataCodec.encode(reconnectData); +// return reconnectDataCodec.encode(reconnectData); + return reconnectMessageCodec.encode(reconnectMessage); } } @@ -197,7 +200,6 @@ public void validate(ServerSideConfiguration config) throws ClusteredTierManager try { clientId = UUID.randomUUID(); this.messageFactory.setClientId(clientId); - this.reconnectData.setClientId(clientId); invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.validateStoreManager(config), false); break; } catch (InvalidClientIdException e) { @@ -213,7 +215,6 @@ public void configure(ServerSideConfiguration config) throws ClusteredTierManage try { clientId = UUID.randomUUID(); this.messageFactory.setClientId(clientId); - this.reconnectData.setClientId(clientId); invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.configureStoreManager(config), true); } catch (ClusterException e) { throw new ClusteredTierManagerConfigurationException("Error configuring clustered tier manager", e); @@ -224,7 +225,7 @@ public void 
createCache(String name, ServerStoreConfiguration serverStoreConfigu throws ClusteredTierCreationException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.createServerStore(name, serverStoreConfiguration), true); - reconnectData.add(name); + caches.add(name); } catch (ClusterException e) { throw new ClusteredTierCreationException("Error creating clustered tier '" + name + "'", e); } @@ -234,7 +235,7 @@ public void validateCache(String name, ServerStoreConfiguration serverStoreConfi throws ClusteredTierValidationException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.validateServerStore(name , serverStoreConfiguration), false); - reconnectData.add(name); + caches.add(name); } catch (ClusterException e) { throw new ClusteredTierValidationException("Error validating clustered tier '" + name + "'", e); } @@ -243,7 +244,7 @@ public void validateCache(String name, ServerStoreConfiguration serverStoreConfi public void releaseCache(String name) throws ClusteredTierReleaseException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.releaseServerStore(name), false); - reconnectData.remove(name); + caches.remove(name); } catch (ClusterException e) { throw new ClusteredTierReleaseException("Error releasing clustered tier '" + name + "'", e); } @@ -252,7 +253,6 @@ public void releaseCache(String name) throws ClusteredTierReleaseException, Time public void destroyCache(String name) throws ClusteredTierDestructionException, TimeoutException { try { invokeInternal(timeouts.getLifecycleOperationTimeout(), messageFactory.destroyServerStore(name), true); - reconnectData.remove(name); } catch (ResourceBusyException e) { throw new ClusteredTierDestructionException(e.getMessage(), e); } catch (ClusterException e) { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java index 8dde6fb0a3..83ea76839d 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java @@ -17,7 +17,7 @@ import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.clustered.common.internal.messages.ReconnectData; +import org.ehcache.clustered.common.internal.messages.ReconnectMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.common.internal.store.Chain; import org.slf4j.Logger; @@ -52,11 +52,11 @@ public StrongServerStoreProxy(final ServerStoreMessageFactory messageFactory, fi this.entity = entity; entity.addReconnectListener(new EhcacheClientEntity.ReconnectListener() { @Override - public void onHandleReconnect(ReconnectData reconnectData) { + public void onHandleReconnect(ReconnectMessage reconnectMessage) { Set inflightInvalidations = hashInvalidationsInProgress.keySet(); - reconnectData.addInvalidationsInProgress(delegate.getCacheId(), inflightInvalidations); + reconnectMessage.addInvalidationsInProgress(delegate.getCacheId(), inflightInvalidations); if (invalidateAllLatch != null) { - reconnectData.addClearInProgress(delegate.getCacheId()); + reconnectMessage.addClearInProgress(delegate.getCacheId()); 
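On the client side of this patch the mutable ReconnectData accumulator disappears: EhcacheClientEntity keeps only a concurrent set of cache names, and createExtendedReconnectData() builds a fresh ReconnectMessage from that set and the client id, lets each registered ReconnectListener (such as the StrongServerStoreProxy listener above) append its in-flight hash invalidations and clear-in-progress marker, and encodes the result. A condensed, self-contained restatement of that hand-off; the listener interface is inlined and the entity's synchronized(lock) block and remaining fields are omitted:

    import org.ehcache.clustered.common.internal.messages.ReconnectMessage;
    import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec;

    import java.util.Collections;
    import java.util.List;
    import java.util.Set;
    import java.util.UUID;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.CopyOnWriteArrayList;

    // Condensed view of the client-side reconnect hand-off: the payload is
    // derived from the tracked cache names at reconnect time instead of being
    // accumulated in a mutable ReconnectData object.
    public class ReconnectHandOffSketch {

      interface ReconnectListener {
        void onHandleReconnect(ReconnectMessage reconnectMessage);
      }

      private final UUID clientId = UUID.randomUUID();
      private final Set<String> caches = Collections.newSetFromMap(new ConcurrentHashMap<>());
      private final List<ReconnectListener> reconnectListeners = new CopyOnWriteArrayList<>();
      private final ReconnectMessageCodec reconnectMessageCodec = new ReconnectMessageCodec();

      byte[] createExtendedReconnectData() {
        // Snapshot the caches this client created or validated, plus its id.
        ReconnectMessage reconnectMessage = new ReconnectMessage(clientId, caches);
        // Proxies append in-flight invalidations and clear-in-progress markers.
        for (ReconnectListener listener : reconnectListeners) {
          listener.onHandleReconnect(reconnectMessage);
        }
        return reconnectMessageCodec.encode(reconnectMessage);
      }
    }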
} } }); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java deleted file mode 100644 index ca5b85479c..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectData.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; - -public class ReconnectData { - - private static final byte CLIENT_ID_SIZE = 16; - private static final byte ENTRY_SIZE = 4; - private static final byte HASH_SIZE = 8; - private static final byte CLEAR_IN_PROGRESS_STATUS_SIZE = 1; - - private volatile UUID clientId; - private final Set reconnectData = Collections.newSetFromMap(new ConcurrentHashMap()); - private final AtomicInteger reconnectDatalen = new AtomicInteger(CLIENT_ID_SIZE); - private final ConcurrentHashMap> hashInvalidationsInProgressPerCache = new ConcurrentHashMap>(); - private final Set cachesWithClearInProgress = Collections.newSetFromMap(new ConcurrentHashMap()); - - public UUID getClientId() { - if (clientId == null) { - throw new AssertionError("Client ID cannot be null"); - } - return clientId; - } - - public void setClientId(UUID clientId) { - this.clientId = clientId; - } - - public void add(String name) { - reconnectData.add(name); - reconnectDatalen.addAndGet(2 * name.length() + 2 * ENTRY_SIZE + CLEAR_IN_PROGRESS_STATUS_SIZE); - } - - public void remove(String name) { - if (reconnectData.remove(name)) { - reconnectDatalen.addAndGet(-(2 * name.length() + 2 * ENTRY_SIZE + CLEAR_IN_PROGRESS_STATUS_SIZE)); - } - } - - public Set getAllCaches() { - return Collections.unmodifiableSet(reconnectData); - } - - int getDataLength() { - return reconnectDatalen.get(); - } - - public void addInvalidationsInProgress(String cacheId, Set hashInvalidationsInProgress) { - hashInvalidationsInProgressPerCache.put(cacheId, hashInvalidationsInProgress); - reconnectDatalen.addAndGet(hashInvalidationsInProgress.size() * HASH_SIZE); - } - - public Set removeInvalidationsInProgress(String cacheId) { - Set hashToInvalidate = hashInvalidationsInProgressPerCache.remove(cacheId); - if (hashToInvalidate != null) { - reconnectDatalen.addAndGet(-(hashToInvalidate.size() * HASH_SIZE)); - return hashToInvalidate; - } - return Collections.EMPTY_SET; - } - - public void addClearInProgress(String cacheId) { - cachesWithClearInProgress.add(cacheId); - } - - public Set getClearInProgressCaches() { - Set caches = new HashSet(cachesWithClearInProgress); - cachesWithClearInProgress.clear(); - return caches; - } - -} diff --git 
a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java deleted file mode 100644 index 9822128b5a..0000000000 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodec.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - - -import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; - -import java.nio.ByteBuffer; -import java.util.HashSet; -import java.util.Set; -import java.util.UUID; - -public class ReconnectDataCodec { - - public byte[] encode(ReconnectData reconnectData) { - ByteBuffer encodedMsg = ByteBuffer.allocate(reconnectData.getDataLength()); - encodedMsg.put(ClusteredEhcacheIdentity.serialize(reconnectData.getClientId())); - Set clearInProgress = reconnectData.getClearInProgressCaches(); - for (String cacheId : reconnectData.getAllCaches()) { - encodedMsg.putInt(cacheId.length()); - CodecUtil.putStringAsCharArray(encodedMsg, cacheId); - if (clearInProgress.contains(cacheId)) { - encodedMsg.put((byte)1); - } else { - encodedMsg.put((byte)0); - } - Set hashToInvalidate = reconnectData.removeInvalidationsInProgress(cacheId); - encodedMsg.putInt(hashToInvalidate.size()); - for (Long hash : hashToInvalidate) { - encodedMsg.putLong(hash); - } - } - - return encodedMsg.array(); - } - - public ReconnectData decode(byte[] payload) { - ReconnectData reconnectData = new ReconnectData(); - ByteBuffer byteBuffer = ByteBuffer.wrap(payload); - long msb = byteBuffer.getLong(); - long lsb = byteBuffer.getLong(); - reconnectData.setClientId(new UUID(msb, lsb)); - - while (byteBuffer.hasRemaining()) { - int cacheIdSize = byteBuffer.getInt(); - String cacheId = CodecUtil.getStringFromBuffer(byteBuffer, cacheIdSize); - reconnectData.add(cacheId); - byte clearInProgress = byteBuffer.get(); - if (clearInProgress == 1) { - reconnectData.addClearInProgress(cacheId); - } - Set hashToInvalidate = new HashSet(); - int numOfHash = byteBuffer.getInt(); - for (int i = 0; i < numOfHash; i++) { - hashToInvalidate.add(byteBuffer.getLong()); - } - reconnectData.addInvalidationsInProgress(cacheId, hashToInvalidate); - } - return reconnectData; - } - -} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java new file mode 100644 index 0000000000..4987b41d9b --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java @@ -0,0 +1,66 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
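With ReconnectData and ReconnectDataCodec deleted above, reconnect state is carried by the ReconnectMessage value object defined just below: a client id and cache-name set fixed at construction, with per-cache invalidation hashes and clear-in-progress markers layered on top. A small usage sketch; the cache names and hash values are invented for illustration:

    import org.ehcache.clustered.common.internal.messages.ReconnectMessage;

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;
    import java.util.UUID;

    // Minimal usage of the new ReconnectMessage.
    public class ReconnectMessageUsage {
      public static void main(String[] args) {
        Set<String> caches = new HashSet<>(Arrays.asList("orders", "customers"));
        ReconnectMessage message = new ReconnectMessage(UUID.randomUUID(), caches);

        // A cache still waiting on hash invalidations registers them here ...
        message.addInvalidationsInProgress("orders", new HashSet<>(Arrays.asList(11L, 42L)));
        // ... and a cache with an outstanding clear flags it.
        message.addClearInProgress("customers");

        // The server later reads the same state back after the reconnect:
        assert message.getInvalidationsInProgress("orders").contains(42L);
        assert message.isClearInProgress("customers");
        assert message.getInvalidationsInProgress("customers").isEmpty();
      }
    }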
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +public class ReconnectMessage { + + private final UUID clientId; + private final Set caches; + private final ConcurrentMap> hashInvalidationsInProgressPerCache = new ConcurrentHashMap>(); + private final Set cachesWithClearInProgress = Collections.newSetFromMap(new ConcurrentHashMap()); + + public ReconnectMessage(UUID clientId, Set caches) { + if (clientId == null) { + throw new IllegalStateException("ClientID cannot be null"); + } + this.clientId = clientId; + this.caches = new HashSet(caches); + } + + public UUID getClientId() { + return clientId; + } + + public Set getAllCaches() { + return this.caches; + } + + public void addInvalidationsInProgress(String cacheId, Set hashInvalidationsInProgress) { + hashInvalidationsInProgressPerCache.put(cacheId, hashInvalidationsInProgress); + } + + public Set getInvalidationsInProgress(String cacheId) { + Set hashToInvalidate = hashInvalidationsInProgressPerCache.get(cacheId); + return hashToInvalidate == null ? Collections.EMPTY_SET : hashToInvalidate; + } + + public void addClearInProgress(String cacheId) { + cachesWithClearInProgress.add(cacheId); + } + + public boolean isClearInProgress(String cacheId) { + return cachesWithClearInProgress.contains(cacheId); + } + +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java new file mode 100644 index 0000000000..57dd41bf15 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodec.java @@ -0,0 +1,99 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
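The ReconnectMessageCodec that follows serializes that message as a 16-byte client UUID followed by one variable-length record per cache, sizing each record up front instead of keeping a running byte count the way the deleted ReconnectData did. A worked restatement of that arithmetic, reusing the codec's size constants; the class, method and example values here are ours:

    // Size of one per-cache record in the reconnect payload:
    //   [int nameLength][name as UTF-16 chars][1-byte clear flag]
    //   [int hashCount][hashCount x long hash]
    // The whole payload is prefixed by the 16-byte client UUID.
    public class ReconnectRecordSizeSketch {

      private static final int ENTRY_SIZE = 4;                    // one int field
      private static final int HASH_SIZE = 8;                     // one long hash
      private static final int CLEAR_IN_PROGRESS_STATUS_SIZE = 1; // status byte

      static int encodedCacheRecordSize(String cacheId, int pendingHashCount) {
        return 2 * cacheId.length()                  // UTF-16 characters
             + 2 * ENTRY_SIZE                        // name length + hash count
             + CLEAR_IN_PROGRESS_STATUS_SIZE
             + pendingHashCount * HASH_SIZE;
      }

      public static void main(String[] args) {
        // A cache named "orders" with two pending hash invalidations:
        // 12 + 8 + 1 + 16 = 37 bytes for the record, 53 with the UUID prefix.
        System.out.println(encodedCacheRecordSize("orders", 2));
      }
    }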
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +public class ReconnectMessageCodec { + + private static final byte CLIENT_ID_SIZE = 16; + private static final byte ENTRY_SIZE = 4; + private static final byte HASH_SIZE = 8; + private static final byte CLEAR_IN_PROGRESS_STATUS_SIZE = 1; + + public byte[] encode(ReconnectMessage reconnectMessage) { + int totalLength = 0; + Set caches = reconnectMessage.getAllCaches(); + List byteBuffers = new ArrayList(); + for (String cache : caches) { + Set hashToInvalidate = reconnectMessage.getInvalidationsInProgress(cache); + int sizeOfBuffer = 2 * cache.length() + CLEAR_IN_PROGRESS_STATUS_SIZE + hashToInvalidate.size() * HASH_SIZE + 2 * ENTRY_SIZE; + ByteBuffer encodedCache = ByteBuffer.allocate(sizeOfBuffer); + encodedCache.putInt(cache.length()); + CodecUtil.putStringAsCharArray(encodedCache, cache); + if (reconnectMessage.isClearInProgress(cache)) { + encodedCache.put((byte)1); + } else { + encodedCache.put((byte)0); + } + encodedCache.putInt(hashToInvalidate.size()); + for (long hash : hashToInvalidate) { + encodedCache.putLong(hash); + } + encodedCache.flip(); + byteBuffers.add(encodedCache); + totalLength += sizeOfBuffer; + } + ByteBuffer encodedMsg = ByteBuffer.allocate(totalLength + CLIENT_ID_SIZE); + encodedMsg.put(ClusteredEhcacheIdentity.serialize(reconnectMessage.getClientId())); + for (ByteBuffer byteBuffer : byteBuffers) { + encodedMsg.put(byteBuffer); + } + return encodedMsg.array(); + } + + public ReconnectMessage decode(byte[] payload) { + ByteBuffer byteBuffer = ByteBuffer.wrap(payload); + long msb = byteBuffer.getLong(); + long lsb = byteBuffer.getLong(); + + Map> caches = new HashMap>(); + Set clearInProgressCache = new HashSet(); + + while (byteBuffer.hasRemaining()) { + int cacheIdSize = byteBuffer.getInt(); + String cacheId = CodecUtil.getStringFromBuffer(byteBuffer, cacheIdSize); + byte clearInProgress = byteBuffer.get(); + if (clearInProgress == 1) { + clearInProgressCache.add(cacheId); + } + Set hashToInvalidate = new HashSet(); + int numOfHash = byteBuffer.getInt(); + for (int i = 0; i < numOfHash; i++) { + hashToInvalidate.add(byteBuffer.getLong()); + } + caches.put(cacheId, hashToInvalidate); + } + ReconnectMessage reconnectMessage = new ReconnectMessage(new UUID(msb, lsb), caches.keySet()); + for (Map.Entry> cacheEntry : caches.entrySet()) { + if (clearInProgressCache.contains(cacheEntry.getKey())) { + reconnectMessage.addClearInProgress(cacheEntry.getKey()); + } + reconnectMessage.addInvalidationsInProgress(cacheEntry.getKey(), cacheEntry.getValue()); + } + return reconnectMessage; + } +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java similarity index 54% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java rename to clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java index cbbf23733e..521936c8e3 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectDataCodecTest.java +++ 
b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java @@ -23,21 +23,22 @@ import java.util.Set; import java.util.UUID; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertThat; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.containsInAnyOrder; -public class ReconnectDataCodecTest { +public class ReconnectMessageCodecTest { @Test public void testCodec() { - ReconnectData reconnectData = new ReconnectData(); - reconnectData.add("test"); - reconnectData.add("test1"); - reconnectData.add("test2"); - reconnectData.setClientId(UUID.randomUUID()); + Set caches = new HashSet(); + caches.add("test"); + caches.add("test1"); + caches.add("test2"); + + ReconnectMessage reconnectMessage = new ReconnectMessage(UUID.randomUUID(), caches); Set firstSetToInvalidate = new HashSet(); firstSetToInvalidate.add(1L); @@ -49,21 +50,22 @@ public void testCodec() { secondSetToInvalidate.add(22L); secondSetToInvalidate.add(222L); secondSetToInvalidate.add(2222L); - reconnectData.addInvalidationsInProgress("test", firstSetToInvalidate); - reconnectData.addInvalidationsInProgress("test1", Collections.EMPTY_SET); - reconnectData.addInvalidationsInProgress("test2", secondSetToInvalidate); - - ReconnectDataCodec dataCodec = new ReconnectDataCodec(); + reconnectMessage.addInvalidationsInProgress("test", firstSetToInvalidate); + reconnectMessage.addInvalidationsInProgress("test1", Collections.EMPTY_SET); + reconnectMessage.addInvalidationsInProgress("test2", secondSetToInvalidate); + reconnectMessage.addClearInProgress("test"); - ReconnectData decoded = dataCodec.decode(dataCodec.encode(reconnectData)); + ReconnectMessageCodec dataCodec = new ReconnectMessageCodec(); + ReconnectMessage decoded = dataCodec.decode(dataCodec.encode(reconnectMessage)); assertThat(decoded, notNullValue()); - assertThat(decoded.getClientId(), is(reconnectData.getClientId())); + assertThat(decoded.getClientId(), is(reconnectMessage.getClientId())); assertThat(decoded.getAllCaches(), containsInAnyOrder("test", "test1", "test2")); - - assertThat(decoded.removeInvalidationsInProgress("test"), containsInAnyOrder(firstSetToInvalidate.toArray())); - assertThat(decoded.removeInvalidationsInProgress("test1").isEmpty(), is(true)); - assertThat(decoded.removeInvalidationsInProgress("test2"), containsInAnyOrder(secondSetToInvalidate.toArray())); - + assertThat(decoded.getInvalidationsInProgress("test"), containsInAnyOrder(firstSetToInvalidate.toArray())); + assertThat(decoded.getInvalidationsInProgress("test1").isEmpty(), is(true)); + assertThat(decoded.getInvalidationsInProgress("test2"), containsInAnyOrder(secondSetToInvalidate.toArray())); + assertThat(decoded.isClearInProgress("test"), is(true)); + assertThat(decoded.isClearInProgress("test1"), is(false)); + assertThat(decoded.isClearInProgress("test2"), is(false)); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index e1766f274d..76516b0b4e 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -50,9 +50,9 @@ import 
org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; -import org.ehcache.clustered.common.internal.messages.ReconnectData; -import org.ehcache.clustered.common.internal.messages.ReconnectDataCodec; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; +import org.ehcache.clustered.common.internal.messages.ReconnectMessage; +import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.KeyBasedServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; @@ -113,7 +113,7 @@ class EhcacheActiveEntity implements ActiveServerEntity clientIdMap = new ConcurrentHashMap<>(); private final Set trackedClients = Collections.newSetFromMap(new ConcurrentHashMap<>()); - private final ReconnectDataCodec reconnectDataCodec = new ReconnectDataCodec(); + private final ReconnectMessageCodec reconnectMessageCodec = new ReconnectMessageCodec(); private final ServerStoreCompatibility storeCompatibility = new ServerStoreCompatibility(); private final EhcacheEntityResponseFactory responseFactory; private final ConcurrentMap clientsWaitingForInvalidation = new ConcurrentHashMap<>(); @@ -294,10 +294,9 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe throw new AssertionError("Client "+ clientDescriptor +" trying to reconnect is not connected to entity"); } clientState.attach(); - ReconnectData reconnectData = reconnectDataCodec.decode(extendedReconnectData); - addClientId(clientDescriptor, reconnectData.getClientId()); - Set cacheIds = reconnectData.getAllCaches(); - Set clearInProgressCaches = reconnectData.getClearInProgressCaches(); + ReconnectMessage reconnectMessage = reconnectMessageCodec.decode(extendedReconnectData); + addClientId(clientDescriptor, reconnectMessage.getClientId()); + Set cacheIds = reconnectMessage.getAllCaches(); for (final String cacheId : cacheIds) { ServerStoreImpl serverStore = ehcacheStateService.getStore(cacheId); if (serverStore == null) { @@ -306,18 +305,7 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe LOGGER.warn("ServerStore '{}' does not exist as expected by Client '{}'.", cacheId, clientDescriptor); continue; } - if (serverStore.getStoreConfiguration().getConsistency().equals(Consistency.STRONG)) { - Set invalidationsInProgress = reconnectData.removeInvalidationsInProgress(cacheId); - LOGGER.debug("Number of Inflight Invalidations from client ID {} for cache {} is {}.", reconnectData.getClientId(), cacheId, invalidationsInProgress - .size()); - inflightInvalidations.compute(cacheId, (s, tuples) -> { - if (tuples == null) { - tuples = new ArrayList<>(); - } - tuples.add(new InvalidationTuple(clientDescriptor, invalidationsInProgress, clearInProgressCaches.contains(cacheId))); - return tuples; - }); - } + addInflightInvalidationsForStrongCache(clientDescriptor, reconnectMessage, cacheId, serverStore); serverStore.setEvictionListener(key -> invalidateHashAfterEviction(cacheId, key)); attachStore(clientDescriptor, cacheId); @@ -326,6 +314,21 @@ public void handleReconnect(ClientDescriptor 
clientDescriptor, byte[] extendedRe } + private void addInflightInvalidationsForStrongCache(ClientDescriptor clientDescriptor, ReconnectMessage reconnectMessage, String cacheId, ServerStoreImpl serverStore) { + if (serverStore.getStoreConfiguration().getConsistency().equals(Consistency.STRONG)) { + Set invalidationsInProgress = reconnectMessage.getInvalidationsInProgress(cacheId); + LOGGER.debug("Number of Inflight Invalidations from client ID {} for cache {} is {}.", reconnectMessage.getClientId(), cacheId, invalidationsInProgress + .size()); + inflightInvalidations.compute(cacheId, (s, tuples) -> { + if (tuples == null) { + tuples = new ArrayList<>(); + } + tuples.add(new InvalidationTuple(clientDescriptor, invalidationsInProgress, reconnectMessage.isClearInProgress(cacheId))); + return tuples; + }); + } + } + @Override public void synchronizeKeyToPassive(PassiveSynchronizationChannel syncChannel, int concurrencyKey) { LOGGER.info("Sync started for concurrency key {}.", concurrencyKey); @@ -362,17 +365,24 @@ public void createNew() { public void loadExisting() { LOGGER.debug("Preparing for handling Inflight Invalidations and independent Passive Evictions in loadExisting"); inflightInvalidations = new ConcurrentHashMap<>(); + addInflightInvalidationsForEventualCaches(); + ehcacheStateService.clearInvalidationTrackers(); + } + + private void addInflightInvalidationsForEventualCaches() { Set caches = ehcacheStateService.getStores(); caches.forEach(cacheId -> { InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(cacheId); - inflightInvalidations.computeIfPresent(cacheId, (s, invalidationTuples) -> { - if (invalidationTuples == null) { - invalidationTuples = new ArrayList<>(); - } - invalidationTuples.add(new InvalidationTuple(null, invalidationTracker.getInvalidationMap() - .keySet(), invalidationTracker.isClearInProgress())); - return invalidationTuples; - }); + if (invalidationTracker != null) { + inflightInvalidations.compute(cacheId, (s, invalidationTuples) -> { + if (invalidationTuples == null) { + invalidationTuples = new ArrayList<>(); + } + invalidationTuples.add(new InvalidationTuple(null, invalidationTracker.getInvalidationMap() + .keySet(), invalidationTracker.isClearInProgress())); + return invalidationTuples; + }); + } }); } @@ -501,7 +511,7 @@ private void sendMessageToSelfAndDeferRetirement(KeyBasedServerStoreOpMessage me try { entityMessenger.messageSelfAndDeferRetirement(message, new ChainReplicationMessage(message.getCacheId(), message.getKey(), result, message.getId(), message.getClientId())); } catch (MessageCodecException e) { - LOGGER.error("Codec Exception", e); + throw new AssertionError("Codec error", e); } } @@ -519,8 +529,7 @@ private void invalidateHashAfterEviction(String cacheId, long key) { try { clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, serverInvalidateHash(cacheId, key)); } catch (MessageCodecException mce) { - //TODO: what should be done here? 
- LOGGER.error("Codec error", mce); + throw new AssertionError("Codec error", mce); } } } @@ -533,8 +542,7 @@ private void invalidateHashForClient(ClientDescriptor originatingClientDescripto clientsToInvalidate.remove(originatingClientDescriptor); } - InvalidationHolder invalidationHolder = null; - invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId, key); + InvalidationHolder invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId, key); clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); LOGGER.debug("SERVER: requesting {} client(s) invalidation of hash {} in cache {} (ID {})", clientsToInvalidate.size(), key, cacheId, invalidationId); @@ -543,12 +551,11 @@ private void invalidateHashForClient(ClientDescriptor originatingClientDescripto try { clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, clientInvalidateHash(cacheId, key, invalidationId)); } catch (MessageCodecException mce) { - //TODO: what should be done here? - LOGGER.error("Codec error", mce); + throw new AssertionError("Codec error", mce); } } - if (invalidationHolder != null && clientsToInvalidate.isEmpty()) { + if (clientsToInvalidate.isEmpty()) { clientInvalidated(invalidationHolder.clientDescriptorWaitingForInvalidation, invalidationId); } } @@ -561,8 +568,7 @@ private void invalidateAll(ClientDescriptor originatingClientDescriptor, String clientsToInvalidate.remove(originatingClientDescriptor); } - InvalidationHolder invalidationHolder = null; - invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId); + InvalidationHolder invalidationHolder = new InvalidationHolder(originatingClientDescriptor, clientsToInvalidate, cacheId); clientsWaitingForInvalidation.put(invalidationId, invalidationHolder); LOGGER.debug("SERVER: requesting {} client(s) invalidation of all in cache {} (ID {})", clientsToInvalidate.size(), cacheId, invalidationId); @@ -571,12 +577,11 @@ private void invalidateAll(ClientDescriptor originatingClientDescriptor, String try { clientCommunicator.sendNoResponse(clientDescriptorThatHasToInvalidate, clientInvalidateAll(cacheId, invalidationId)); } catch (MessageCodecException mce) { - //TODO: what should be done here? - LOGGER.error("Codec error", mce); + throw new AssertionError("Codec error", mce); } } - if (invalidationHolder != null && clientsToInvalidate.isEmpty()) { + if (clientsToInvalidate.isEmpty()) { clientInvalidated(invalidationHolder.clientDescriptorWaitingForInvalidation, invalidationId); } } @@ -585,7 +590,7 @@ private void clientInvalidated(ClientDescriptor clientDescriptor, int invalidati InvalidationHolder invalidationHolder = clientsWaitingForInvalidation.get(invalidationId); if (invalidationHolder == null) { // Happens when client is re-sending/sending invalidations for which server has lost track since fail-over happened. - LOGGER.warn("Ignoring invalidation from client {} " + clientDescriptor); + LOGGER.debug("Ignoring invalidation from client {} " + clientDescriptor); return; } @@ -611,8 +616,7 @@ private void clientInvalidated(ClientDescriptor clientDescriptor, int invalidati } } } catch (MessageCodecException mce) { - //TODO: what should be done here? 
- LOGGER.error("Codec error", mce); + throw new AssertionError("Codec error", mce); } } } @@ -681,7 +685,7 @@ private void validate(ClientDescriptor clientDescriptor, ValidateStoreManager me try { entityMessenger.messageSelfAndDeferRetirement(message, new ClientIDTrackerMessage(message.getId(), message.getClientId())); } catch (MessageCodecException e) { - LOGGER.error("Codec Exception", e); + throw new AssertionError("Codec error", e); } addClientId(clientDescriptor, message.getClientId()); ehcacheStateService.validate(message.getConfiguration()); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index d02ffc2ec9..3f7b09a49b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -123,29 +123,14 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws } cacheStore.put(retirementMessage.getKey(), retirementMessage.getChain()); ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); - InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(retirementMessage.getCacheId()); - if (invalidationTracker != null) { - invalidationTracker.getInvalidationMap().computeIfPresent(retirementMessage.getKey(), (key, count) -> { - if (count == null) { - return count = 1; - } else { - return count++; - } - }); - } + trackHashInvalidationForEventualCache(retirementMessage); break; case CLIENTID_TRACK_OP: LOGGER.debug("PassiveReplicationMessage message for msgId {} & client Id {}", message.getId(), message.getClientId()); ehcacheStateService.getClientMessageTracker().add(message.getClientId()); break; case INVALIDATION_COMPLETE: - InvalidationCompleteMessage invalidationCompleteMessage = (InvalidationCompleteMessage)message; - ehcacheStateService.getInvalidationTracker(invalidationCompleteMessage.getCacheId()).getInvalidationMap().computeIfPresent(invalidationCompleteMessage.getKey(), (key, count) -> { - if (count == 1) { - return null; - } - return count--; - }); + untrackHashInvalidationForEventualCache((InvalidationCompleteMessage)message); break; case CLEAR_INVALIDATION_COMPLETE: ehcacheStateService.getInvalidationTracker(((ClearInvalidationCompleteMessage)message).getCacheId()).setClearInProgress(false); @@ -155,6 +140,28 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws } } + private void untrackHashInvalidationForEventualCache(InvalidationCompleteMessage message) {InvalidationCompleteMessage invalidationCompleteMessage = message; + ehcacheStateService.getInvalidationTracker(invalidationCompleteMessage.getCacheId()).getInvalidationMap().computeIfPresent(invalidationCompleteMessage.getKey(), (key, count) -> { + if (count == 1) { + return null; + } + return count--; + }); + } + + private void trackHashInvalidationForEventualCache(ChainReplicationMessage retirementMessage) { + InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(retirementMessage.getCacheId()); + if (invalidationTracker != null) { + invalidationTracker.getInvalidationMap().compute(retirementMessage.getKey(), (key, count) -> { + if (count == null) { + return 1; + } else { + return count++; + } + }); + } + } + private void invokeServerStoreOperation(ServerStoreOpMessage message) throws ClusterException { 
ServerStoreImpl cacheStore = ehcacheStateService.getStore(message.getCacheId()); if (cacheStore == null) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index d655973853..e1bce3798c 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -371,6 +371,12 @@ public InvalidationTracker getInvalidationTracker(String cacheId) { return this.invalidationMap.get(cacheId); } + @Override + public void clearInvalidationTrackers() { + invalidationMap.forEach((cache, invalidationTracker) -> invalidationTracker.getInvalidationMap().clear()); + invalidationMap.clear(); + } + public boolean isConfigured() { return (sharedResourcePools != null); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index 3497914a67..dc9ca7feff 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -56,4 +56,6 @@ public interface EhcacheStateService { InvalidationTracker getInvalidationTracker(String cacheId); + void clearInvalidationTrackers(); + } From eb78803fada5fc839d03571c3b253d4768c396e1 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Wed, 19 Oct 2016 14:39:26 -0400 Subject: [PATCH 087/218] Add do nothing implementations --- .../store/basic/EmptyValueHolder.java | 75 +++++++ .../impl/internal/store/basic/NopStore.java | 195 ++++++++++++++++++ 2 files changed, 270 insertions(+) create mode 100644 impl/src/main/java/org/ehcache/impl/internal/store/basic/EmptyValueHolder.java create mode 100644 impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/basic/EmptyValueHolder.java b/impl/src/main/java/org/ehcache/impl/internal/store/basic/EmptyValueHolder.java new file mode 100644 index 0000000000..827744192b --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/internal/store/basic/EmptyValueHolder.java @@ -0,0 +1,75 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.impl.internal.store.basic; + +import org.ehcache.core.spi.store.Store; + +import java.util.concurrent.TimeUnit; + +/** + * A value holder that always contains null + * + * @author Henri Tremblay + */ +public class EmptyValueHolder implements Store.ValueHolder { + + private static final Store.ValueHolder EMPTY = new EmptyValueHolder(); + + @SuppressWarnings("unchecked") + public static Store.ValueHolder empty() { + return (Store.ValueHolder) EMPTY; + } + + @Override + public V value() { + return null; + } + + @Override + public long creationTime(TimeUnit unit) { + return 0; + } + + @Override + public long expirationTime(TimeUnit unit) { + return 0; + } + + @Override + public boolean isExpired(long expirationTime, TimeUnit unit) { + return false; + } + + @Override + public long lastAccessTime(TimeUnit unit) { + return 0; + } + + @Override + public float hitRate(long now, TimeUnit unit) { + return 0; + } + + @Override + public long hits() { + return 0; + } + + @Override + public long getId() { + return 0; + } +} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java new file mode 100644 index 0000000000..12532fbdfe --- /dev/null +++ b/impl/src/main/java/org/ehcache/impl/internal/store/basic/NopStore.java @@ -0,0 +1,195 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.basic; + +import org.ehcache.Cache; +import org.ehcache.core.CacheConfigurationChangeListener; +import org.ehcache.core.spi.function.BiFunction; +import org.ehcache.core.spi.function.Function; +import org.ehcache.core.spi.function.NullaryFunction; +import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.spi.store.events.StoreEventFilter; +import org.ehcache.core.spi.store.events.StoreEventListener; +import org.ehcache.core.spi.store.events.StoreEventSource; +import org.ehcache.core.spi.store.tiering.AuthoritativeTier; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * A store that doesn't store anything. 
+ * + * @author Henri Tremblay + */ +public class NopStore implements AuthoritativeTier { + + + @Override + public List getConfigurationChangeListeners() { + return Collections.emptyList(); + } + + @Override + public ValueHolder getAndFault(K key) throws StoreAccessException { + return null; + } + + @Override + public ValueHolder computeIfAbsentAndFault(K key, Function mappingFunction) throws StoreAccessException { + return null; + } + + @Override + public boolean flush(K key, ValueHolder valueHolder) { + return false; + } + + @Override + public void setInvalidationValve(InvalidationValve valve) { + + } + + @Override + public ValueHolder get(K key) throws StoreAccessException { + return null; + } + + @Override + public boolean containsKey(K key) throws StoreAccessException { + return false; + } + + @Override + public PutStatus put(K key, V value) throws StoreAccessException { + return PutStatus.PUT; + } + + @Override + public ValueHolder putIfAbsent(K key, V value) throws StoreAccessException { + return EmptyValueHolder.empty(); + } + + @Override + public boolean remove(K key) throws StoreAccessException { + return false; + } + + @Override + public RemoveStatus remove(K key, V value) throws StoreAccessException { + return RemoveStatus.KEY_MISSING; + } + + @Override + public ValueHolder replace(K key, V value) throws StoreAccessException { + return null; + } + + @Override + public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessException { + return ReplaceStatus.MISS_NOT_PRESENT; + } + + @Override + public void clear() throws StoreAccessException { + + } + + @Override + public StoreEventSource getStoreEventSource() { + return new StoreEventSource() { + @Override + public void addEventListener(StoreEventListener eventListener) { + + } + + @Override + public void removeEventListener(StoreEventListener eventListener) { + + } + + @Override + public void addEventFilter(StoreEventFilter eventFilter) { + + } + + @Override + public void setEventOrdering(boolean ordering) { + + } + + @Override + public boolean isEventOrdering() { + return false; + } + }; + } + + @Override + public Iterator>> iterator() { + return new Iterator>>() { + @Override + public boolean hasNext() { + return false; + } + + @Override + public Cache.Entry> next() throws StoreAccessException { + return null; + } + }; + } + + @Override + public ValueHolder compute(K key, BiFunction mappingFunction) throws StoreAccessException { + return EmptyValueHolder.empty(); + } + + @Override + public ValueHolder compute(K key, BiFunction mappingFunction, NullaryFunction replaceEqual) throws StoreAccessException { + return null; + } + + @Override + public ValueHolder computeIfAbsent(K key, Function mappingFunction) throws StoreAccessException { + return null; + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction) throws StoreAccessException { + return bulkCompute(keys, remappingFunction, null); + } + + @Override + public Map> bulkCompute(Set keys, Function>, Iterable>> remappingFunction, NullaryFunction replaceEqual) throws StoreAccessException { + Map> map = new HashMap>(keys.size()); + for(K key : keys) { + map.put(key, EmptyValueHolder.empty()); + } + return map; + } + + @Override + public Map> bulkComputeIfAbsent(Set keys, Function, Iterable>> mappingFunction) throws StoreAccessException { + Map> map = new HashMap>(keys.size()); + for(K key : keys) { + map.put(key, EmptyValueHolder.empty()); + } + return map; + } +} From 08b30cc19e29e2de18d11943365e6c45d90b9e4f Mon Sep 17 
00:00:00 2001 From: Henri Tremblay Date: Wed, 19 Oct 2016 14:41:30 -0400 Subject: [PATCH 088/218] No need to have many instances of the StringCopier --- .../java/org/ehcache/docs/UserManagedCaches.java | 14 ++++---------- .../java/org/ehcache/docs/plugs/StringCopier.java | 6 ++++++ 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java b/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java index 03ba3fce80..838c14ed69 100644 --- a/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java +++ b/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java @@ -21,20 +21,14 @@ import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; import org.ehcache.config.builders.ResourcePoolsBuilder; import org.ehcache.config.builders.UserManagedCacheBuilder; -import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; -import org.ehcache.event.EventType; -import org.ehcache.impl.config.persistence.UserManagedPersistenceContext; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.docs.plugs.ListenerObject; -import org.ehcache.docs.plugs.LongCopier; -import org.ehcache.impl.serialization.LongSerializer; -import org.ehcache.docs.plugs.OddKeysEvictionAdvisor; -import org.ehcache.docs.plugs.SampleLoaderWriter; -import org.ehcache.docs.plugs.StringCopier; -import org.ehcache.impl.serialization.StringSerializer; +import org.ehcache.event.EventType; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.impl.config.persistence.UserManagedPersistenceContext; import org.ehcache.impl.persistence.DefaultLocalPersistenceService; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.junit.Test; import java.io.File; diff --git a/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java b/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java index afbe1f2517..15ca26ef74 100644 --- a/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java +++ b/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java @@ -25,6 +25,12 @@ public class StringCopier implements Copier { private static final Logger LOG = LoggerFactory.getLogger(StringCopier.class); + private static final Copier STRING_COPIER = new StringCopier(); + + public static Copier copier() { + return STRING_COPIER; + } + @Override public String copyForRead(String obj) { LOG.info("Copying for read {}", obj); From cf68253ca64a529fb0c18dcca0481c881f6d3ee2 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Tue, 25 Oct 2016 13:32:37 -0400 Subject: [PATCH 089/218] INFO seems a bit strong for method calls --- impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java b/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java index 15ca26ef74..46655b214c 100644 --- a/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java +++ b/impl/src/test/java/org/ehcache/docs/plugs/StringCopier.java @@ -33,13 +33,13 @@ public static Copier copier() { @Override public String copyForRead(String obj) { - LOG.info("Copying for read {}", obj); + LOG.debug("Copying for read {}", obj); return obj; } @Override public String copyForWrite(String obj) { - LOG.info("Copying for write {}", obj); + LOG.debug("Copying for write {}", obj); return obj; } } From 
ac154e0e45fd9f6d3179d09471cb3f11cec99726 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Wed, 26 Oct 2016 20:29:49 +0530 Subject: [PATCH 090/218] Explicit invalidation tracking at passive #1211 --- .../client/internal/EhcacheClientEntity.java | 1 - .../clustered/server/EhcacheActiveEntity.java | 4 +- .../server/EhcacheExecutionStrategy.java | 4 +- .../server/EhcachePassiveEntity.java | 5 +++ .../server/EhcacheStateServiceImpl.java | 14 +++--- .../server/state/EhcacheStateService.java | 4 +- .../server/EhcacheActiveEntityTest.java | 43 +++++++++++++++++++ 7 files changed, 62 insertions(+), 13 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index e6cd70ac2c..5126f4a020 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -117,7 +117,6 @@ public byte[] createExtendedReconnectData() { for (ReconnectListener reconnectListener : reconnectListeners) { reconnectListener.onHandleReconnect(reconnectMessage); } -// return reconnectDataCodec.encode(reconnectData); return reconnectMessageCodec.encode(reconnectMessage); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 76516b0b4e..a8cee10678 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -366,13 +366,12 @@ public void loadExisting() { LOGGER.debug("Preparing for handling Inflight Invalidations and independent Passive Evictions in loadExisting"); inflightInvalidations = new ConcurrentHashMap<>(); addInflightInvalidationsForEventualCaches(); - ehcacheStateService.clearInvalidationTrackers(); } private void addInflightInvalidationsForEventualCaches() { Set caches = ehcacheStateService.getStores(); caches.forEach(cacheId -> { - InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(cacheId); + InvalidationTracker invalidationTracker = ehcacheStateService.removeInvalidationtracker(cacheId); if (invalidationTracker != null) { inflightInvalidations.compute(cacheId, (s, invalidationTuples) -> { if (invalidationTuples == null) { @@ -382,6 +381,7 @@ private void addInflightInvalidationsForEventualCaches() { .keySet(), invalidationTracker.isClearInProgress())); return invalidationTuples; }); + invalidationTracker.getInvalidationMap().clear(); } }); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java index c6dfa6b7fc..3ccd486ee1 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java @@ -16,9 +16,9 @@ package org.ehcache.clustered.server; -import org.ehcache.clustered.common.internal.messages.ClientIDTrackerMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; import 
org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.server.internal.messages.EntitySyncMessage; @@ -54,7 +54,7 @@ public Location getExecutionLocation(EhcacheEntityMessage message) { } else if (message instanceof StateRepositoryOpMessage) { // StateRepositoryOp not needing replication return Location.ACTIVE; - } else if (message instanceof ClientIDTrackerMessage) { + } else if (message instanceof PassiveReplicationMessage) { return Location.PASSIVE; } else if (message instanceof EntitySyncMessage) { throw new AssertionError("Unexpected use of ExecutionStrategy for sync messages"); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 3f7b09a49b..2a72e2f4af 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.server; +import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; @@ -263,6 +264,9 @@ private void createServerStore(CreateServerStore createServerStore) throws Clust ServerStoreConfiguration storeConfiguration = createServerStore.getStoreConfiguration(); ehcacheStateService.createStore(name, storeConfiguration); + if(storeConfiguration.getConsistency() == Consistency.EVENTUAL) { + ehcacheStateService.addInvalidationtracker(name); + } } private void destroyServerStore(DestroyServerStore destroyServerStore) throws ClusterException { @@ -276,6 +280,7 @@ private void destroyServerStore(DestroyServerStore destroyServerStore) throws Cl LOGGER.info("Destroying clustered tier '{}'", name); ehcacheStateService.destroyServerStore(name); + ehcacheStateService.removeInvalidationtracker(name); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index e1bce3798c..4bc5de23d4 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -308,9 +308,6 @@ public ServerStoreImpl createStore(String name, ServerStoreConfiguration serverS PageSource resourcePageSource = getPageSource(name, serverStoreConfiguration.getPoolAllocation()); ServerStoreImpl serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource, mapper); stores.put(name, serverStore); - if(serverStoreConfiguration.getConsistency() == Consistency.EVENTUAL) { - invalidationMap.put(name, new InvalidationTracker()); - } return serverStore; } @@ -323,7 +320,6 @@ public void destroyServerStore(String name) throws ClusterException { store.close(); } stateRepositoryManager.destroyStateRepository(name); - invalidationMap.remove(name); } private PageSource getPageSource(String name, PoolAllocation allocation) throws ClusterException { @@ -372,9 +368,13 @@ public InvalidationTracker getInvalidationTracker(String cacheId) { } @Override - public void clearInvalidationTrackers() { - invalidationMap.forEach((cache, 
invalidationTracker) -> invalidationTracker.getInvalidationMap().clear()); - invalidationMap.clear(); + public void addInvalidationtracker(String cacheId) { + this.invalidationMap.put(cacheId, new InvalidationTracker()); + } + + @Override + public InvalidationTracker removeInvalidationtracker(String cacheId) { + return this.invalidationMap.remove(cacheId); } public boolean isConfigured() { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index dc9ca7feff..0aaf6558f8 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -56,6 +56,8 @@ public interface EhcacheStateService { InvalidationTracker getInvalidationTracker(String cacheId); - void clearInvalidationTrackers(); + void addInvalidationtracker(String cacheId); + + InvalidationTracker removeInvalidationtracker(String cacheId); } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index b2e62d2051..97108745d9 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -37,6 +37,7 @@ import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.server.internal.messages.EntityStateSyncMessage; import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.InvalidationTracker; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; @@ -56,6 +57,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; +import java.util.Random; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; @@ -2635,6 +2637,47 @@ public void testSyncToPassive() throws Exception { } + @Test + public void testLoadExistingRecoversInflightInvalidationsForEventualCache() { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("test", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + EhcacheStateServiceImpl ehcacheStateService = registry.getStoreManagerService(); + ehcacheStateService.addInvalidationtracker("test"); + + InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker("test"); + + Random random = new Random(); + random.ints(0, 100).limit(10).forEach(x -> invalidationTracker.getInvalidationMap().put((long)x, x)); + + 
activeEntity.loadExisting(); + + assertThat(ehcacheStateService.getInvalidationTracker("test"), nullValue()); + + } + private void assertSuccess(EhcacheEntityResponse response) throws Exception { if (!response.equals(EhcacheEntityResponse.Success.INSTANCE)) { throw ((Failure) response).getCause(); From 00e93080b3a33a3e74078eb8a3f7250db6a85e45 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Wed, 19 Oct 2016 15:13:22 -0400 Subject: [PATCH 091/218] Remove all conditional invalidations to prevent incoherent results (close #1522) --- .../impl/internal/store/heap/OnHeapStore.java | 122 +++-- .../internal/store/tiering/TieredStore.java | 100 ++-- .../store/tiering/TieredStoreMutatorTest.java | 459 ++++++++++++++++++ .../store/tiering/TieredStoreTest.java | 17 +- 4 files changed, 571 insertions(+), 127 deletions(-) create mode 100644 impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index c90fce7434..62429ab506 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -220,6 +220,12 @@ public OnHeapStore(final Configuration config, final TimeSource timeSource if (heapPool == null) { throw new IllegalArgumentException("OnHeap store must be configured with a resource of type 'heap'"); } + if (timeSource == null) { + throw new NullPointerException("timeSource must not be null"); + } + if (sizeOfEngine == null) { + throw new NullPointerException("sizeOfEngine must not be null"); + } this.sizeOfEngine = sizeOfEngine; boolean byteSized = this.sizeOfEngine instanceof NoopSizeOfEngine ? false : true; this.capacity = byteSized ? 
((MemoryUnit) heapPool.getUnit()).toBytes(heapPool.getSize()) : heapPool.getSize(); @@ -689,7 +695,9 @@ public ValueHolder getOrComputeIfAbsent(final K key, final Function backEnd = map; + // First try to find the value from heap OnHeapValueHolder cachedValue = backEnd.get(key); + final long now = timeSource.getTimeMillis(); if (cachedValue == null) { final Fault fault = new Fault(new NullaryFunction>() { @@ -699,60 +707,14 @@ public ValueHolder apply() { } }); cachedValue = backEnd.putIfAbsent(key, fault); - if (cachedValue == null) { - try { - final ValueHolder value = fault.get(); - final OnHeapValueHolder newValue; - if(value != null) { - newValue = importValueFromLowerTier(key, value, now, backEnd, fault); - if (newValue == null) { - // Inline expiry or sizing failure - backEnd.remove(key, fault); - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return value; - } - } else { - backEnd.remove(key, fault); - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); - return null; - } - if (backEnd.replace(key, fault, newValue)) { - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED); - updateUsageInBytesIfRequired(newValue.size()); - enforceCapacity(); - return getValue(newValue); - } else { - final AtomicReference> invalidatedValue = new AtomicReference>(); - backEnd.computeIfPresent(key, new BiFunction, OnHeapValueHolder>() { - @Override - public OnHeapValueHolder apply(K mappedKey, OnHeapValueHolder mappedValue) { - notifyInvalidation(key, mappedValue); - invalidatedValue.set(mappedValue); - updateUsageInBytesIfRequired(mappedValue.size()); - return null; - } - }); - ValueHolder p = getValue(invalidatedValue.get()); - if (p != null) { - if (p.isExpired(now, TimeUnit.MILLISECONDS)) { - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS); - return null; - } else { - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return p; - } - } - getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); - return newValue; - } - } catch (Throwable e) { - backEnd.remove(key, fault); - throw new StoreAccessException(e); - } + if (cachedValue == null) { + return resolveFault(key, backEnd, now, fault); } } + // If we have a real value (not a fault), we make sure it is not expired + // If yes, we return null and remove it. 
If no, we return it (below) if (!(cachedValue instanceof Fault)) { if (cachedValue.isExpired(now, TimeUnit.MILLISECONDS)) { expireMappingUnderLock(key, cachedValue); @@ -763,6 +725,8 @@ public OnHeapValueHolder apply(K mappedKey, OnHeapValueHolder mappedValue) } getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT); + + // Return the value that we found in the cache (by getting the fault or just returning the plain value depending on what we found) return getValue(cachedValue); } catch (RuntimeException re) { handleRuntimeException(re); @@ -770,6 +734,62 @@ public OnHeapValueHolder apply(K mappedKey, OnHeapValueHolder mappedValue) } } + private ValueHolder resolveFault(final K key, Backend backEnd, long now, Fault fault) throws StoreAccessException { + try { + final ValueHolder value = fault.get(); + final OnHeapValueHolder newValue; + if(value != null) { + newValue = importValueFromLowerTier(key, value, now, backEnd, fault); + if (newValue == null) { + // Inline expiry or sizing failure + backEnd.remove(key, fault); + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return value; + } + } else { + backEnd.remove(key, fault); + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS); + return null; + } + + if (backEnd.replace(key, fault, newValue)) { + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED); + updateUsageInBytesIfRequired(newValue.size()); + enforceCapacity(); + return newValue; + } + + final AtomicReference> invalidatedValue = new AtomicReference>(); + backEnd.computeIfPresent(key, new BiFunction, OnHeapValueHolder>() { + @Override + public OnHeapValueHolder apply(K mappedKey, OnHeapValueHolder mappedValue) { + notifyInvalidation(key, mappedValue); + invalidatedValue.set(mappedValue); + updateUsageInBytesIfRequired(mappedValue.size()); + return null; + } + }); + + ValueHolder p = getValue(invalidatedValue.get()); + if (p != null) { + if (p.isExpired(now, TimeUnit.MILLISECONDS)) { + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS); + return null; + } + + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return p; + } + + getOrComputeIfAbsentObserver.end(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED); + return newValue; + + } catch (Throwable e) { + backEnd.remove(key, fault); + throw new StoreAccessException(e); + } + } + private void invalidateInGetOrComputeIfAbsent(Backend map, final K key, final ValueHolder value, final Fault fault, final long now, final Duration expiration) { map.computeIfPresent(key, new BiFunction, OnHeapValueHolder>() { @Override @@ -1065,7 +1085,7 @@ public long size() { @Override public String toString() { - return "[Fault : " + (complete ? (throwable == null ? value.toString() : throwable.getMessage()) : "???") + "]"; + return "[Fault : " + (complete ? (throwable == null ? 
String.valueOf(value) : throwable.getMessage()) : "???") + "]"; } @Override diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java index c98e5f45d7..946f5d78ce 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/tiering/TieredStore.java @@ -16,26 +16,25 @@ package org.ehcache.impl.internal.store.tiering; import org.ehcache.Cache; -import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; import org.ehcache.core.CacheConfigurationChangeListener; -import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; import org.ehcache.core.spi.function.BiFunction; import org.ehcache.core.spi.function.Function; import org.ehcache.core.spi.function.NullaryFunction; -import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; -import org.ehcache.impl.internal.store.heap.OnHeapStore; -import org.ehcache.impl.internal.store.offheap.OffHeapStore; -import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.store.events.StoreEventSource; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.core.spi.store.tiering.CachingTier; +import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; +import org.ehcache.impl.internal.store.heap.OnHeapStore; +import org.ehcache.impl.internal.store.offheap.OffHeapStore; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; -import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.context.annotations.ContextAttribute; @@ -52,10 +51,6 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicReference; -import static org.ehcache.config.ResourceType.Core.DISK; -import static org.ehcache.config.ResourceType.Core.HEAP; -import static org.ehcache.config.ResourceType.Core.OFFHEAP; - /** * A {@link Store} implementation supporting a tiered caching model. 
*/ @@ -154,15 +149,11 @@ public PutStatus put(final K key, final V value) throws StoreAccessException { @Override public ValueHolder putIfAbsent(K key, V value) throws StoreAccessException { - ValueHolder previous = null; try { - previous = authoritativeTier.putIfAbsent(key, value); + return authoritativeTier.putIfAbsent(key, value); } finally { - if (previous == null) { - cachingTier().invalidate(key); - } + cachingTier().invalidate(key); } - return previous; } @Override @@ -176,79 +167,55 @@ public boolean remove(K key) throws StoreAccessException { @Override public RemoveStatus remove(K key, V value) throws StoreAccessException { - RemoveStatus removed = null; try { - removed = authoritativeTier.remove(key, value); - return removed; + return authoritativeTier.remove(key, value); } finally { - if (removed != null && removed.equals(RemoveStatus.REMOVED)) { - cachingTier().invalidate(key); - } + cachingTier().invalidate(key); } } @Override public ValueHolder replace(K key, V value) throws StoreAccessException { - ValueHolder previous = null; - boolean exceptionThrown = true; try { - previous = authoritativeTier.replace(key, value); - exceptionThrown = false; + return authoritativeTier.replace(key, value); } finally { - if (exceptionThrown || previous != null) { - cachingTier().invalidate(key); - } + cachingTier().invalidate(key); } - return previous; } @Override public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessException { - ReplaceStatus replaced = null; try { - replaced = authoritativeTier.replace(key, oldValue, newValue); + return authoritativeTier.replace(key, oldValue, newValue); } finally { - if (replaced != null && replaced.equals(ReplaceStatus.HIT)) { - cachingTier().invalidate(key); - } + cachingTier().invalidate(key); } - return replaced; } @Override public void clear() throws StoreAccessException { - boolean interrupted = false; - while(!cachingTierRef.compareAndSet(realCachingTier, noopCachingTier)) { - synchronized (noopCachingTier) { - if(cachingTierRef.get() == noopCachingTier) { - try { - noopCachingTier.wait(); - } catch (InterruptedException e) { - interrupted = true; - } - } - } - } - if(interrupted) { - Thread.currentThread().interrupt(); - } + swapCachingTiers(); try { authoritativeTier.clear(); } finally { try { realCachingTier.clear(); } finally { - if(!cachingTierRef.compareAndSet(noopCachingTier, realCachingTier)) { - throw new AssertionError("Something bad happened"); - } - synchronized (noopCachingTier) { - noopCachingTier.notify(); - } + swapBackCachingTiers(); } } } private void invalidateAllInternal() throws StoreAccessException { + swapCachingTiers(); + try { + realCachingTier.invalidateAll(); + } finally { + swapBackCachingTiers(); + } + } + + private void swapCachingTiers() { boolean interrupted = false; while(!cachingTierRef.compareAndSet(realCachingTier, noopCachingTier)) { synchronized (noopCachingTier) { @@ -264,15 +231,14 @@ private void invalidateAllInternal() throws StoreAccessException { if(interrupted) { Thread.currentThread().interrupt(); } - try { - realCachingTier.invalidateAll(); - } finally { - if(!cachingTierRef.compareAndSet(noopCachingTier, realCachingTier)) { - throw new AssertionError("Something bad happened"); - } - synchronized (noopCachingTier) { - noopCachingTier.notify(); - } + } + + private void swapBackCachingTiers() { + if(!cachingTierRef.compareAndSet(noopCachingTier, realCachingTier)) { + throw new AssertionError("Something bad happened"); + } + synchronized (noopCachingTier) { + 
noopCachingTier.notify(); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java new file mode 100644 index 0000000000..b7baf15198 --- /dev/null +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreMutatorTest.java @@ -0,0 +1,459 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.impl.internal.store.tiering; + +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.internal.store.StoreConfigurationImpl; +import org.ehcache.core.spi.store.Store; +import org.ehcache.core.spi.store.StoreAccessException; +import org.ehcache.core.spi.store.tiering.AuthoritativeTier; +import org.ehcache.core.spi.store.tiering.CachingTier; +import org.ehcache.core.spi.time.SystemTimeSource; +import org.ehcache.docs.plugs.StringCopier; +import org.ehcache.expiry.Expirations; +import org.ehcache.impl.internal.events.NullStoreEventDispatcher; +import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; +import org.ehcache.impl.internal.store.basic.NopStore; +import org.ehcache.impl.internal.store.heap.OnHeapStore; +import org.ehcache.impl.internal.store.offheap.BasicOffHeapValueHolder; +import org.ehcache.spi.test.After; +import org.junit.Before; +import org.junit.Test; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.junit.Assert.assertThat; + +/** + * Tests for {@link TieredStore}. These tests are mainly to validate that + * ehcache3#1522 is correctly fixed. + *

+ * Only putIfAbsent is tested due to the time it takes to create each test. All methods that conditionally
+ * modify the authoritative tier and then invalidate the caching tier are impacted:
+ * <ul>
+ *   <li>putIfAbsent</li>
+ *   <li>remove(key, value): if the remove does nothing because the value is different, it will return KEY_PRESENT but the get will return null</li>
+ *   <li>replace(key, value): a value must be present. That value has been removed but not yet invalidated. Another thread then attempts a replace, fails and does a get. It will see the old value instead of null</li>
+ *   <li>replace(key, old, new): if the replace does nothing</li>
+ * </ul>
+ * They should invalidate even if they have not modified the authoritative tier, to prevent inconsistencies.
+ * <p>
+ * Note: In the tests below, it fails by a deadlock we are creating on purpose. In real life, we would get() + * inconsistent values instead + */ +public class TieredStoreMutatorTest { + + private static final String KEY = "KEY"; + private static final String VALUE = "VALUE"; + private static final String OTHER_VALUE = "OTHER_VALUE"; + + private class AuthoritativeTierMock extends NopStore { + + private final AtomicBoolean get = new AtomicBoolean(false); + + private final ConcurrentMap map = new ConcurrentHashMap(); + + @Override + public PutStatus put(String key, String value) throws StoreAccessException { + String oldValue = map.put(key, value); + + try { + progressLatch.countDown(); + thread3Latch.await(); + } catch (InterruptedException e) { + // ignore + } + + if(oldValue == null) { + return PutStatus.PUT; + } + if(oldValue.equals(value)) { + return PutStatus.NOOP; + } + return PutStatus.UPDATE; + } + + @Override + public boolean remove(String key) throws StoreAccessException { + boolean result = map.remove(key) != null; + try { + progressLatch.countDown(); + thread3Latch.await(); + } catch (InterruptedException e) { + // ignore + } + return result; + } + + @Override + public ValueHolder getAndFault(String key) throws StoreAccessException { + // First, called by Thread 1, blocks + // Then, called by test thread, returns a value holder of null + if (get.compareAndSet(false, true)) { + try { + progressLatch.countDown(); + thread1Latch.await(); + } catch (InterruptedException e) { + // ignore + } + } + return createValueHolder(map.get(key)); + } + + @Override + public ValueHolder putIfAbsent(String key, String value) throws StoreAccessException { + return createValueHolder(map.putIfAbsent(key, value)); + } + + @Override + public RemoveStatus remove(String key, String value) throws StoreAccessException { + String oldValue = map.get(key); + if(oldValue == null) { + return RemoveStatus.KEY_MISSING; + } + if(value.equals(oldValue)) { + map.remove(key); + return RemoveStatus.REMOVED; + } + return RemoveStatus.KEY_PRESENT; + } + + @Override + public ValueHolder replace(String key, String value) throws StoreAccessException { + return createValueHolder(map.replace(key, value)); + } + + @Override + public ReplaceStatus replace(String key, String oldValue, String newValue) throws StoreAccessException { + String currentValue = map.get(key); + if(currentValue == null) { + return ReplaceStatus.MISS_NOT_PRESENT; + } + if(currentValue.equals(oldValue)) { + map.replace(key, newValue); + return ReplaceStatus.HIT; + } + return ReplaceStatus.MISS_PRESENT; + } + } + + private final AuthoritativeTier authoritativeTier = new AuthoritativeTierMock(); + + private TieredStore tieredStore; + + private Thread thread3 = null; + private volatile boolean failed = false; + + private final CountDownLatch progressLatch = new CountDownLatch(2); + private final CountDownLatch thread1Latch = new CountDownLatch(1); + private final CountDownLatch thread3Latch = new CountDownLatch(1); + + @Before + public void setUp() throws Exception { + // Not relevant to the test, just used to instantiate the OnHeapStore + ResourcePools resourcePools = ResourcePoolsBuilder.newResourcePoolsBuilder() + .heap(1, MemoryUnit.MB) + .disk(1, MemoryUnit.GB, false) + .build(); + + // Not relevant to the test, just used to instantiate the OnHeapStore + Store.Configuration config = new StoreConfigurationImpl(String.class, String.class, + null, getClass().getClassLoader(), Expirations.noExpiration(), resourcePools, 0, null, null); + + // Here again, 
all parameters are useless, we only care about the beforeCompletingTheFault implementation + CachingTier cachingTier = new OnHeapStore(config, SystemTimeSource.INSTANCE, + StringCopier.copier(), StringCopier.copier(), new NoopSizeOfEngine(), NullStoreEventDispatcher. + nullStoreEventDispatcher()); + + tieredStore = new TieredStore(cachingTier, authoritativeTier); + } + + @After + public void after() { + releaseThreads(); + } + + @Test + public void testPutIfAbsent() throws Exception { + + // 1. Thread 1 gets the key but found null in the on-heap backend + // 2. Thread 1 creates a Fault and then block + // a. Thread 1 -> Fault.get() + // b. Thread 1 -> AuthoritativeTierMock.getAndFault - BLOCK + launchThread(new Runnable() { + @Override + public void run() { + getFromTieredStore(); + } + }); + + // 3. Thread 2 does a put. But it hasn't invalided the on-heap yet (it blocks instead) + // a. Thread 2 -> TieredStore.put + // b. Thread 2 -> AuthoritativeTierMock.put - BLOCK + launchThread(new Runnable() { + @Override + public void run() { + putToTieredStore(); + } + }); + + // At this point we have a fault with null in the caching tier and a value in the authority + // However the fault has not yet been invalidated following the authority update + progressLatch.await(); + + // 6. Thread 3 - unblock Faults after X ms to make sure it happens after the test thread gets the fault + launchThread3(); + + // 4. Test Thread receives a value from putIfAbsent. We would expect the get to receive the same value right after + // a. Test Thread -> TieredStore.putIfAbsent + // b. Test Thread -> AuthoritativeTierMock.putIfAbsent - returns VALUE + assertThat(putIfAbsentToTieredStore().value(), is(VALUE)); + + // 5. Test Thread -> TieredStore.get() + // If Test Thread bugged -> Fault.get() - synchronized - blocked on the fault because thread 2 already locks the fault + // Else Test Thread fixed -> new Fault ... correct value + Store.ValueHolder value = getFromTieredStore(); + + // These assertions will in fact work most of the time even if a failure occurred. Because as soon as the latches are + // released by thread 3, the thread 2 will invalidate the fault + assertThat(value, notNullValue()); + assertThat(value.value(), is(VALUE)); + + // If the Test thread was blocked, Thread 3 will eventually flag the failure + assertThat(failed, is(false)); + } + + @Test + public void testRemoveKeyValue() throws Exception { + // Follows the same pattern as testPutIfAbsent except that at the end, if remove returns KEY_PRESENT, we expect + // the get to return VALUE afterwards + + launchThread(new Runnable() { + @Override + public void run() { + getFromTieredStore(); + } + }); + + launchThread(new Runnable() { + @Override + public void run() { + putToTieredStore(); + } + }); + + progressLatch.await(); + + launchThread3(); + + // 4. Test Thread receives KEY_PRESENT from remove. We would expect the get to receive a value right afterwards + // a. Test Thread -> TieredStore.remove + // b. Test Thread -> AuthoritativeTierMock.remove - returns KEY_PRESENT + assertThat(removeKeyValueFromTieredStore(OTHER_VALUE), is(Store.RemoveStatus.KEY_PRESENT)); + + // 5. Test Thread -> TieredStore.get() + // If Test Thread bugged -> Fault.get() - synchronized - blocked + // Else Test Thread fixed -> new Fault ... 
correct value
+    Store.ValueHolder value = getFromTieredStore();
+    assertThat(value, notNullValue());
+    assertThat(value.value(), is(VALUE));
+
+    assertThat(failed, is(false));
+  }
+
+  @Test
+  public void testReplaceKeyValue() throws Exception {
+    // Follows the same pattern as testPutIfAbsent except that at the end, if replace returns null, we expect
+    // the get to return null afterwards
+
+    // 1. Put a value. The value is now in the authoritative tier
+    putIfAbsentToTieredStore(); // using putIfAbsent instead of put here because our mock won't block on a putIfAbsent
+
+    // 2. Thread 1 gets the key but finds null in the on-heap backend
+    // 3. Thread 1 creates a Fault and then blocks
+    //    a. Thread 1 -> Fault.get()
+    //    b. Thread 1 -> AuthoritativeTierMock.getAndFault - BLOCK
+    launchThread(new Runnable() {
+      @Override
+      public void run() {
+        getFromTieredStore();
+      }
+    });
+
+    // 3. Thread 2 does a remove. But it hasn't invalidated the on-heap yet (it blocks instead)
+    //    a. Thread 2 -> TieredStore.remove
+    //    b. Thread 2 -> AuthoritativeTierMock.remove - BLOCK
+    launchThread(new Runnable() {
+      @Override
+      public void run() {
+        removeKeyFromTieredStore();
+      }
+    });
+
+    progressLatch.await();
+
+    launchThread3();
+
+    // 4. Test Thread receives null from replace. We would expect the get to receive the same null afterwards
+    //    a. Test Thread -> TieredStore.replace
+    //    b. Test Thread -> AuthoritativeTierMock.replace - returns null
+    assertThat(replaceFromTieredStore(VALUE), nullValue());
+
+    // 5. Test Thread -> TieredStore.get()
+    // If Test Thread bugged -> Fault.get() - synchronized - blocked
+    // Else Test Thread fixed -> new Fault ... correct value
+    Store.ValueHolder value = getFromTieredStore();
+    assertThat(value, nullValue());
+
+    assertThat(failed, is(false));
+  }
+
+  @Test
+  public void testReplaceKeyOldNewValue() throws Exception {
+    // Follows the same pattern as testReplaceKeyValue
+
+    putIfAbsentToTieredStore(); // using putIfAbsent instead of put here because our mock won't block on a putIfAbsent
+
+    launchThread(new Runnable() {
+      @Override
+      public void run() {
+        getFromTieredStore();
+      }
+    });
+
+    launchThread(new Runnable() {
+      @Override
+      public void run() {
+        removeKeyFromTieredStore();
+      }
+    });
+
+    progressLatch.await();
+
+    launchThread3();
+
+    assertThat(replaceFromTieredStore(VALUE, OTHER_VALUE), is(Store.ReplaceStatus.MISS_NOT_PRESENT));
+
+    // 5. Test Thread -> TieredStore.get()
+    // If Test Thread bugged -> Fault.get() - synchronized - blocked
+    // Else Test Thread fixed -> new Fault ...
correct value + Store.ValueHolder value = getFromTieredStore(); + assertThat(value, nullValue()); + + assertThat(failed, is(false)); + } + + private Store.ValueHolder createValueHolder(String value) { + if(value == null) { + return null; + } + return new BasicOffHeapValueHolder(1, value, Long.MAX_VALUE, System.currentTimeMillis() - 1); + } + + private Store.PutStatus putToTieredStore() { + try { + return tieredStore.put(KEY, VALUE); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private boolean removeKeyFromTieredStore() { + try { + return tieredStore.remove(KEY); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private Store.ValueHolder putIfAbsentToTieredStore() { + try { + return tieredStore.putIfAbsent(KEY, VALUE); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private Store.RemoveStatus removeKeyValueFromTieredStore(String value) { + try { + return tieredStore.remove(KEY, value); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private Store.ValueHolder replaceFromTieredStore(String value) { + try { + return tieredStore.replace(KEY, value); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private Store.ReplaceStatus replaceFromTieredStore(String oldValue, String newValue) { + try { + return tieredStore.replace(KEY, oldValue, newValue); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private Store.ValueHolder getFromTieredStore() { + try { + return tieredStore.get(KEY); + } catch (StoreAccessException e) { + throw new RuntimeException(e); + } + } + + private void launchThread3() { + thread3 = launchThread(new Runnable() { + @Override + public void run() { + try { + // Give time to test thread to reach blocked fault + Thread.sleep(1000); + } catch (InterruptedException e) { + // ignore + } + failed = true; + thread1Latch.countDown(); + thread3Latch.countDown(); + } + }); + } + + private Thread launchThread(Runnable runnable) { + Thread thread = new Thread(runnable); + thread.setDaemon(true); + thread.start(); + return thread; + } + + private void releaseThreads() { + if(thread3 != null) { + thread3.interrupt(); + } + } +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java index 6c1eabd7b1..a6960e1923 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java @@ -20,20 +20,20 @@ import org.ehcache.config.ResourceType; import org.ehcache.config.SizedResourcePool; import org.ehcache.core.internal.service.ServiceLocator; -import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.function.BiFunction; import org.ehcache.core.spi.function.Function; import org.ehcache.core.spi.function.NullaryFunction; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.Store.RemoveStatus; import org.ehcache.core.spi.store.Store.ReplaceStatus; +import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.core.spi.store.tiering.CachingTier; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.OffHeapStore; -import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.service.Service; import 
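
Editor's note: the mutator test above freezes its racing threads inside the mocked authoritative tier with CountDownLatches: progressLatch tells the test thread that both racers are parked mid-operation, while thread1Latch and thread3Latch hold them there until the watchdog (thread 3) releases everything. The stand-alone sketch below shows only that latch choreography; the class and method names are invented for illustration and none of it is Ehcache API (the real watchdog also flags a failure, which is omitted here).

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class LatchChoreographySketch {

  private static final CountDownLatch progress = new CountDownLatch(2); // two racing threads
  private static final CountDownLatch release = new CountDownLatch(1);  // opened by the watchdog

  public static void main(String[] args) throws InterruptedException {
    Runnable blockedMutation = () -> {
      progress.countDown();          // signal "I am inside the authority"
      await(release);                // stay parked, like getAndFault()/put() in the mock
    };
    new Thread(blockedMutation).start();
    new Thread(blockedMutation).start();

    progress.await();                // both racing threads are now frozen mid-operation
    System.out.println("test thread runs its own operation in the race window here");

    // watchdog: give the test thread a moment, then unblock everything
    new Thread(() -> {
      try { TimeUnit.MILLISECONDS.sleep(100); } catch (InterruptedException ignored) { }
      release.countDown();
    }).start();

    release.await();                 // wait for the watchdog so the JVM exits cleanly
  }

  private static void await(CountDownLatch latch) {
    try { latch.await(); } catch (InterruptedException ignored) { }
  }
}
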
org.ehcache.spi.service.ServiceConfiguration; +import org.ehcache.spi.service.ServiceProvider; import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Before; @@ -57,7 +57,6 @@ import java.util.concurrent.CyclicBarrier; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singleton; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; @@ -163,14 +162,14 @@ public void testPutIfAbsent_whenAbsent() throws Exception { @Test public void testPutIfAbsent_whenPresent() throws Exception { - when(numberAuthoritativeTier.putIfAbsent(eq(1), eq("one"))).thenReturn(newValueHolder("un")); + when(numberAuthoritativeTier.putIfAbsent(1, "one")).thenReturn(newValueHolder("un")); TieredStore tieredStore = new TieredStore(numberCachingTier, numberAuthoritativeTier); assertThat(tieredStore.putIfAbsent(1, "one").value(), Matchers.equalTo("un")); - verify(numberCachingTier, times(0)).invalidate(any(Number.class)); - verify(numberAuthoritativeTier, times(1)).putIfAbsent(eq(1), eq("one")); + verify(numberCachingTier, times(1)).invalidate(1); + verify(numberAuthoritativeTier, times(1)).putIfAbsent(1, "one"); } @Test @@ -203,7 +202,7 @@ public void testRemove2Args_doesNotRemove() throws Exception { assertThat(tieredStore.remove(1, "one"), is(RemoveStatus.KEY_MISSING)); - verify(numberCachingTier, times(0)).invalidate(any(Number.class)); + verify(numberCachingTier).invalidate(any(Number.class)); verify(numberAuthoritativeTier, times(1)).remove(eq(1), eq("one")); } @@ -227,7 +226,7 @@ public void testReplace2Args_doesNotReplace() throws Exception { assertThat(tieredStore.replace(1, "one"), is(nullValue())); - verify(numberCachingTier, times(0)).invalidate(any(Number.class)); + verify(numberCachingTier).invalidate(any(Number.class)); verify(numberAuthoritativeTier, times(1)).replace(eq(1), eq("one")); } @@ -251,7 +250,7 @@ public void testReplace3Args_doesNotReplace() throws Exception { assertThat(tieredStore.replace(1, "un", "one"), is(ReplaceStatus.MISS_NOT_PRESENT)); - verify(numberCachingTier, times(0)).invalidate(any(Number.class)); + verify(numberCachingTier).invalidate(any(Number.class)); verify(numberAuthoritativeTier, times(1)).replace(eq(1), eq("un"), eq("one")); } From 6c573d5bd05be02e381d8b7e970fe3667563208d Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Fri, 21 Oct 2016 18:36:51 -0400 Subject: [PATCH 092/218] :bug: NPE in EhcacheServiceStateImpl when requesting pools but not yet configured --- .../org/ehcache/clustered/server/EhcacheStateServiceImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index 4bc5de23d4..d67dc327ef 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -121,7 +121,7 @@ public String getDefaultServerResource() { @Override public Map getSharedResourcePools() { - return sharedResourcePools.entrySet().stream().collect(toMap(Map.Entry::getKey, e -> e.getValue().getPool())); + return sharedResourcePools == null ? 
Collections.emptyMap() : sharedResourcePools.entrySet().stream().collect(toMap(Map.Entry::getKey, e -> e.getValue().getPool())); } public void validate(ServerSideConfiguration configuration) throws ClusterException { From ae6686d9ff35b1bbfb562923715fe612e02648d4 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Thu, 20 Oct 2016 14:38:40 -0400 Subject: [PATCH 093/218] :heavy_plus_sign: Exposing server-side stats (memory usage) --- .../clustered/server/ServerStoreImpl.java | 42 ++++++- .../server/offheap/OffHeapServerStore.java | 113 +++++++++++++++++- 2 files changed, 153 insertions(+), 2 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java index 491da91c3e..20e034fc8b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java @@ -21,6 +21,7 @@ import org.ehcache.clustered.common.internal.store.ServerStore; import org.ehcache.clustered.server.offheap.OffHeapChainMap; import org.ehcache.clustered.server.offheap.OffHeapServerStore; +import org.terracotta.offheapstore.MapInternals; import org.terracotta.offheapstore.paging.PageSource; import com.tc.classloader.CommonComponent; @@ -29,7 +30,7 @@ import java.util.List; @CommonComponent -public class ServerStoreImpl implements ServerStore { +public class ServerStoreImpl implements ServerStore, MapInternals { private final ServerStoreConfiguration storeConfiguration; private final PageSource pageSource; @@ -94,4 +95,43 @@ public void close() { public List> getSegments() { return store.getSegments(); } + + // stats + + + @Override + public long getDataAllocatedMemory() {return store.getDataAllocatedMemory();} + + @Override + public long getAllocatedMemory() {return store.getAllocatedMemory();} + + @Override + public long getRemovedSlotCount() {return store.getRemovedSlotCount();} + + @Override + public long getDataVitalMemory() {return store.getDataVitalMemory();} + + @Override + public int getReprobeLength() {return store.getReprobeLength();} + + @Override + public long getDataSize() {return store.getDataSize();} + + @Override + public long getDataOccupiedMemory() {return store.getDataOccupiedMemory();} + + @Override + public long getUsedSlotCount() {return store.getUsedSlotCount();} + + @Override + public long getSize() {return store.getSize();} + + @Override + public long getVitalMemory() {return store.getVitalMemory();} + + @Override + public long getOccupiedMemory() {return store.getOccupiedMemory();} + + @Override + public long getTableCapacity() {return store.getTableCapacity();} } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java index 42f8ca4b22..c44e429382 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapServerStore.java @@ -23,13 +23,14 @@ import org.ehcache.clustered.common.internal.store.ServerStore; import org.ehcache.clustered.server.KeySegmentMapper; import org.ehcache.clustered.server.ServerStoreEvictionListener; +import org.terracotta.offheapstore.MapInternals; import org.terracotta.offheapstore.exceptions.OversizeMappingException; import org.terracotta.offheapstore.paging.PageSource; import 
static org.terracotta.offheapstore.util.MemoryUnit.KILOBYTES; import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; -public class OffHeapServerStore implements ServerStore { +public class OffHeapServerStore implements ServerStore, MapInternals { private final List> segments; private final KeySegmentMapper mapper; @@ -229,4 +230,114 @@ public void close() { } } + // stats + + @Override + public long getAllocatedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getAllocatedMemory(); + } + return total; + } + + @Override + public long getOccupiedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getOccupiedMemory(); + } + return total; + } + + @Override + public long getDataAllocatedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataAllocatedMemory(); + } + return total; + } + + @Override + public long getDataOccupiedMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataOccupiedMemory(); + } + return total; + } + + @Override + public long getDataSize() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataSize(); + } + return total; + } + + @Override + public long getSize() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getSize(); + } + return total; + } + + @Override + public long getTableCapacity() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getTableCapacity(); + } + return total; + } + + @Override + public long getUsedSlotCount() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getUsedSlotCount(); + } + return total; + } + + @Override + public long getRemovedSlotCount() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getRemovedSlotCount(); + } + return total; + } + + @Override + public int getReprobeLength() { + int total = 0; + for (MapInternals segment : segments) { + total += segment.getReprobeLength(); + } + return total; + } + + @Override + public long getVitalMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getVitalMemory(); + } + return total; + } + + @Override + public long getDataVitalMemory() { + long total = 0L; + for (MapInternals segment : segments) { + total += segment.getDataVitalMemory(); + } + return total; + } + } From b6892d43d273b48093bb7898f0bd1b2525ae43b5 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Wed, 19 Oct 2016 17:50:27 -0400 Subject: [PATCH 094/218] :heavy_plus_sign: Integration of management into Ehcache server entity and exposing management metadata to describe server entity state and config --- build.gradle | 2 +- clustered/client/build.gradle | 1 + clustered/integration-test/build.gradle | 31 +-- .../AbstractClusteringManagementTest.java | 232 ++++++++++++------ .../ClusteringManagementServiceTest.java | 225 ++++++++--------- .../EhcacheConfigWithManagementTest.java | 109 ++++++++ .../EhcacheManagerToStringTest.java | 14 +- .../test/resources/clusteredConfiguration.txt | 2 +- clustered/server/build.gradle | 6 + .../ehcache/clustered/server/ClientState.java | 55 +++++ .../clustered/server/EhcacheActiveEntity.java | 57 ++--- .../server/EhcachePassiveEntity.java | 11 +- .../server/EhcacheStateServiceImpl.java | 7 +- .../server/management/ClientStateBinding.java | 33 +++ ...ClientStateSettingsManagementProvider.java | 67 +++++ 
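
Editor's note: each MapInternals method added to OffHeapServerStore above repeats the same "sum the metric over all segments" loop, mirroring the existing style of the class. Purely as an illustration of an alternative, a single helper taking a ToLongFunction could express the same aggregation; the sketch below is hypothetical (SegmentStats and SegmentSumSketch are invented names, not Ehcache or offheap-store types).

import java.util.Arrays;
import java.util.List;
import java.util.function.ToLongFunction;

public class SegmentSumSketch {

  // stand-in for the per-segment statistics interface, reduced to two metrics
  interface SegmentStats {
    long getAllocatedMemory();
    long getOccupiedMemory();
  }

  private final List<SegmentStats> segments;

  SegmentSumSketch(List<SegmentStats> segments) {
    this.segments = segments;
  }

  // one helper replaces the copy-pasted summing loops
  private long sum(ToLongFunction<SegmentStats> metric) {
    long total = 0L;
    for (SegmentStats segment : segments) {
      total += metric.applyAsLong(segment);
    }
    return total;
  }

  long getAllocatedMemory() { return sum(SegmentStats::getAllocatedMemory); }
  long getOccupiedMemory()  { return sum(SegmentStats::getOccupiedMemory); }

  public static void main(String[] args) {
    SegmentStats segment = new SegmentStats() {
      @Override public long getAllocatedMemory() { return 1024L; }
      @Override public long getOccupiedMemory()  { return 512L; }
    };
    SegmentSumSketch store = new SegmentSumSketch(Arrays.asList(segment, segment));
    System.out.println(store.getAllocatedMemory());  // 2048
    System.out.println(store.getOccupiedMemory());   // 1024
  }
}
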
.../server/management/Management.java | 156 ++++++++++++ .../management/OffHeapResourceBinding.java | 32 +++ ...eapResourceSettingsManagementProvider.java | 70 ++++++ .../server/management/PoolBinding.java | 64 +++++ .../PoolSettingsManagementProvider.java | 76 ++++++ .../server/management/ServerStoreBinding.java | 32 +++ ...ServerStoreSettingsManagementProvider.java | 90 +++++++ .../server/state/EhcacheStateService.java | 2 + .../server/EhcacheActiveEntityTest.java | 9 + .../server/EhcachePassiveEntityTest.java | 9 + .../CacheBindingManagementProvider.java | 2 +- .../providers/ExposedCacheBinding.java | 7 +- .../DefaultManagementRegistryService.java | 2 - 28 files changed, 1129 insertions(+), 274 deletions(-) create mode 100644 clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/ClientState.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java diff --git a/build.gradle b/build.gradle index f26ed9f42b..f886e7318c 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.8.beta2' + terracottaPlatformVersion = '5.0.8.beta6' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.9.beta' terracottaCoreVersion = '5.0.9-beta2' diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 3dd99f0539..4096ab8a85 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -23,6 +23,7 @@ dependencies { compileOnly project(':xml') compile project(':clustered:common') provided "org.terracotta:entity-client-api:$parent.entityApiVersion" + provided "org.terracotta.management:management-registry-service-api:$parent.managementVersion" // provided in management-server jar testCompile project(':api') testCompile project(':xml') diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index e8d6c2ebd9..f4546eb9de 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -16,34 +16,20 @@ import org.gradle.internal.jvm.Jvm -// all these libs should be on server classpath for management -// management-entity-server : entity used client-side to send management data into the server -// monitoring-service : server monitoring service -// monitoring-service-entity : test entity, so that we can read the server monitoring service -// 
management-model : management metadata describing exposed objects, stats and notifications (jdk-6 compat) -// cluster-topology : model classes describing cluster topology (jdk-8 compat) -// management-registry : service classes to expose management metadata and query api for stats (jdk-6 compat) -// sequence-generator : improved boundary flake seq generator to add seq numbers on management messages -def serverCP = [ - 'management-entity-server': ':plugin', - 'monitoring-service': ':plugin', - 'monitoring-service-entity': ':plugin', - 'management-model': '', - 'cluster-topology': '', - 'management-registry': '', - 'sequence-generator': '', -] +sourceCompatibility = 1.8 +targetCompatibility = 1.8 configurations { - serverClasspath + serverLibs } dependencies { testCompile project(':dist') testCompile project(':clustered:clustered-dist') testCompile project(':management') - testCompile "org.terracotta.management:management-entity-client:$parent.managementVersion" + testCompile "org.terracotta.management.dist:management-client:$parent.managementVersion" testCompile "org.terracotta.management:monitoring-service-entity:$parent.managementVersion" + testCompile "com.fasterxml.jackson.core:jackson-databind:2.8.0" testCompile group:'org.terracotta', name:'galvan-support', version: galvanVersion testCompile (group:'com.google.code.tempus-fugit', name:'tempus-fugit', version:'1.1') { @@ -52,9 +38,10 @@ dependencies { } testCompile group: 'javax.cache', name: 'cache-api', version: jcacheVersion - serverCP.each { k, v -> - serverClasspath "org.terracotta.management:$k:$parent.managementVersion$v" + serverLibs ("org.terracotta.management.dist:management-server:$parent.managementVersion") { + exclude group:'org.terracotta.management.dist', module:'management-common' } + serverLibs "org.terracotta.management:monitoring-service-entity:$parent.managementVersion" // test entity for monitoring service } task unzipKit(type: Copy) { @@ -77,7 +64,7 @@ test { environment 'JAVA_OPTS', '-Dcom.tc.l2.lockmanager.greedy.locks.enabled=false' //If this directory does not exist, tests will fail with a cryptic assert failure systemProperty 'kitInstallationPath', "$unzipKit.destinationDir/${project(':clustered:clustered-dist').archivesBaseName}-$project.version-kit" - systemProperty 'managementPlugins', serverCP.keySet().collect { String artifact -> project.configurations.serverClasspath.find { it.name.startsWith("$artifact-$parent.managementVersion") } }.join(File.pathSeparator) + systemProperty 'managementPlugins', project.configurations.serverLibs.join(File.pathSeparator) // Uncomment to include client logging in console output // testLogging.showStandardStreams = true } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 8690bd7b3f..537c243b5a 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -15,21 +15,32 @@ */ package org.ehcache.clustered.management; -import org.junit.After; -import org.junit.Assert; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import org.ehcache.CacheManager; +import org.ehcache.Status; +import org.ehcache.config.units.EntryUnit; +import 
org.ehcache.config.units.MemoryUnit; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.junit.AfterClass; +import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.rules.Timeout; import org.terracotta.connection.Connection; import org.terracotta.connection.ConnectionFactory; import org.terracotta.management.entity.management.ManagementAgentConfig; -import org.terracotta.management.entity.management.client.ContextualReturnListener; import org.terracotta.management.entity.management.client.ManagementAgentEntityFactory; import org.terracotta.management.entity.management.client.ManagementAgentService; import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntityFactory; import org.terracotta.management.entity.monitoring.client.MonitoringServiceProxyEntity; import org.terracotta.management.model.call.ContextualReturn; import org.terracotta.management.model.call.Parameter; +import org.terracotta.management.model.cluster.Client; import org.terracotta.management.model.cluster.ClientIdentifier; +import org.terracotta.management.model.cluster.ServerEntityIdentifier; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.message.Message; import org.terracotta.management.model.notification.ContextualNotification; @@ -38,22 +49,31 @@ import org.terracotta.testing.rules.Cluster; import java.io.File; -import java.util.ArrayList; +import java.io.FileNotFoundException; +import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Properties; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.LinkedBlockingQueue; +import java.util.Scanner; +import java.util.concurrent.Exchanger; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static java.util.Arrays.asList; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredShared; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; public abstract class AbstractClusteringManagementTest { @@ -61,73 +81,135 @@ public abstract class AbstractClusteringManagementTest { "" + "" + "64" + + "64" + "" + "\n"; protected static MonitoringServiceProxyEntity consumer; + protected static CacheManager cacheManager; + protected static ClientIdentifier clientIdentifier; + protected static ServerEntityIdentifier serverEntityIdentifier; + protected static ObjectMapper mapper = new ObjectMapper(); + + private static final List 
MANAGEMENT_PLUGINS = Stream.of(System.getProperty("managementPlugins", "").split(File.pathSeparator)) + .map(File::new) + .collect(Collectors.toList()); @ClassRule - public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, getManagementPlugins(), "", RESOURCE_CONFIG, ""); + public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, MANAGEMENT_PLUGINS, "", RESOURCE_CONFIG, ""); @BeforeClass public static void beforeClass() throws Exception { + mapper.configure(SerializationFeature.INDENT_OUTPUT, true); + CLUSTER.getClusterControl().waitForActive(); consumer = new MonitoringServiceEntityFactory(ConnectionFactory.connect(CLUSTER.getConnectionURI(), new Properties())).retrieveOrCreate("MonitoringConsumerEntity"); consumer.createMessageBuffer(1024); + + cacheManager = newCacheManagerBuilder() + // cluster config + .with(cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-1")) + .autoCreate() + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 28, MemoryUnit.MB, "secondary-server-resource") // <2> + .resourcePool("resource-pool-b", 16, MemoryUnit.MB)) // will take from primary-server-resource + // management config + .using(new DefaultManagementRegistryConfiguration() + .addTags("webapp-1", "server-node-1") + .setCacheManagerAlias("my-super-cache-manager") + .addConfiguration(new EhcacheStatisticsProviderConfiguration( + 1, TimeUnit.MINUTES, + 100, 1, TimeUnit.SECONDS, + 10, TimeUnit.SECONDS))) + // cache config + .withCache("dedicated-cache-1", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .build()) + .withCache("shared-cache-2", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredShared("resource-pool-a"))) + .build()) + .withCache("shared-cache-3", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredShared("resource-pool-b"))) + .build()) + .build(true); + + // ensure the CM is running and get its client id + assertThat(cacheManager.getStatus(), equalTo(Status.AVAILABLE)); + clientIdentifier = consumer.readTopology().getClients().values() + .stream() + .filter(client -> client.getName().equals("Ehcache:my-server-entity-1")) + .findFirst() + .map(Client::getClientIdentifier) + .get(); + + serverEntityIdentifier = consumer.readTopology() + .activeServerEntityStream() + .filter(serverEntity -> serverEntity.getName().equals("my-server-entity-1")) + .findFirst() + .get() // throws if not found + .getServerEntityIdentifier(); + + // test_notifs_sent_at_CM_init + List messages = consumer.drainMessageBuffer(); + List notificationTypes = notificationTypes(messages); + assertThat(notificationTypes.get(0), equalTo("CLIENT_CONNECTED")); + assertThat(notificationTypes.containsAll(Arrays.asList( + "SERVER_ENTITY_CREATED", "SERVER_ENTITY_FETCHED", + "ENTITY_REGISTRY_AVAILABLE", "ENTITY_REGISTRY_UPDATED", "EHCACHE_RESOURCE_POOLS_CONFIGURED", "EHCACHE_CLIENT_VALIDATED", "EHCACHE_SERVER_STORE_CREATED", + "CLIENT_REGISTRY_AVAILABLE", "CLIENT_TAGS_UPDATED")), is(true)); + assertThat(consumer.readMessageBuffer(), is(nullValue())); } - @After - public final void clearBuffers() throws Exception { - clear(); + 
@AfterClass + public static void afterClass() throws Exception { + if (cacheManager != null && cacheManager.getStatus() == Status.AVAILABLE) { + cacheManager.close(); + } } - protected final void clear() { - if(consumer != null) { + @Rule + public final Timeout globalTimeout = Timeout.seconds(60); + + @Before + public void init() throws Exception { + if (consumer != null) { consumer.clearMessageBuffer(); } } - protected static void sendManagementCallToCollectStats(String... statNames) throws Exception { + protected static ContextualReturn sendManagementCallToCollectStats(String... statNames) throws Exception { Connection managementConnection = CLUSTER.newConnection(); try { ManagementAgentService agent = new ManagementAgentService(new ManagementAgentEntityFactory(managementConnection).retrieveOrCreate(new ManagementAgentConfig())); - assertThat(agent.getManageableClients().size(), equalTo(1)); // only ehcache client is manageable, not this one + final AtomicReference managementCallId = new AtomicReference<>(); + final Exchanger> exchanger = new Exchanger<>(); - // find Ehcache client - ClientIdentifier me = agent.getClientIdentifier(); - ClientIdentifier client = null; - for (ClientIdentifier clientIdentifier : agent.getManageableClients()) { - if (!clientIdentifier.equals(me)) { - client = clientIdentifier; - break; - } - } - assertThat(client, is(notNullValue())); - final ClientIdentifier ehcacheClientIdentifier = client; - - final CountDownLatch callCompleted = new CountDownLatch(1); - final AtomicReference managementCallId = new AtomicReference(); - final BlockingQueue> returns = new LinkedBlockingQueue>(); - - agent.setContextualReturnListener(new ContextualReturnListener() { - @Override - public void onContextualReturn(ClientIdentifier from, String id, ContextualReturn aReturn) { - try { - Assert.assertEquals(ehcacheClientIdentifier, from); - // make sure the call completed - callCompleted.await(10, TimeUnit.SECONDS); - assertEquals(managementCallId.get(), id); - returns.offer(aReturn); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } + agent.setContextualReturnListener((from, id, aReturn) -> { + try { + assertEquals(clientIdentifier, from); + assertEquals(managementCallId.get(), id); + exchanger.exchange(aReturn); + } catch (InterruptedException e) { + fail("interrupted"); } }); managementCallId.set(agent.call( - ehcacheClientIdentifier, + clientIdentifier, Context.create("cacheManagerName", "my-super-cache-manager"), "StatisticCollectorCapability", "updateCollectedStatistics", @@ -135,11 +217,7 @@ public void onContextualReturn(ClientIdentifier from, String id, ContextualRetur new Parameter("StatisticsCapability"), new Parameter(asList(statNames), Collection.class.getName()))); - // now we're sure the call completed - callCompleted.countDown(); - - // ensure the call is made - returns.take(); + return exchanger.exchange(null); } finally { managementConnection.close(); } @@ -148,41 +226,43 @@ public void onContextualReturn(ClientIdentifier from, String id, ContextualRetur protected static List waitForNextStats() { // uses the monitoring consumre entity to get the content of the stat buffer when some stats are collected while (true) { - Message message = consumer.readMessageBuffer(); - if (message != null && message.getType().equals("STATISTICS")) { - return message.unwrap(ContextualStatistics.class); + List messages = consumer.drainMessageBuffer() + .stream() + .filter(message -> message.getType().equals("STATISTICS")) + .flatMap(message -> 
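
Editor's note: sendManagementCallToCollectStats() above now returns the ContextualReturn by letting the listener callback thread hand it to the calling thread through an Exchanger, which is what makes the management call look synchronous to the test. Below is a minimal, self-contained illustration of that hand-off pattern only; the class name and strings are made up and nothing here is part of the management API.

import java.util.concurrent.Exchanger;

public class ExchangerHandOffSketch {

  public static void main(String[] args) throws InterruptedException {
    Exchanger<String> exchanger = new Exchanger<>();

    // plays the role of the listener invoked on another thread
    new Thread(() -> {
      String result = "contextual-return-payload";
      try {
        exchanger.exchange(result);   // blocks until the caller is ready to receive
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    }).start();

    // plays the role of the calling test code: wait for the callback's answer
    String received = exchanger.exchange(null);
    System.out.println("received: " + received);
  }
}
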
message.unwrap(ContextualStatistics.class).stream()) + .collect(Collectors.toList()); + if(messages.isEmpty()) { + Thread.yield(); + } else { + return messages; } - Thread.yield(); } } - private static List getManagementPlugins() { - String[] paths = System.getProperty("managementPlugins").split(File.pathSeparator); - List plugins = new ArrayList(paths.length); - for (String path : paths) { - plugins.add(new File(path)); - } - return plugins; - } - protected static List messageTypes(List messages) { - List types = new ArrayList(messages.size()); - for (Message message : messages) { - types.add(message.getType()); - } - return types; + return messages.stream().map(Message::getType).collect(Collectors.toList()); } protected static List notificationTypes(List messages) { - List types = new ArrayList(messages.size()); - for (Message message : messages) { - if ("NOTIFICATION".equals(message.getType())) { - for (ContextualNotification notification : message.unwrap(ContextualNotification.class)) { - types.add(notification.getType()); - } - } + return messages + .stream() + .filter(message -> "NOTIFICATION".equals(message.getType())) + .flatMap(message -> message.unwrap(ContextualNotification.class).stream()) + .map(ContextualNotification::getType) + .collect(Collectors.toList()); + } + + protected static String read(String path) throws FileNotFoundException { + Scanner scanner = new Scanner(AbstractClusteringManagementTest.class.getResourceAsStream(path), "UTF-8"); + try { + return scanner.useDelimiter("\\A").next(); + } finally { + scanner.close(); } - return types; + } + + protected static String normalizeForLineEndings(String stringToNormalize) { + return stringToNormalize.replace("\r\n", "\n").replace("\r", "\n"); } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 8afc6a1745..b126d6d8d4 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -16,29 +16,22 @@ package org.ehcache.clustered.management; import org.ehcache.Cache; -import org.ehcache.CacheManager; -import org.ehcache.Status; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; -import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; -import org.ehcache.config.builders.CacheConfigurationBuilder; -import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; -import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; -import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; -import org.junit.After; -import org.junit.Before; +import org.hamcrest.CoreMatchers; import org.junit.BeforeClass; -import org.junit.Rule; +import org.junit.FixMethodOrder; +import org.junit.Ignore; import org.junit.Test; -import org.junit.rules.Timeout; +import org.junit.runners.MethodSorters; +import org.terracotta.management.model.call.ContextualReturn; import org.terracotta.management.model.capabilities.Capability; import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.Settings; import 
org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; -import org.terracotta.management.model.cluster.Client; +import org.terracotta.management.model.cluster.Cluster; import org.terracotta.management.model.context.ContextContainer; import org.terracotta.management.model.message.Message; -import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.Sample; import org.terracotta.management.model.stats.StatisticType; @@ -48,99 +41,56 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.TreeSet; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; import static org.junit.Assert.assertThat; +@FixMethodOrder(MethodSorters.NAME_ASCENDING) public class ClusteringManagementServiceTest extends AbstractClusteringManagementTest { - private static final Collection ONHEAP_DESCRIPTORS = new ArrayList(); - private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList(); - private static final Collection DISK_DESCRIPTORS = new ArrayList(); - private static final Collection CLUSTERED_DESCRIPTORS = new ArrayList(); - private static final Collection CACHE_DESCRIPTORS = new ArrayList(); - - private static AtomicInteger N = new AtomicInteger(); - - @Rule - public final Timeout globalTimeout = Timeout.seconds(60); - - private CacheManager cacheManager; - private String clientIdentifier; - private int n = N.incrementAndGet(); - - @Before - public void init() throws Exception { - // clear previous messages - clear(); - - this.cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - // cluster config - .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-" + n)) - .autoCreate() - .defaultServerResource("primary-server-resource")) - // management config - .using(new DefaultManagementRegistryConfiguration() - .addTags("webapp-1", "server-node-1") - .setCacheManagerAlias("my-super-cache-manager") - .addConfiguration(new EhcacheStatisticsProviderConfiguration( - 1, TimeUnit.MINUTES, - 100, 1, TimeUnit.SECONDS, - 2, TimeUnit.SECONDS))) // TTD reduce to 2 seconds so that the stat collector runs faster - // cache config - .withCache("cache-1", CacheConfigurationBuilder.newCacheConfigurationBuilder( - String.class, String.class, - newResourcePoolsBuilder() - .heap(10, EntryUnit.ENTRIES) - .offheap(1, MemoryUnit.MB) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) - .build()) - .build(true); - - // ensure the CM is running and get its client id - assertThat(cacheManager.getStatus(), equalTo(Status.AVAILABLE)); - for (Client client : 
consumer.readTopology().getClients().values()) { - if(client.getName().equals("Ehcache:my-server-entity-" + n)) { - clientIdentifier = client.getClientId(); - } - } - assertThat(clientIdentifier, is(notNullValue())); - } + private static final Collection ONHEAP_DESCRIPTORS = new ArrayList<>(); + private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList<>(); + private static final Collection DISK_DESCRIPTORS = new ArrayList<>(); + private static final Collection CLUSTERED_DESCRIPTORS = new ArrayList<>(); + private static final Collection CACHE_DESCRIPTORS = new ArrayList<>(); - @After - public void close() throws Exception { - if (cacheManager != null && cacheManager.getStatus() == Status.AVAILABLE) { - cacheManager.close(); - } + @Test + @Ignore("This is not a test, but something useful to show a json print of a cluster topology with all management metadata inside") + public void test_A_topology() throws Exception { + Cluster cluster = consumer.readTopology(); + String json = mapper.writeValueAsString(cluster.toMap()); + System.out.println(json); } @Test - public void test_tags_exposed() throws Exception { + public void test_A_client_tags_exposed() throws Exception { String[] tags = consumer.readTopology().getClient(clientIdentifier).get().getTags().toArray(new String[0]); assertThat(tags, equalTo(new String[]{"server-node-1", "webapp-1"})); } @Test - public void test_contextContainer_exposed() throws Exception { + public void test_B_client_contextContainer_exposed() throws Exception { ContextContainer contextContainer = consumer.readTopology().getClient(clientIdentifier).get().getManagementRegistry().get().getContextContainer(); assertThat(contextContainer.getValue(), equalTo("my-super-cache-manager")); - assertThat(contextContainer.getSubContexts(), hasSize(1)); - assertThat(contextContainer.getSubContexts().iterator().next().getValue(), equalTo("cache-1")); + Collection subContexts = contextContainer.getSubContexts(); + TreeSet cacheNames = subContexts.stream().map(ContextContainer::getValue).collect(Collectors.toCollection(TreeSet::new)); + assertThat(subContexts, hasSize(3)); + assertThat(cacheNames, hasSize(3)); + assertThat(cacheNames, equalTo(new TreeSet<>(Arrays.asList("dedicated-cache-1", "shared-cache-2", "shared-cache-3")))); } @Test - public void test_capabilities_exposed() throws Exception { + public void test_C_client_capabilities_exposed() throws Exception { Capability[] capabilities = consumer.readTopology().getClient(clientIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); assertThat(capabilities.length, equalTo(5)); assertThat(capabilities[0].getName(), equalTo("ActionsCapability")); @@ -151,7 +101,7 @@ public void test_capabilities_exposed() throws Exception { assertThat(capabilities[0].getDescriptors(), hasSize(4)); Collection descriptors = capabilities[1].getDescriptors(); - Collection allDescriptors = new ArrayList(); + Collection allDescriptors = new ArrayList<>(); allDescriptors.addAll(CACHE_DESCRIPTORS); allDescriptors.addAll(ONHEAP_DESCRIPTORS); allDescriptors.addAll(OFFHEAP_DESCRIPTORS); @@ -159,63 +109,98 @@ public void test_capabilities_exposed() throws Exception { assertThat(descriptors, containsInAnyOrder(allDescriptors.toArray())); assertThat(descriptors, hasSize(allDescriptors.size())); - } @Test - public void test_notifs_sent_at_CM_init() throws Exception { - List messages = consumer.drainMessageBuffer(); - assertThat(messages.size(), equalTo(14)); - 
assertThat(notificationTypes(messages).containsAll(Arrays.asList("CLIENT_CONNECTED", "SERVER_ENTITY_FETCHED", "CLIENT_REGISTRY_UPDATED", "CLIENT_TAGS_UPDATED")), is(true)); - assertThat(consumer.readMessageBuffer(), is(nullValue())); + public void test_D_server_capabilities_exposed() throws Exception { + Capability[] capabilities = consumer.readTopology().getSingleStripe().getActiveServerEntity(serverEntityIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); + + assertThat(capabilities.length, equalTo(4)); + + assertThat(capabilities[0].getName(), equalTo("ClientStateSettings")); + assertThat(capabilities[1].getName(), equalTo("OffHeapResourceSettings")); + assertThat(capabilities[2].getName(), equalTo("ServerStoreSettings")); + assertThat(capabilities[3].getName(), equalTo("PoolSettings")); + + assertThat(capabilities[1].getDescriptors(), hasSize(3)); // time + 2 resources + assertThat(capabilities[2].getDescriptors(), hasSize(4)); // time descriptor + 3 dedicated store + + // ClientStateSettings + + assertThat(capabilities[0].getDescriptors(), hasSize(1)); + Settings settings = (Settings) capabilities[0].getDescriptors().iterator().next(); + assertThat(settings.get("attachedStores"), equalTo(new String[]{"dedicated-cache-1", "shared-cache-2", "shared-cache-3"})); + + // EhcacheStateServiceSettings + + List descriptors = new ArrayList<>(capabilities[3].getDescriptors()); + assertThat(descriptors, hasSize(4)); + + settings = (Settings) descriptors.get(0); + assertThat(settings.get("alias"), equalTo("resource-pool-b")); + assertThat(settings.get("type"), equalTo("PoolBinding")); + assertThat(settings.get("serverResource"), equalTo("primary-server-resource")); + assertThat(settings.get("size"), equalTo(16 * 1024 * 1024L)); + assertThat(settings.get("allocationType"), equalTo("SHARED")); + + settings = (Settings) descriptors.get(1); + assertThat(settings.get("alias"), equalTo("resource-pool-a")); + assertThat(settings.get("type"), equalTo("PoolBinding")); + assertThat(settings.get("serverResource"), equalTo("secondary-server-resource")); + assertThat(settings.get("size"), equalTo(28 * 1024 * 1024L)); + assertThat(settings.get("allocationType"), equalTo("SHARED")); + + settings = (Settings) descriptors.get(2); + assertThat(settings.get("alias"), equalTo("dedicated-cache-1")); + assertThat(settings.get("type"), equalTo("PoolBinding")); + assertThat(settings.get("serverResource"), equalTo("primary-server-resource")); + assertThat(settings.get("size"), equalTo(4 * 1024 * 1024L)); + assertThat(settings.get("allocationType"), equalTo("DEDICATED")); + + settings = (Settings) descriptors.get(3); + assertThat(settings.get("type"), equalTo("PoolSettingsManagementProvider")); + assertThat(settings.get("defaultServerResource"), equalTo("primary-server-resource")); } @Test - public void test_notifs_on_add_cache() throws Exception { - clear(); - - cacheManager.createCache("cache-2", CacheConfigurationBuilder.newCacheConfigurationBuilder( + public void test_E_notifs_on_add_cache() throws Exception { + cacheManager.createCache("cache-2", newCacheConfigurationBuilder( String.class, String.class, newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .offheap(1, MemoryUnit.MB) - .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) + .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .build()); ContextContainer contextContainer = 
consumer.readTopology().getClient(clientIdentifier).get().getManagementRegistry().get().getContextContainer(); - assertThat(contextContainer.getSubContexts(), hasSize(2)); + assertThat(contextContainer.getSubContexts(), hasSize(4)); - TreeSet cNames = new TreeSet(); - for (ContextContainer container : contextContainer.getSubContexts()) { - cNames.add(container.getValue()); - } - assertThat(cNames, equalTo(new TreeSet(Arrays.asList("cache-1", "cache-2")))); + TreeSet cNames = contextContainer.getSubContexts().stream().map(ContextContainer::getValue).collect(Collectors.toCollection(TreeSet::new)); + assertThat(cNames, equalTo(new TreeSet<>(Arrays.asList("cache-2", "dedicated-cache-1", "shared-cache-2", "shared-cache-3")))); List messages = consumer.drainMessageBuffer(); - assertThat(messages.size(), equalTo(3)); - assertThat(notificationTypes(messages), equalTo(Arrays.asList("CLIENT_REGISTRY_UPDATED", "CLIENT_REGISTRY_UPDATED", "CACHE_ADDED"))); + assertThat(notificationTypes(messages), equalTo(Arrays.asList( + "ENTITY_REGISTRY_UPDATED", "EHCACHE_SERVER_STORE_CREATED", "ENTITY_REGISTRY_UPDATED", + "CLIENT_REGISTRY_UPDATED", "CACHE_ADDED"))); assertThat(consumer.readMessageBuffer(), is(nullValue())); } @Test - public void test_notifs_on_remove_cache() throws Exception { - test_notifs_on_add_cache(); - clear(); - + public void test_F_notifs_on_remove_cache() throws Exception { cacheManager.removeCache("cache-2"); List messages = consumer.drainMessageBuffer(); - assertThat(messages.size(), equalTo(3)); - assertThat(notificationTypes(messages), equalTo(Arrays.asList("CLIENT_REGISTRY_UPDATED", "CLIENT_REGISTRY_UPDATED", "CACHE_REMOVED"))); + assertThat(notificationTypes(messages), equalTo(Arrays.asList("CLIENT_REGISTRY_UPDATED", "CACHE_REMOVED", "ENTITY_REGISTRY_UPDATED"))); assertThat(consumer.readMessageBuffer(), is(nullValue())); } @Test - public void test_stats_collection() throws Exception { + public void test_G_stats_collection() throws Exception { - sendManagementCallToCollectStats("Cache:HitCount"); + ContextualReturn contextualReturn = sendManagementCallToCollectStats("Cache:HitCount"); + assertThat(contextualReturn.hasExecuted(), is(true)); - Cache cache1 = cacheManager.getCache("cache-1", String.class, String.class); + Cache cache1 = cacheManager.getCache("dedicated-cache-1", String.class, String.class); cache1.put("key1", "val"); cache1.put("key2", "val"); @@ -231,10 +216,13 @@ public void test_stats_collection() throws Exception { // get the stats (we are getting the primitive counter, not the sample history) List stats = waitForNextStats(); - Sample[] samples = stats.get(0).getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); - - if(stats.size() == 1 && stats.get(0).getContext().get("cacheName").equals("cache-1") && samples.length > 0) { - val = samples[samples.length - 1].getValue(); + for (ContextualStatistics stat : stats) { + if (stat.getContext().get("cacheName").equals("dedicated-cache-1")) { + Sample[] samples = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + if(samples.length > 0) { + val = samples[samples.length - 1].getValue(); + } + } } } while(val != 2); @@ -245,10 +233,13 @@ public void test_stats_collection() throws Exception { do { List stats = waitForNextStats(); - Sample[] samples = stats.get(0).getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); - - if(stats.size() == 1 && stats.get(0).getContext().get("cacheName").equals("cache-1") && samples.length > 0) { - val = samples[samples.length - 1].getValue(); + for 
(ContextualStatistics stat : stats) { + if (stat.getContext().get("cacheName").equals("dedicated-cache-1")) { + Sample[] samples = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + if(samples.length > 0) { + val = samples[samples.length - 1].getValue(); + } + } } } while(val != 4); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java new file mode 100644 index 0000000000..24b451fd76 --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java @@ -0,0 +1,109 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.management; + +import org.ehcache.CacheManager; +import org.ehcache.config.units.EntryUnit; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredShared; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; + +public class EhcacheConfigWithManagementTest { + + private static final String RESOURCE_CONFIG = + "" + + "" + + "64" + + "64" + + "" + + "\n"; + + private static final List MANAGEMENT_PLUGINS = Stream.of(System.getProperty("managementPlugins", "").split(File.pathSeparator)) + .map(File::new) + .collect(Collectors.toList()); + + @ClassRule + public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, MANAGEMENT_PLUGINS, "", RESOURCE_CONFIG, ""); + + @BeforeClass + public static void beforeClass() throws Exception { + CLUSTER.getClusterControl().waitForActive(); + } + + @Test + public void create_cache_manager() throws Exception { + CacheManager cacheManager = newCacheManagerBuilder() + // cluster config + .with(cluster(CLUSTER.getConnectionURI().resolve("/my-server-entity-3")) + .autoCreate() + .defaultServerResource("primary-server-resource") + 
.resourcePool("resource-pool-a", 28, MemoryUnit.MB, "secondary-server-resource") // <2> + .resourcePool("resource-pool-b", 16, MemoryUnit.MB)) // will take from primary-server-resource + // management config + .using(new DefaultManagementRegistryConfiguration() + .addTags("webapp-1", "server-node-1") + .setCacheManagerAlias("my-super-cache-manager") + .addConfiguration(new EhcacheStatisticsProviderConfiguration( + 1, TimeUnit.MINUTES, + 100, 1, TimeUnit.SECONDS, + 2, TimeUnit.SECONDS))) // TTD reduce to 2 seconds so that the stat collector runs faster + // cache config + .withCache("dedicated-cache-1", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB))) + .build()) + .withCache("shared-cache-2", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredShared("resource-pool-a"))) + .build()) + .withCache("shared-cache-3", newCacheConfigurationBuilder( + String.class, String.class, + newResourcePoolsBuilder() + .heap(10, EntryUnit.ENTRIES) + .offheap(1, MemoryUnit.MB) + .with(clusteredShared("resource-pool-b"))) + .build()) + .build(true); + + cacheManager.close(); + } + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java index de223619ed..a3269aca78 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheManagerToStringTest.java @@ -92,7 +92,7 @@ public boolean adviseAgainstEviction(String key, String value) { @Test public void clusteredToString() throws Exception { - URI uri = CLUSTER.getConnectionURI().resolve("/my-server-entity-1"); + URI uri = CLUSTER.getConnectionURI().resolve("/my-server-entity-2"); CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder() // cluster config .with(ClusteringServiceConfigurationBuilder.cluster(uri) @@ -169,16 +169,4 @@ public void deleteAll(Iterable keys) throws Exception { } } - private String read(String path) throws FileNotFoundException { - Scanner scanner = new Scanner(getClass().getResourceAsStream(path), "UTF-8"); - try { - return scanner.useDelimiter("\\A").next(); - } finally { - scanner.close(); - } - } - - private static String normalizeForLineEndings(String stringToNormalize) { - return stringToNormalize.replace("\r\n","\n").replace("\r","\n"); - } } diff --git a/clustered/integration-test/src/test/resources/clusteredConfiguration.txt b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt index 40899b9409..240f5ea51e 100644 --- a/clustered/integration-test/src/test/resources/clusteredConfiguration.txt +++ b/clustered/integration-test/src/test/resources/clusteredConfiguration.txt @@ -18,7 +18,7 @@ caches: tierHeight: 10 services: - org.ehcache.clustered.client.config.ClusteringServiceConfiguration: - clusterUri: terracotta://server-1:9510/my-server-entity-1 + clusterUri: terracotta://server-1:9510/my-server-entity-2 readOperationTimeout: TimeoutDuration{20 SECONDS} autoCreate: true - org.ehcache.management.registry.DefaultManagementRegistryConfiguration diff --git a/clustered/server/build.gradle 
b/clustered/server/build.gradle index 8921237568..8d3e28f0cc 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -26,6 +26,12 @@ dependencies { compile group: 'org.terracotta', name: 'offheap-resource', version: parent.offheapResourceVersion compile group: 'org.terracotta', name: 'offheap-store', version: parent.offheapVersion compile group: 'org.slf4j', name: 'slf4j-api', version: parent.slf4jVersion + compile("org.terracotta.management:management-registry-service-api:$parent.managementVersion") { + // provided in management-server jar, but necessary so that ehcache can work without depending on management + exclude group: 'org.terracotta.management', module: 'management-registry' + exclude group: 'org.terracotta.management', module: 'management-model' + } + compile"org.terracotta.management.dist:management-common:$parent.managementVersion" provided "org.terracotta:entity-server-api:$parent.entityApiVersion" provided "org.terracotta:standard-cluster-services:$parent.terracottaApisVersion" } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ClientState.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ClientState.java new file mode 100644 index 0000000000..6b4c141a33 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ClientState.java @@ -0,0 +1,55 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +/** + * Represents a client's state against an {@link EhcacheActiveEntity}. + */ +public class ClientState { + /** + * Indicates if the client has either configured or validated with clustered store manager. + */ + private boolean attached = false; + + /** + * The set of stores to which the client has attached. 
+ */ + private final Set attachedStores = new HashSet(); + + public boolean isAttached() { + return attached; + } + + void attach() { + this.attached = true; + } + + boolean addStore(String storeName) { + return this.attachedStores.add(storeName); + } + + boolean removeStore(String storeName) { + return this.attachedStores.remove(storeName); + } + + public Set getAttachedStores() { + return Collections.unmodifiableSet(new HashSet(this.attachedStores)); + } +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index a8cee10678..57506b2dcc 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -60,6 +60,7 @@ import org.ehcache.clustered.common.internal.store.ServerStore; import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage; import org.ehcache.clustered.server.internal.messages.EntityStateSyncMessage; +import org.ehcache.clustered.server.management.Management; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.InvalidationTracker; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; @@ -122,6 +123,7 @@ class EhcacheActiveEntity implements ActiveServerEntity> inflightInvalidations; + private final Management management; static class InvalidationHolder { final ClientDescriptor clientDescriptorWaitingForInvalidation; @@ -178,6 +180,7 @@ public Class getServiceType() { if (entityMessenger == null) { throw new AssertionError("Server failed to retrieve IEntityMessenger service."); } + this.management = new Management(services, ehcacheStateService, this.offHeapResourceIdentifiers); } /** @@ -214,7 +217,9 @@ Map> getInUseStores() { public void connected(ClientDescriptor clientDescriptor) { if (!clientStateMap.containsKey(clientDescriptor)) { LOGGER.info("Connecting {}", clientDescriptor); - clientStateMap.put(clientDescriptor, new ClientState()); + ClientState clientState = new ClientState(); + clientStateMap.put(clientDescriptor, clientState); + management.clientConnected(clientDescriptor, clientState); } else { // This is logically an AssertionError LOGGER.error("Client {} already registered as connected", clientDescriptor); @@ -248,6 +253,7 @@ public void disconnected(ClientDescriptor clientDescriptor) { for (String storeId : clientState.getAttachedStores()) { detachStore(clientDescriptor, storeId); } + management.clientDisconnected(clientDescriptor, clientState); } UUID clientId = clientIdMap.remove(clientDescriptor); if (clientId != null) { @@ -312,6 +318,7 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe } LOGGER.info("Client '{}' successfully reconnected to newly promoted ACTIVE after failover.", clientDescriptor); + management.clientReconnected(clientDescriptor, clientState); } private void addInflightInvalidationsForStrongCache(ClientDescriptor clientDescriptor, ReconnectMessage reconnectMessage, String cacheId, ServerStoreImpl serverStore) { @@ -358,7 +365,7 @@ public void synchronizeKeyToPassive(PassiveSynchronizationChannel getClientsWaitingForInvalidation() { return clientsWaitingForInvalidation; } - /** - * Represents a client's state against an {@link EhcacheActiveEntity}. 
- */ - private static class ClientState { - /** - * Indicates if the client has either configured or validated with clustered store manager. - */ - private boolean attached = false; - - /** - * The set of stores to which the client has attached. - */ - private final Set attachedStores = new HashSet<>(); - - boolean isAttached() { - return attached; - } - - void attach() { - this.attached = true; - } - - boolean addStore(String storeName) { - return this.attachedStores.add(storeName); - } - - boolean removeStore(String storeName) { - return this.attachedStores.remove(storeName); - } - - Set getAttachedStores() { - return Collections.unmodifiableSet(new HashSet<>(this.attachedStores)); - } - } - private static class InvalidationTuple { private final ClientDescriptor clientDescriptor; private final Set invalidationsInProgress; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 2a72e2f4af..59197f86c8 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -39,6 +39,7 @@ import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage; import org.ehcache.clustered.server.internal.messages.EntityStateSyncMessage; import org.ehcache.clustered.server.internal.messages.EntitySyncMessage; +import org.ehcache.clustered.server.management.Management; import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.InvalidationTracker; @@ -62,6 +63,7 @@ class EhcachePassiveEntity implements PassiveServerEntity offHeapResourceIdentifiers; private final EhcacheStateService ehcacheStateService; + private final Management management; @Override public void invoke(EhcacheEntityMessage message) { @@ -109,6 +111,7 @@ public void invoke(EhcacheEntityMessage message) { if (ehcacheStateService == null) { throw new AssertionError("Server failed to retrieve EhcacheStateService."); } + management = new Management(services, ehcacheStateService, offHeapResourceIdentifiers); } private void invokeRetirementMessages(PassiveReplicationMessage message) throws ClusterException { @@ -201,8 +204,10 @@ private void invokeSyncOperation(EntitySyncMessage message) throws ClusterExcept EntityStateSyncMessage stateSyncMessage = (EntityStateSyncMessage) message; ehcacheStateService.configure(stateSyncMessage.getConfiguration()); + management.sharedPoolsConfigured(); for (Map.Entry entry : stateSyncMessage.getStoreConfigs().entrySet()) { ehcacheStateService.createStore(entry.getKey(), entry.getValue()); + management.serverStoreCreated(entry.getKey()); } stateSyncMessage.getTrackedClients().stream().forEach(id -> ehcacheStateService.getClientMessageTracker().add(id)); break; @@ -237,6 +242,7 @@ private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterEx private void configure(ConfigureStoreManager message) throws ClusterException { ehcacheStateService.configure(message.getConfiguration()); ehcacheStateService.getClientMessageTracker().setEntityConfiguredStamp(message.getClientId(), message.getId()); + management.sharedPoolsConfigured(); } private void trackAndApplyMessage(LifecycleMessage message) { @@ -267,6 +273,7 @@ private void createServerStore(CreateServerStore createServerStore) throws Clust 
if(storeConfiguration.getConsistency() == Consistency.EVENTUAL) { ehcacheStateService.addInvalidationtracker(name); } + management.serverStoreCreated(name); } private void destroyServerStore(DestroyServerStore destroyServerStore) throws ClusterException { @@ -279,6 +286,7 @@ private void destroyServerStore(DestroyServerStore destroyServerStore) throws Cl String name = destroyServerStore.getName(); LOGGER.info("Destroying clustered tier '{}'", name); + management.serverStoreDestroyed(name); ehcacheStateService.destroyServerStore(name); ehcacheStateService.removeInvalidationtracker(name); } @@ -305,7 +313,7 @@ public void endSyncConcurrencyKey(int concurrencyKey) { @Override public void createNew() { - + management.init(); } @Override @@ -315,6 +323,7 @@ public void loadExisting() { @Override public void destroy() { + management.close(); ehcacheStateService.destroy(); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index d67dc327ef..0fe42d52d1 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -16,7 +16,6 @@ package org.ehcache.clustered.server; -import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; @@ -124,6 +123,12 @@ public Map getSharedResourcePools() { return sharedResourcePools == null ? Collections.emptyMap() : sharedResourcePools.entrySet().stream().collect(toMap(Map.Entry::getKey, e -> e.getValue().getPool())); } + @Override + public ServerSideConfiguration.Pool getDedicatedResourcePool(String name) { + ResourcePageSource resourcePageSource = dedicatedResourcePools.get(name); + return resourcePageSource == null ? null : resourcePageSource.getPool(); + } + public void validate(ServerSideConfiguration configuration) throws ClusterException { if (!isConfigured()) { throw new LifecycleException("Clustered Tier Manager is not configured"); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java new file mode 100644 index 0000000000..32e38ae518 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java @@ -0,0 +1,33 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.server.ClientState; +import org.terracotta.entity.ClientDescriptor; +import org.terracotta.management.service.registry.provider.ClientBinding; + +final class ClientStateBinding extends ClientBinding { + + ClientStateBinding(ClientDescriptor clientDescriptor, ClientState clientState) { + super(clientDescriptor, clientState); + } + + @Override + public ClientState getValue() { + return (ClientState) super.getValue(); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java new file mode 100644 index 0000000000..460d4ef7e1 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java @@ -0,0 +1,67 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.server.ClientState; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.Settings; +import org.terracotta.management.model.cluster.ClientIdentifier; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.service.registry.provider.ClientBindingManagementProvider; + +import java.util.Collection; +import java.util.Collections; +import java.util.Set; +import java.util.TreeSet; + +@Named("ClientStateSettings") +@RequiredContext({@Named("consumerId"), @Named("clientId"), @Named("type")}) +class ClientStateSettingsManagementProvider extends ClientBindingManagementProvider { + + ClientStateSettingsManagementProvider() { + super(ClientStateBinding.class); + } + + @Override + protected ExposedClientStateBinding internalWrap(ClientStateBinding managedObject, long consumerId, ClientIdentifier clientIdentifier) { + return new ExposedClientStateBinding(managedObject, consumerId, clientIdentifier); + } + + private static class ExposedClientStateBinding extends ExposedClientBinding { + + ExposedClientStateBinding(ClientStateBinding clientBinding, long consumerId, ClientIdentifier clientIdentifier) { + super(clientBinding, consumerId, clientIdentifier); + } + + @Override + public Context getContext() { + return super.getContext().with("type", "ClientState"); + } + + @Override + public Collection getDescriptors() { + ClientState clientState = getClientBinding().getValue(); + Set attachedStores = clientState.getAttachedStores(); + return Collections.singleton(new Settings(getContext()) + .set("attached", clientState.isAttached()) + .set("attachedStores", new TreeSet<>(attachedStores).toArray(new String[attachedStores.size()])) + ); + } + } + +} diff 
--git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java new file mode 100644 index 0000000000..79b1239c0d --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -0,0 +1,156 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.server.ClientState; +import org.ehcache.clustered.server.ServerStoreImpl; +import org.ehcache.clustered.server.state.EhcacheStateService; +import org.terracotta.entity.ClientDescriptor; +import org.terracotta.entity.ServiceRegistry; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.service.registry.ConsumerManagementRegistry; +import org.terracotta.management.service.registry.ConsumerManagementRegistryConfiguration; +import org.terracotta.management.service.registry.provider.ClientBinding; +import org.terracotta.offheapresource.OffHeapResource; +import org.terracotta.offheapresource.OffHeapResourceIdentifier; + +import java.util.Set; + +public class Management { + + private final ConsumerManagementRegistry managementRegistry; + private final ServiceRegistry services; + private final EhcacheStateService ehcacheStateService; + private final Set<String> offHeapResourceIdentifiers; + + public Management(ServiceRegistry services, EhcacheStateService ehcacheStateService, Set<String> offHeapResourceIdentifiers) { + managementRegistry = services.getService(new ConsumerManagementRegistryConfiguration(services)); + this.services = services; + this.ehcacheStateService = ehcacheStateService; + this.offHeapResourceIdentifiers = offHeapResourceIdentifiers; + if (managementRegistry != null) { + // add some providers to describe and compute stats + managementRegistry.addManagementProvider(new ClientStateSettingsManagementProvider()); + managementRegistry.addManagementProvider(new OffHeapResourceSettingsManagementProvider()); + managementRegistry.addManagementProvider(new ServerStoreSettingsManagementProvider()); + managementRegistry.addManagementProvider(new PoolSettingsManagementProvider(ehcacheStateService)); + } + } + + // the goal of the following code is to send the management metadata from the entity into the monitoring tree AFTER the entity creation + public void init() { + if (managementRegistry != null) { + managementRegistry.register(ehcacheStateService); + + managementRegistry.register(PoolBinding.ALL_SHARED); + + for (String identifier : offHeapResourceIdentifiers) { + OffHeapResource offHeapResource = services.getService(OffHeapResourceIdentifier.identifier(identifier)); + managementRegistry.register(new OffHeapResourceBinding(identifier, offHeapResource)); + } + + managementRegistry.refresh(); + } + } + + public void close() { + if (managementRegistry != null) { +
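+    // called from the entity's destroy() (see EhcachePassiveEntity.destroy()) before the state service itself is destroyed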
managementRegistry.close(); + } + } + + public void clientConnected(ClientDescriptor clientDescriptor, ClientState clientState) { + if (managementRegistry != null) { + managementRegistry.registerAndRefresh(new ClientStateBinding(clientDescriptor, clientState)); + } + } + + + public void clientDisconnected(ClientDescriptor clientDescriptor, ClientState clientState) { + if (managementRegistry != null) { + managementRegistry.unregisterAndRefresh(new ClientStateBinding(clientDescriptor, clientState)); + } + } + + public void clientReconnected(ClientDescriptor clientDescriptor, ClientState clientState) { + if (managementRegistry != null) { + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(new ClientStateBinding(clientDescriptor, clientState), "EHCACHE_CLIENT_RECONNECTED"); + } + } + + public void sharedPoolsConfigured() { + if (managementRegistry != null) { + ehcacheStateService.getSharedResourcePools() + .entrySet() + .stream() + .forEach(e -> managementRegistry.register(new PoolBinding(e.getKey(), e.getValue(), PoolBinding.AllocationType.SHARED))); + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(PoolBinding.ALL_SHARED, "EHCACHE_RESOURCE_POOLS_CONFIGURED"); + } + } + + public void clientValidated(ClientDescriptor clientDescriptor, ClientState clientState) { + if (managementRegistry != null) { + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(new ClientStateBinding(clientDescriptor, clientState), "EHCACHE_CLIENT_VALIDATED"); + } + } + + public void serverStoreCreated(String name) { + if (managementRegistry != null) { + ServerStoreImpl serverStore = ehcacheStateService.getStore(name); + ServerStoreBinding serverStoreBinding = new ServerStoreBinding(name, serverStore); + managementRegistry.register(serverStoreBinding); + ServerSideConfiguration.Pool pool = ehcacheStateService.getDedicatedResourcePool(name); + if (pool != null) { + managementRegistry.register(new PoolBinding(name, pool, PoolBinding.AllocationType.DEDICATED)); + } + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(serverStoreBinding, "EHCACHE_SERVER_STORE_CREATED"); + } + } + + public void storeAttached(ClientDescriptor clientDescriptor, ClientState clientState, String storeName) { + if (managementRegistry != null) { + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(new ClientBinding(clientDescriptor, clientState), "EHCACHE_SERVER_STORE_ATTACHED", Context.create("storeName", storeName)); + } + } + + public void releaseStore(ClientDescriptor clientDescriptor, ClientState clientState, String storeName) { + if (managementRegistry != null) { + managementRegistry.refresh(); + managementRegistry.pushServerEntityNotification(new ClientBinding(clientDescriptor, clientState), "EHCACHE_SERVER_STORE_RELEASED", Context.create("storeName", storeName)); + } + } + + public void serverStoreDestroyed(String name) { + ServerStoreImpl serverStore = ehcacheStateService.getStore(name); + if (managementRegistry != null && serverStore != null) { + ServerStoreBinding managedObject = new ServerStoreBinding(name, serverStore); + managementRegistry.pushServerEntityNotification(managedObject, "EHCACHE_SERVER_STORE_DESTROYED"); + managementRegistry.unregister(managedObject); + ServerSideConfiguration.Pool pool = ehcacheStateService.getDedicatedResourcePool(name); + if (pool != null) { + managementRegistry.unregister(new PoolBinding(name, pool, PoolBinding.AllocationType.DEDICATED)); + } + 
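+      // as in the other lifecycle callbacks, refresh() then pushes the updated set of bindings to the monitoring tree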
managementRegistry.refresh(); + } + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java new file mode 100644 index 0000000000..2a60925c45 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.terracotta.management.service.registry.provider.AliasBinding; +import org.terracotta.offheapresource.OffHeapResource; + +class OffHeapResourceBinding extends AliasBinding { + + OffHeapResourceBinding(String identifier, OffHeapResource offHeapResource) { + super(identifier, offHeapResource); + } + + @Override + public OffHeapResource getValue() { + return (OffHeapResource) super.getValue(); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java new file mode 100644 index 0000000000..ba1223309a --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java @@ -0,0 +1,70 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.Settings; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; + +import java.util.Collection; +import java.util.Collections; + +@Named("OffHeapResourceSettings") +@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) +class OffHeapResourceSettingsManagementProvider extends AliasBindingManagementProvider { + + OffHeapResourceSettingsManagementProvider() { + super(OffHeapResourceBinding.class); + } + + @Override + public Collection getDescriptors() { + Collection descriptors = super.getDescriptors(); + descriptors.add(new Settings() + .set("type", "OffHeapResourceSettingsManagementProvider") + .set("time", System.currentTimeMillis())); + return descriptors; + } + + @Override + protected ExposedOffHeapResourceBinding wrap(OffHeapResourceBinding managedObject) { + return new ExposedOffHeapResourceBinding(managedObject, getConsumerId()); + } + + private static class ExposedOffHeapResourceBinding extends ExposedAliasBinding { + + ExposedOffHeapResourceBinding(OffHeapResourceBinding binding, long consumerId) { + super(binding, consumerId); + } + + @Override + public Context getContext() { + return super.getContext().with("type", "OffHeapResource"); + } + + @Override + public Collection getDescriptors() { + return Collections.singleton(new Settings(getContext()) + .set("capacity", getBinding().getValue().capacity()) + .set("availableAtTime", getBinding().getValue().available()) + ); + } + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java new file mode 100644 index 0000000000..14818e10c5 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java @@ -0,0 +1,64 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.terracotta.management.model.Objects; +import org.terracotta.management.service.registry.provider.AliasBinding; + +class PoolBinding extends AliasBinding { + + enum AllocationType { + SHARED, + DEDICATED + } + + // this marker is used to send global notification - it is not a real pool + static final PoolBinding ALL_SHARED = new PoolBinding("PoolBinding#all-shared", new ServerSideConfiguration.Pool(1, ""), AllocationType.SHARED); + + private final AllocationType allocationType; + + PoolBinding(String identifier, ServerSideConfiguration.Pool serverStore, AllocationType allocationType) { + super(identifier, serverStore); + this.allocationType = Objects.requireNonNull(allocationType); + } + + AllocationType getAllocationType() { + return allocationType; + } + + @Override + public ServerSideConfiguration.Pool getValue() { + return (ServerSideConfiguration.Pool) super.getValue(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + PoolBinding that = (PoolBinding) o; + return allocationType == that.allocationType; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + allocationType.hashCode(); + return result; + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java new file mode 100644 index 0000000000..c19723a021 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java @@ -0,0 +1,76 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.server.state.EhcacheStateService; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.Settings; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; + +import java.util.Collection; +import java.util.Collections; + +@Named("PoolSettings") +@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) +class PoolSettingsManagementProvider extends AliasBindingManagementProvider { + + private final EhcacheStateService ehcacheStateService; + + PoolSettingsManagementProvider(EhcacheStateService ehcacheStateService) { + super(PoolBinding.class); + this.ehcacheStateService = ehcacheStateService; + } + + @Override + public Collection getDescriptors() { + Collection descriptors = super.getDescriptors(); + descriptors.add(new Settings() + .set("type", "PoolSettingsManagementProvider") + .set("defaultServerResource", ehcacheStateService.getDefaultServerResource())); + return descriptors; + } + + @Override + protected ExposedPoolBinding wrap(PoolBinding managedObject) { + return new ExposedPoolBinding(managedObject, getConsumerId()); + } + + private static class ExposedPoolBinding extends ExposedAliasBinding { + + ExposedPoolBinding(PoolBinding binding, long consumerId) { + super(binding, consumerId); + } + + @Override + public Context getContext() { + return super.getContext().with("type", "PoolBinding"); + } + + @Override + public Collection getDescriptors() { + return getBinding() == PoolBinding.ALL_SHARED ? + Collections.emptyList() : + Collections.singleton(new Settings(getContext()) + .set("serverResource", getBinding().getValue().getServerResource()) + .set("size", getBinding().getValue().getSize()) + .set("allocationType", getBinding().getAllocationType())); + } + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java new file mode 100644 index 0000000000..2d2cdc185c --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.server.ServerStoreImpl; +import org.terracotta.management.service.registry.provider.AliasBinding; + +class ServerStoreBinding extends AliasBinding { + + ServerStoreBinding(String identifier, ServerStoreImpl serverStore) { + super(identifier, serverStore); + } + + @Override + public ServerStoreImpl getValue() { + return (ServerStoreImpl) super.getValue(); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java new file mode 100644 index 0000000000..027ce2d5c0 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java @@ -0,0 +1,90 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.common.PoolAllocation; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.Settings; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; + +import java.util.Collection; +import java.util.Collections; + +@Named("ServerStoreSettings") +@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) +class ServerStoreSettingsManagementProvider extends AliasBindingManagementProvider { + + ServerStoreSettingsManagementProvider() { + super(ServerStoreBinding.class); + } + + @Override + public Collection getDescriptors() { + Collection descriptors = super.getDescriptors(); + descriptors.add(new Settings() + .set("type", "ServerStoreSettingsManagementProvider") + .set("time", System.currentTimeMillis())); + return descriptors; + } + + @Override + protected ExposedServerStoreBinding wrap(ServerStoreBinding managedObject) { + return new ExposedServerStoreBinding(managedObject, getConsumerId()); + } + + private static class ExposedServerStoreBinding extends ExposedAliasBinding { + + ExposedServerStoreBinding(ServerStoreBinding binding, long consumerId) { + super(binding, consumerId); + } + + @Override + public Context getContext() { + return super.getContext().with("type", "ServerStore"); + } + + @Override + public Collection getDescriptors() { + return Collections.singleton(getSettings()); + } + + Settings getSettings() { + // names taken from ServerStoreConfiguration.isCompatible() + PoolAllocation poolAllocation = getBinding().getValue().getStoreConfiguration().getPoolAllocation(); + Settings settings = new Settings(getContext()) + .set("resourcePoolType", poolAllocation.getClass().getSimpleName().toLowerCase()) + 
.set("allocatedMemoryAtTime", getBinding().getValue().getAllocatedMemory()) + .set("tableCapacityAtTime", getBinding().getValue().getTableCapacity()) + .set("vitalMemoryAtTime", getBinding().getValue().getVitalMemory()) + .set("longSizeAtTime", getBinding().getValue().getSize()) + .set("dataAllocatedMemoryAtTime", getBinding().getValue().getDataAllocatedMemory()) + .set("dataOccupiedMemoryAtTime", getBinding().getValue().getDataOccupiedMemory()) + .set("dataSizeAtTime", getBinding().getValue().getDataSize()) + .set("dataVitalMemoryAtTime", getBinding().getValue().getDataVitalMemory()); + if (poolAllocation instanceof PoolAllocation.Dedicated) { + settings.set("resourcePoolDedicatedResourceName", ((PoolAllocation.Dedicated) poolAllocation).getResourceName()); + settings.set("resourcePoolDedicatedSize", ((PoolAllocation.Dedicated) poolAllocation).getSize()); + } else if (poolAllocation instanceof PoolAllocation.Shared) { + settings.set("resourcePoolSharedPoolName", ((PoolAllocation.Shared) poolAllocation).getResourcePoolName()); + } + return settings; + } + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index 0aaf6558f8..0a21291d23 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -34,6 +34,8 @@ public interface EhcacheStateService { Map getSharedResourcePools(); + ServerSideConfiguration.Pool getDedicatedResourcePool(String name); + ServerStoreImpl getStore(String name); Set getStores(); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 97108745d9..14538de24f 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -48,6 +48,8 @@ import org.terracotta.entity.PassiveSynchronizationChannel; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; +import org.terracotta.management.service.registry.ConsumerManagementRegistry; +import org.terracotta.management.service.registry.ConsumerManagementRegistryConfiguration; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; @@ -2886,6 +2888,8 @@ public Set getAllIdentifiers() { return (T) (this.storeManagerService); } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { return (T) mock(IEntityMessenger.class); + } else if(serviceConfiguration instanceof ConsumerManagementRegistryConfiguration) { + return (T) mock(ConsumerManagementRegistry.class); } throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); @@ -2930,6 +2934,11 @@ public long available() { return this.capacity - this.used; } + @Override + public long capacity() { + return capacity; + } + private long getUsed() { return used; } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index 
cfcb28e4e8..ac312d0c6d 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -31,6 +31,8 @@ import org.terracotta.entity.IEntityMessenger; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; +import org.terracotta.management.service.registry.ConsumerManagementRegistry; +import org.terracotta.management.service.registry.ConsumerManagementRegistryConfiguration; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; @@ -659,6 +661,8 @@ public Set getAllIdentifiers() { return (T) (this.storeManagerService); } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { return (T) mock(IEntityMessenger.class); + } else if(serviceConfiguration instanceof ConsumerManagementRegistryConfiguration) { + return (T) mock(ConsumerManagementRegistry.class); } throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); @@ -703,6 +707,11 @@ public long available() { return this.capacity - this.used; } + @Override + public long capacity() { + return capacity; + } + private long getUsed() { return used; } diff --git a/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java b/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java index 08f71cf135..6b559d19d1 100644 --- a/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java +++ b/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java @@ -38,7 +38,7 @@ public CacheBindingManagementProvider(ManagementRegistryServiceConfiguration reg @Override public Collection getDescriptors() { Collection capabilities = new LinkedHashSet(); - for (ExposedObject o : managedObjects) { + for (ExposedObject o : getExposedObjects()) { capabilities.addAll(((ExposedCacheBinding) o).getDescriptors()); } return capabilities; diff --git a/management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java b/management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java index 9b26b3756b..ee84c7c74d 100644 --- a/management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java +++ b/management/src/main/java/org/ehcache/management/providers/ExposedCacheBinding.java @@ -28,10 +28,12 @@ public abstract class ExposedCacheBinding implements ExposedObject protected final ManagementRegistryServiceConfiguration registryConfiguration; protected final CacheBinding cacheBinding; + private final Context context; protected ExposedCacheBinding(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding) { this.registryConfiguration = registryConfiguration; this.cacheBinding = cacheBinding; + this.context = registryConfiguration.getContext().with("cacheName", cacheBinding.getAlias()); } @Override @@ -46,10 +48,11 @@ public final CacheBinding getTarget() { } @Override - public final boolean matches(Context context) { - return context.contains(registryConfiguration.getContext().with("cacheName", cacheBinding.getAlias())); + public Context getContext() { + return context; } + @Override public Collection getDescriptors() { return Collections.emptyList(); } diff --git 
a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java b/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java index 50d894d0a7..927640ca46 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java +++ b/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java @@ -103,13 +103,11 @@ public void stop() { public void cacheAdded(String alias, Cache cache) { StatisticsManager.associate(cache).withParent(cacheManager); - register(cache); register(new CacheBinding(alias, cache)); } @Override public void cacheRemoved(String alias, Cache cache) { - unregister(cache); unregister(new CacheBinding(alias, cache)); StatisticsManager.dissociate(cache).fromParent(cacheManager); From c686ccb5b77fafc9026140e3834ab996934ce03b Mon Sep 17 00:00:00 2001 From: "U-EUR\\GGIB" Date: Mon, 17 Oct 2016 11:32:05 -0700 Subject: [PATCH 095/218] This is a combination of 2 commits. This is a combination of 2 commits. This is a combination of 2 commits. adds statistics project to clustered project and registers server side clustered allocatedMemory stat unregisters shadow tree statistics in EhcacheActiveEntity and EhcachePassiveEntity, and updates passthroughStatistic tags and properties for searchability adds null check before cleaning stats node, and updates tags to be PageSource moves stat cleanup to EhcacheStateServiceImpl adds all remaining server side stats splits registerStatistics method into registerStoreStatistics and registerPoolStatistics ServerStoreImpl implements MapInternals removes trailing whitespace registers stats using Java8 streams, updates passthrough server to call destroy on entities and decreases JVM size cleanup after rebase removes executable permissions from file uses lambda expression to pass callable rebase fixes --- build.gradle | 2 +- .../internal/UnitTestConnectionService.java | 43 +++++++++- clustered/server/build.gradle | 3 + .../server/EhcacheStateServiceImpl.java | 78 +++++++++++++++++++ .../clustered/server/ServerStoreImpl.java | 54 ++++++++++--- .../offheap/OffHeapServerStoreTest.java | 43 ++++++++++ 6 files changed, 207 insertions(+), 16 deletions(-) diff --git a/build.gradle b/build.gradle index f886e7318c..1d99bfbdca 100644 --- a/build.gradle +++ b/build.gradle @@ -125,7 +125,7 @@ subprojects { } test { - maxHeapSize = "1024m" + maxHeapSize = "1408m" systemProperty 'java.awt.headless', 'true' if (parent.isCloudbees) { systemProperty 'disable.concurrent.tests', 'true' diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java index 23cfd4b976..26ef77111a 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java @@ -32,6 +32,7 @@ import java.util.IdentityHashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Properties; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; @@ -44,17 +45,22 @@ import org.terracotta.connection.ConnectionException; import org.terracotta.connection.ConnectionPropertyNames; import org.terracotta.connection.ConnectionService; +import org.terracotta.connection.entity.Entity; +import 
org.terracotta.connection.entity.EntityRef; import org.terracotta.entity.EntityClientService; import org.terracotta.entity.EntityMessage; import org.terracotta.entity.EntityResponse; import org.terracotta.entity.EntityServerService; import org.terracotta.entity.ServiceProvider; import org.terracotta.entity.ServiceProviderConfiguration; +import org.terracotta.exception.EntityNotFoundException; +import org.terracotta.exception.EntityNotProvidedException; import org.terracotta.offheapresource.OffHeapResourcesConfiguration; import org.terracotta.offheapresource.OffHeapResourcesProvider; import org.terracotta.offheapresource.config.MemoryUnit; import org.terracotta.offheapresource.config.OffheapResourcesType; import org.terracotta.offheapresource.config.ResourceType; +import org.terracotta.passthrough.PassthroughConnection; import org.terracotta.passthrough.PassthroughServer; import org.terracotta.passthrough.PassthroughServerRegistry; @@ -207,9 +213,6 @@ public static PassthroughServer remove(URI uri) { URI keyURI = createKey(uri); ServerDescriptor serverDescriptor = SERVERS.remove(keyURI); if (serverDescriptor != null) { - serverDescriptor.server.stop(); - LOGGER.info("Stopped PassthroughServer at {}", keyURI); - for (Connection connection : serverDescriptor.getConnections().keySet()) { try { LOGGER.warn("Force close {}", formatConnectionId(connection)); @@ -220,6 +223,28 @@ public static PassthroughServer remove(URI uri) { // Ignored } } + + //open destroy connection. You need to make sure connection doesn't have any entities associated with it. + PassthroughConnection connection = serverDescriptor.server.connectNewClient("destroy-connection"); + + for(Entry entry : serverDescriptor.knownEntities.entrySet()) { + Class type = (Class)entry.getKey(); + List args = (List)entry.getValue(); + Long version = (Long)args.get(0); + String stringArg = (String)args.get(1); + + try { + EntityRef entityRef = connection.getEntityRef(type, version, stringArg); + entityRef.destroy(); + } catch (EntityNotProvidedException ex) { + LOGGER.error("Entity destroy failed: ", ex); + } catch (EntityNotFoundException ex) { + LOGGER.error("Entity destroy failed: ", ex); + } + } + + serverDescriptor.server.stop(); + LOGGER.info("Stopped PassthroughServer at {}", keyURI); return serverDescriptor.server; } else { return null; @@ -465,6 +490,7 @@ synchronized void removeConnections() { private static final class ServerDescriptor { private final PassthroughServer server; private final Map connections = new IdentityHashMap(); + private final Map, List> knownEntities = new HashMap, List>(); ServerDescriptor(PassthroughServer server) { this.server = server; @@ -481,6 +507,13 @@ synchronized void add(Connection connection, Properties properties) { synchronized void remove(Connection connection) { this.connections.remove(connection); } + + public void addKnownEntity(Class arg, Object arg1, Object arg2) { + List set = new ArrayList(); + set.add(arg1); + set.add(arg2); + knownEntities.put(arg, set); + } } /** @@ -503,6 +536,10 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl serverDescriptor.remove(connection); LOGGER.info("Client closed {}", formatConnectionId(connection)); } + + if (method.getName().equals("getEntityRef")) { + serverDescriptor.addKnownEntity((Class) args[0], args[1] ,args[2]); + } try { return method.invoke(connection, args); } catch (InvocationTargetException e) { diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index 8d3e28f0cc..5721a1dc46 100644 --- 
a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -22,6 +22,9 @@ sourceCompatibility = 1.8 targetCompatibility = 1.8 dependencies { + compile ("org.terracotta:statistics:$parent.statisticVersion") { + exclude group:'org.slf4j', module:'slf4j-api' + } compile project(':clustered:common') compile group: 'org.terracotta', name: 'offheap-resource', version: parent.offheapResourceVersion compile group: 'org.terracotta', name: 'offheap-store', version: parent.offheapVersion diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index 0fe42d52d1..ed25e4abf0 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.server; +import java.util.Arrays; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; @@ -32,6 +33,7 @@ import org.ehcache.clustered.common.internal.exceptions.InvalidStoreManagerException; import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.exceptions.ResourceConfigurationException; +import org.terracotta.context.TreeNode; import org.terracotta.entity.ServiceRegistry; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; @@ -40,6 +42,7 @@ import org.terracotta.offheapstore.paging.Page; import org.terracotta.offheapstore.paging.PageSource; import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; +import org.terracotta.statistics.StatisticsManager; import java.util.Collections; import java.util.HashMap; @@ -49,15 +52,43 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.Callable; +import java.util.function.Function; import static java.util.stream.Collectors.toMap; import static org.terracotta.offheapstore.util.MemoryUnit.GIGABYTES; import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; + public class EhcacheStateServiceImpl implements EhcacheStateService { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheStateServiceImpl.class); + private static final String STATISTICS_STORE_TAG = "ServerStore"; + private static final String STATISTICS_POOL_TAG = "Pool"; + private static final String PROPERTY_STORE_KEY = "storeName"; + private static final String PROPERTY_POOL_KEY = "poolName"; + + private static final Map> STAT_STORE_METHOD_REFERENCES = new HashMap<>(); + private static final Map> STAT_POOL_METHOD_REFERENCES = new HashMap<>(); + + static { + STAT_STORE_METHOD_REFERENCES.put("allocatedMemory", ServerStoreImpl::getAllocatedMemory); + STAT_STORE_METHOD_REFERENCES.put("dataOccupiedMemory", ServerStoreImpl::getDataOccupiedMemory); + STAT_STORE_METHOD_REFERENCES.put("occupiedMemory", ServerStoreImpl::getOccupiedMemory); + STAT_STORE_METHOD_REFERENCES.put("dataOccupiedMemory", ServerStoreImpl::getDataOccupiedMemory); + STAT_STORE_METHOD_REFERENCES.put("entries", ServerStoreImpl::getSize); + STAT_STORE_METHOD_REFERENCES.put("usedSlotCount", ServerStoreImpl::getUsedSlotCount); + STAT_STORE_METHOD_REFERENCES.put("dataVitalMemory", 
ServerStoreImpl::getDataVitalMemory); + STAT_STORE_METHOD_REFERENCES.put("vitalMemory", ServerStoreImpl::getVitalMemory); + STAT_STORE_METHOD_REFERENCES.put("reprobeLength", ServerStoreImpl::getReprobeLength); + STAT_STORE_METHOD_REFERENCES.put("removedSlotCount", ServerStoreImpl::getRemovedSlotCount); + STAT_STORE_METHOD_REFERENCES.put("dataSize", ServerStoreImpl::getDataSize); + STAT_STORE_METHOD_REFERENCES.put("tableCapacity", ServerStoreImpl::getTableCapacity); + + STAT_POOL_METHOD_REFERENCES.put("allocatedSize", ResourcePageSource::getAllocatedSize); + } + private final ServiceRegistry services; private final Set offHeapResourceIdentifiers; @@ -241,6 +272,7 @@ private ResourcePageSource createPageSource(String poolName, ServerSideConfigura } else if (source.reserve(pool.getSize())) { try { pageSource = new ResourcePageSource(pool); + registerPoolStatistics(poolName, pageSource); } catch (RuntimeException t) { source.release(pool.getSize()); throw new ResourceConfigurationException("Failure allocating pool " + pool, t); @@ -252,6 +284,42 @@ private ResourcePageSource createPageSource(String poolName, ServerSideConfigura return pageSource; } + private void registerStoreStatistics(ServerStoreImpl store, String storeName) throws InvalidStoreException { + STAT_STORE_METHOD_REFERENCES.entrySet().stream().forEach((entry)-> + registerStatistic(store, storeName, entry.getKey(), STATISTICS_STORE_TAG, PROPERTY_STORE_KEY, () -> entry.getValue().apply(store) )); + } + + private void registerPoolStatistics(String poolName, ResourcePageSource pageSource) { + STAT_POOL_METHOD_REFERENCES.entrySet().stream().forEach((entry)-> + registerStatistic(pageSource, poolName, entry.getKey(), STATISTICS_POOL_TAG, PROPERTY_POOL_KEY, () -> entry.getValue().apply(pageSource)) + ); + } + + private void unRegisterStoreStatistics(ServerStoreImpl store) { + TreeNode node = StatisticsManager.nodeFor(store); + + if(node != null) { + node.clean(); + } + } + + private void unRegisterPoolStatistics(ResourcePageSource pageSource) { + TreeNode node = StatisticsManager.nodeFor(pageSource); + + if(node != null) { + node.clean(); + } + } + + private void registerStatistic(Object context, String name, String observerName, String tag, String propertyKey, Callable callable) { + Set tags = new HashSet(Arrays.asList(tag,"tier")); + Map properties = new HashMap(); + properties.put("discriminator", tag); + properties.put(propertyKey, name); + + StatisticsManager.createPassThroughStatistic(context, observerName, tags, properties, callable); + } + private void releaseDedicatedPool(String name, PageSource pageSource) { /* * A ServerStore using a dedicated resource pool is the only referent to that pool. 
When such a @@ -270,6 +338,7 @@ private void releaseDedicatedPool(String name, PageSource pageSource) { public void destroy() { for (Map.Entry storeEntry: stores.entrySet()) { + unRegisterStoreStatistics(storeEntry.getValue()); storeEntry.getValue().close(); } stores.clear(); @@ -300,6 +369,7 @@ private void releasePool(String poolType, String poolName, ResourcePageSource re ServerSideConfiguration.Pool pool = resourcePageSource.getPool(); OffHeapResource source = services.getService(OffHeapResourceIdentifier.identifier(pool.getServerResource())); if (source != null) { + unRegisterPoolStatistics(resourcePageSource); source.release(pool.getSize()); LOGGER.info("Released {} bytes from resource '{}' for {} pool '{}'", pool.getSize(), pool.getServerResource(), poolType, poolName); } @@ -313,11 +383,15 @@ public ServerStoreImpl createStore(String name, ServerStoreConfiguration serverS PageSource resourcePageSource = getPageSource(name, serverStoreConfiguration.getPoolAllocation()); ServerStoreImpl serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource, mapper); stores.put(name, serverStore); + + registerStoreStatistics(serverStore, name); + return serverStore; } public void destroyServerStore(String name) throws ClusterException { final ServerStoreImpl store = stores.remove(name); + unRegisterStoreStatistics(store); if (store == null) { throw new InvalidStoreException("Clustered tier '" + name + "' does not exist"); } else { @@ -420,6 +494,10 @@ public ServerSideConfiguration.Pool getPool() { return pool; } + public long getAllocatedSize() { + return delegatePageSource.getAllocatedSizeUnSync(); + } + @Override public Page allocate(int size, boolean thief, boolean victim, OffHeapStorageArea owner) { return delegatePageSource.allocate(size, thief, victim, owner); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java index 20e034fc8b..b302fcbb69 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java @@ -100,38 +100,68 @@ public List> getSegments() { @Override - public long getDataAllocatedMemory() {return store.getDataAllocatedMemory();} + public long getSize() { + return store.getSize(); + } @Override - public long getAllocatedMemory() {return store.getAllocatedMemory();} + public long getTableCapacity() { + return store.getTableCapacity(); + } @Override - public long getRemovedSlotCount() {return store.getRemovedSlotCount();} + public long getUsedSlotCount() { + return store.getUsedSlotCount(); + } @Override - public long getDataVitalMemory() {return store.getDataVitalMemory();} + public long getRemovedSlotCount() { + return store.getRemovedSlotCount(); + } @Override - public int getReprobeLength() {return store.getReprobeLength();} + public long getAllocatedMemory() { + return store.getAllocatedMemory(); + } @Override - public long getDataSize() {return store.getDataSize();} + public long getOccupiedMemory() { + return store.getOccupiedMemory(); + } @Override - public long getDataOccupiedMemory() {return store.getDataOccupiedMemory();} + public long getVitalMemory() { + return store.getVitalMemory(); + } @Override - public long getUsedSlotCount() {return store.getUsedSlotCount();} + public long getDataAllocatedMemory() { + return store.getDataAllocatedMemory(); + } @Override - public long getSize() {return store.getSize();} + 
public long getDataOccupiedMemory() { + return store.getDataOccupiedMemory(); + } @Override - public long getVitalMemory() {return store.getVitalMemory();} + public long getDataVitalMemory() { + return store.getDataVitalMemory(); + } @Override - public long getOccupiedMemory() {return store.getOccupiedMemory();} + public long getDataSize() { + return store.getDataSize(); + } @Override - public long getTableCapacity() {return store.getTableCapacity();} + public int getReprobeLength() { + //TODO + //MapInternals Interface may need to change to implement this function correctly. + //Currently MapInternals Interface contains function: int getReprobeLength(); + //however OffHeapServerStore.reprobeLength() returns a long + //Thus there could be data loss + + throw new UnsupportedOperationException("Not supported yet."); + } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java index befa6ce0e2..13320570d9 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/OffHeapServerStoreTest.java @@ -35,7 +35,10 @@ import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.core.Is.is; +import org.junit.Assert; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyLong; import static org.mockito.Mockito.doThrow; @@ -200,4 +203,44 @@ public void testCrossSegmentShrinking() { } } + @Test + public void testServerSideUsageStats() { + + long maxBytes = MEGABYTES.toBytes(1); + OffHeapServerStore store = new OffHeapServerStore(new UpfrontAllocatingPageSource(new OffHeapBufferSource(), maxBytes, MEGABYTES.toBytes(1)), new KeySegmentMapper(16)); + + int oneKb = 1024; + long smallLoopCount = 5; + ByteBuffer smallValue = ByteBuffer.allocate(oneKb); + for (long i = 0; i < smallLoopCount; i++) { + store.getAndAppend(i, smallValue.duplicate()); + } + + Assert.assertThat(store.getAllocatedMemory(),lessThanOrEqualTo(maxBytes)); + Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); + Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(store.getOccupiedMemory())); + + //asserts above already guarantee that occupiedMemory <= maxBytes and that occupiedMemory <= allocatedMemory + Assert.assertThat(store.getOccupiedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); + + Assert.assertThat(store.getSize(), is(smallLoopCount)); + + int multiplier = 100; + long largeLoopCount = 5 + smallLoopCount; + ByteBuffer largeValue = ByteBuffer.allocate(multiplier * oneKb); + for (long i = smallLoopCount; i < largeLoopCount; i++) { + store.getAndAppend(i, largeValue.duplicate()); + } + + Assert.assertThat(store.getAllocatedMemory(),lessThanOrEqualTo(maxBytes)); + Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo( (smallLoopCount * oneKb) + ( (largeLoopCount - smallLoopCount) * oneKb * multiplier) )); + Assert.assertThat(store.getAllocatedMemory(),greaterThanOrEqualTo(store.getOccupiedMemory())); + + //asserts above already guarantee that occupiedMemory <= maxBytes and that occupiedMemory <= allocatedMemory + 
Assert.assertThat(store.getOccupiedMemory(),greaterThanOrEqualTo(smallLoopCount * oneKb)); + + Assert.assertThat(store.getSize(), is(smallLoopCount + (largeLoopCount - smallLoopCount))); + + } + } From ea88b5155e5cad9e43db105970d880f0a45a3572 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Tue, 25 Oct 2016 16:20:07 +0530 Subject: [PATCH 096/218] Closes #1525 Make EhcacheStateServiceImpl thread-safe --- .../server/EhcacheStateServiceImpl.java | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index ed25e4abf0..8330a8a694 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -91,30 +91,31 @@ public class EhcacheStateServiceImpl implements EhcacheStateService { private final ServiceRegistry services; private final Set offHeapResourceIdentifiers; + private volatile boolean configured = false; /** * The name of the resource to use for dedicated resource pools not identifying a resource from which * space for the pool is obtained. This value may be {@code null}; */ - private String defaultServerResource; + private volatile String defaultServerResource; /** * The clustered shared resource pools specified by the CacheManager creating this {@code EhcacheActiveEntity}. * The index is the name assigned to the shared resource pool in the cache manager configuration. */ - private Map sharedResourcePools; + private final Map sharedResourcePools = new ConcurrentHashMap<>(); /** * The clustered dedicated resource pools specified by caches defined in CacheManagers using this * {@code EhcacheActiveEntity}. The index is the cache identifier (alias). */ - private Map dedicatedResourcePools = new HashMap<>(); + private final Map dedicatedResourcePools = new ConcurrentHashMap<>(); /** * The clustered stores representing the server-side of a {@code ClusterStore}. * The index is the cache alias/identifier. 
*/ - private Map stores = Collections.emptyMap(); + private final Map stores = new ConcurrentHashMap<>(); private final ClientMessageTracker messageTracker = new ClientMessageTracker(); private final ConcurrentMap invalidationMap = new ConcurrentHashMap<>(); @@ -227,9 +228,8 @@ public void configure(ServerSideConfiguration configuration) throws ClusterExcep } } - this.sharedResourcePools = createPools(resolveResourcePools(configuration)); - this.stores = new HashMap<>(); - + this.sharedResourcePools.putAll(createPools(resolveResourcePools(configuration))); + configured = true; } else { throw new InvalidStoreManagerException("Clustered Tier Manager already configured"); } @@ -349,8 +349,9 @@ public void destroy() { releasePools("shared", this.sharedResourcePools); releasePools("dedicated", this.dedicatedResourcePools); - this.sharedResourcePools = null; + this.sharedResourcePools.clear(); invalidationMap.clear(); + this.configured = false; } private void releasePools(String poolType, Map resourcePools) { @@ -457,7 +458,7 @@ public InvalidationTracker removeInvalidationtracker(String cacheId) { } public boolean isConfigured() { - return (sharedResourcePools != null); + return configured; } @Override From 9935622c694a76cb94b4c845dca29aea550f8f49 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Tue, 25 Oct 2016 17:27:14 +0530 Subject: [PATCH 097/218] Closes #1418 Make EhcacheActiveEntity thread-safe --- .../clustered/server/EhcacheActiveEntity.java | 17 +++++++++++------ .../server/state/ClientMessageTracker.java | 4 ++-- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 57506b2dcc..2ebb96c5ff 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -107,7 +107,7 @@ class EhcacheActiveEntity implements ActiveServerEntity clientStateMap = new HashMap<>(); + private final Map clientStateMap = new ConcurrentHashMap<>(); private final ConcurrentHashMap> storeClientMap = new ConcurrentHashMap<>(); @@ -395,18 +395,23 @@ private void addInflightInvalidationsForEventualCaches() { private void validateClientConnected(ClientDescriptor clientDescriptor) throws ClusterException { ClientState clientState = this.clientStateMap.get(clientDescriptor); - if (clientState == null) { - throw new LifecycleException("Client " + clientDescriptor + " is not connected to the Clustered Tier Manager"); - } + validateClientConnected(clientDescriptor, clientState); } private void validateClientAttached(ClientDescriptor clientDescriptor) throws ClusterException { - validateClientConnected(clientDescriptor); - if (!clientStateMap.get(clientDescriptor).isAttached()) { + ClientState clientState = this.clientStateMap.get(clientDescriptor); + validateClientConnected(clientDescriptor, clientState); + if (!clientState.isAttached()) { throw new LifecycleException("Client " + clientDescriptor + " is not attached to the Clustered Tier Manager"); } } + private static void validateClientConnected(ClientDescriptor clientDescriptor, ClientState clientState) throws LifecycleException { + if (clientState == null) { + throw new LifecycleException("Client " + clientDescriptor + " is not connected to the Clustered Tier Manager"); + } + } + private void validateClusteredTierManagerConfigured(ClientDescriptor 
clientDescriptor) throws ClusterException { validateClientAttached(clientDescriptor); if (!ehcacheStateService.isConfigured()) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java index 05c8fb14a3..b97f269b49 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java @@ -26,8 +26,8 @@ public class ClientMessageTracker { private final ConcurrentMap messageTrackers = new ConcurrentHashMap<>(); - private UUID entityConfiguredStamp = null; - private long configuredTimestamp; + private volatile UUID entityConfiguredStamp = null; + private volatile long configuredTimestamp; public boolean isAdded(UUID clientId) { return messageTrackers.containsKey(clientId); From d141e6bd89b7afdfb32260195a6f1dc4c214c6b9 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Fri, 21 Oct 2016 18:13:34 +0530 Subject: [PATCH 098/218] New replication message for cache lifecycle #1517 --- .../messages/PassiveReplicationMessage.java | 24 ++++++++++++++++++- .../PassiveReplicationMessageCodec.java | 18 ++++++++++++-- 2 files changed, 39 insertions(+), 3 deletions(-) diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java index 578e3bd511..bb8bb1d24e 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java @@ -29,7 +29,8 @@ public enum ReplicationOp { CHAIN_REPLICATION_OP((byte) 41), CLIENTID_TRACK_OP((byte) 42), CLEAR_INVALIDATION_COMPLETE((byte) 43), - INVALIDATION_COMPLETE((byte) 44) + INVALIDATION_COMPLETE((byte) 44), + SERVER_STORE_LIFECYCLE_REPLICATION_OP((byte) 45) ; private final byte replicationOpCode; @@ -53,6 +54,8 @@ public static ReplicationOp getReplicationOp(byte replicationOpCode) { return CLEAR_INVALIDATION_COMPLETE; case 44: return INVALIDATION_COMPLETE; + case 45: + return SERVER_STORE_LIFECYCLE_REPLICATION_OP; default: throw new IllegalArgumentException("Replication operation not defined for : " + replicationOpCode); } @@ -186,4 +189,23 @@ public long getKey() { return key; } } + + public static class ServerStoreLifeCycleReplicationMessage extends ClientIDTrackerMessage { + + private final LifecycleMessage message; + + public ServerStoreLifeCycleReplicationMessage(long msgId, UUID clientId, LifecycleMessage message) { + super(msgId, clientId); + this.message = message; + } + + public LifecycleMessage getMessage() { + return message; + } + + @Override + public ReplicationOp operation() { + return ReplicationOp.SERVER_STORE_LIFECYCLE_REPLICATION_OP; + } + } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java index c600883335..fb6b8b1d8f 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java @@ 
-17,9 +17,8 @@ package org.ehcache.clustered.common.internal.messages; import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ReplicationOp; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.Util; import java.nio.ByteBuffer; import java.util.UUID; @@ -70,6 +69,14 @@ public byte[] encode(PassiveReplicationMessage message) { encodedMsg.put(message.getOpCode()); encodedMsg.putLong(invalidationCompleteMessage.getKey()); CodecUtil.putStringAsCharArray(encodedMsg, invalidationCompleteMessage.getCacheId()); + case SERVER_STORE_LIFECYCLE_REPLICATION_OP: + ServerStoreLifeCycleReplicationMessage storeLifeCycleReplicationMessage = (ServerStoreLifeCycleReplicationMessage)message; + byte[] encodedLifeCycleMsg = Util.marshall(storeLifeCycleReplicationMessage.getMessage()); + encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + MESSAGE_ID_SIZE + encodedLifeCycleMsg.length); + encodedMsg.put(message.getOpCode()); + encodedMsg.put(ClusteredEhcacheIdentity.serialize(message.getClientId())); + encodedMsg.putLong(message.getId()); + encodedMsg.put(encodedLifeCycleMsg); return encodedMsg.array(); default: throw new UnsupportedOperationException("This operation is not supported : " + message.operation()); @@ -106,6 +113,13 @@ public EhcacheEntityMessage decode(byte[] payload) { key = byteBuffer.getLong(); cacheId = CodecUtil.getStringFromBuffer(byteBuffer, byteBuffer.remaining()/2); return new InvalidationCompleteMessage(cacheId, key); + case SERVER_STORE_LIFECYCLE_REPLICATION_OP: + clientId = getClientId(byteBuffer); + msgId = byteBuffer.getLong(); + byte[] encodedLifeCycle = new byte[byteBuffer.remaining()]; + byteBuffer.get(encodedLifeCycle); + LifecycleMessage lifecycleMessage = (LifecycleMessage)Util.unmarshall(encodedLifeCycle); + return new ServerStoreLifeCycleReplicationMessage(msgId, clientId, lifecycleMessage); default: throw new UnsupportedOperationException("This operation code is not supported : " + replicationOp); } From 3a9774379f1d1b10c6289266953974d4d488e84c Mon Sep 17 00:00:00 2001 From: Abhilash Date: Fri, 21 Oct 2016 18:14:19 +0530 Subject: [PATCH 099/218] lifecycle on passive using deferred messages #1517 --- ...leMessageActivePassvieReplicationTest.java | 42 +++++++++++++++++++ .../server/EhcachePassiveEntity.java | 24 +++++++++-- 2 files changed, 63 insertions(+), 3 deletions(-) diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java index 859c197fe6..6f50c3d01d 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java @@ -180,6 +180,48 @@ public void testDestroyServerStoreReplication() throws Exception { } + @Test + public void testDestroyServerStoreIsNotReplicatedIfFailsOnActive() throws Exception { + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) + .autoCreate() + .build(); + + ClusteringService service1 = new ClusteringServiceFactory().create(configuration); + + ClusteringService service2 = new 
ClusteringServiceFactory().create(configuration); + + service1.start(null); + service2.start(null); + + EhcacheClientEntity clientEntity1 = getEntity(service1); + EhcacheClientEntity clientEntity2 = getEntity(service2); + + clientEntity1.createCache("testCache", getServerStoreConfiguration("test")); + clientEntity2.validateCache("testCache", getServerStoreConfiguration("test")); + + clientEntity1.releaseCache("testCache"); + try { + clientEntity1.destroyCache("testCache"); + fail("ClusteredTierDestructionException Expected"); + } catch (ClusteredTierDestructionException e) { + //nothing to do + } + + clusterControl.terminateActive(); + + clientEntity1.destroyCache("testCache"); + + service1.stop(); + service2.stop(); + + } + + @Test + public void testCreateServerStoreIsNotReplicatedIsFailsOnActive() throws Exception { + + } + private static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { Field entity = clusteringService.getClass().getDeclaredField("entity"); entity.setAccessible(true); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 59197f86c8..519af8174a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -34,6 +34,7 @@ import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage; @@ -139,6 +140,9 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws case CLEAR_INVALIDATION_COMPLETE: ehcacheStateService.getInvalidationTracker(((ClearInvalidationCompleteMessage)message).getCacheId()).setClearInProgress(false); break; + case SERVER_STORE_LIFECYCLE_REPLICATION_OP: + invokeRetiredServerStoreLifecycleMessage((ServerStoreLifeCycleReplicationMessage)message); + break; default: throw new IllegalMessageException("Unknown Retirement Message : " + message); } @@ -166,6 +170,22 @@ private void trackHashInvalidationForEventualCache(ChainReplicationMessage retir } } + private void invokeRetiredServerStoreLifecycleMessage(ServerStoreLifeCycleReplicationMessage storeLifeCycleReplicationMessage) throws ClusterException { + + LifecycleMessage message = storeLifeCycleReplicationMessage.getMessage(); + switch (message.operation()) { + case CREATE_SERVER_STORE: + createServerStore((CreateServerStore)message); + break; + case DESTROY_SERVER_STORE: + destroyServerStore((DestroyServerStore)message); + break; + default: + throw new IllegalMessageException("Unknown Replicated ServerStore operation : " + message); + } + } + + private void invokeServerStoreOperation(ServerStoreOpMessage message) throws ClusterException { ServerStoreImpl cacheStore = ehcacheStateService.getStore(message.getCacheId()); if (cacheStore == null) { 
@@ -229,10 +249,8 @@ private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterEx trackAndApplyMessage(message); break; case CREATE_SERVER_STORE: - createServerStore((CreateServerStore) message); - break; case DESTROY_SERVER_STORE: - destroyServerStore((DestroyServerStore) message); + ehcacheStateService.getClientMessageTracker().track(message.getId(), message.getClientId()); break; default: throw new IllegalMessageException("Unknown LifeCycle operation " + message); From 4080ea20cfd0d3d9f528f5daffce2f7fcbd40295 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Fri, 21 Oct 2016 20:31:51 +0530 Subject: [PATCH 100/218] Passive entity test + refactoring #1517 --- ...leMessageActivePassvieReplicationTest.java | 3 +- .../messages/PassiveReplicationMessage.java | 4 +- .../PassiveReplicationMessageCodec.java | 28 ++--- .../BasicLifeCyclePassiveReplicationTest.java | 38 +++++++ .../clustered/server/EhcacheActiveEntity.java | 10 ++ .../server/EhcachePassiveEntity.java | 5 +- .../server/EhcachePassiveEntityTest.java | 106 ++++++++++-------- 7 files changed, 126 insertions(+), 68 deletions(-) diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java index 6f50c3d01d..d1898e0acd 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java @@ -210,7 +210,8 @@ public void testDestroyServerStoreIsNotReplicatedIfFailsOnActive() throws Except clusterControl.terminateActive(); - clientEntity1.destroyCache("testCache"); + clientEntity2.releaseCache("testCache"); + clientEntity2.destroyCache("testCache"); service1.stop(); service2.stop(); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java index bb8bb1d24e..ad2da535a1 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java @@ -194,8 +194,8 @@ public static class ServerStoreLifeCycleReplicationMessage extends ClientIDTrack private final LifecycleMessage message; - public ServerStoreLifeCycleReplicationMessage(long msgId, UUID clientId, LifecycleMessage message) { - super(msgId, clientId); + public ServerStoreLifeCycleReplicationMessage(LifecycleMessage message) { + super(message.getId(), message.getClientId()); this.message = message; } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java index fb6b8b1d8f..6019107789 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java @@ -23,7 +23,6 @@ import java.nio.ByteBuffer; import java.util.UUID; -import static org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.*; class PassiveReplicationMessageCodec { 
@@ -45,7 +44,7 @@ public byte[] encode(PassiveReplicationMessage message) { encodedMsg.putLong(message.getId()); return encodedMsg.array(); case CHAIN_REPLICATION_OP: - ChainReplicationMessage chainReplicationMessage = (ChainReplicationMessage)message; + PassiveReplicationMessage.ChainReplicationMessage chainReplicationMessage = (PassiveReplicationMessage.ChainReplicationMessage)message; byte[] encodedChain = chainCodec.encode(chainReplicationMessage.getChain()); int cacheIdLen = chainReplicationMessage.getCacheId().length(); encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + MESSAGE_ID_SIZE + encodedChain.length + 2 * cacheIdLen); @@ -58,24 +57,23 @@ public byte[] encode(PassiveReplicationMessage message) { encodedMsg.put(encodedChain); return encodedMsg.array(); case CLEAR_INVALIDATION_COMPLETE: - ClearInvalidationCompleteMessage clearInvalidationCompleteMessage = (ClearInvalidationCompleteMessage)message; + PassiveReplicationMessage.ClearInvalidationCompleteMessage clearInvalidationCompleteMessage = (PassiveReplicationMessage.ClearInvalidationCompleteMessage)message; encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + 2 * clearInvalidationCompleteMessage.getCacheId().length()); encodedMsg.put(message.getOpCode()); CodecUtil.putStringAsCharArray(encodedMsg, clearInvalidationCompleteMessage.getCacheId()); return encodedMsg.array(); case INVALIDATION_COMPLETE: - InvalidationCompleteMessage invalidationCompleteMessage = (InvalidationCompleteMessage)message; + PassiveReplicationMessage.InvalidationCompleteMessage invalidationCompleteMessage = (PassiveReplicationMessage.InvalidationCompleteMessage)message; encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + KEY_SIZE + 2 * invalidationCompleteMessage.getCacheId().length()); encodedMsg.put(message.getOpCode()); encodedMsg.putLong(invalidationCompleteMessage.getKey()); CodecUtil.putStringAsCharArray(encodedMsg, invalidationCompleteMessage.getCacheId()); + return encodedMsg.array(); case SERVER_STORE_LIFECYCLE_REPLICATION_OP: - ServerStoreLifeCycleReplicationMessage storeLifeCycleReplicationMessage = (ServerStoreLifeCycleReplicationMessage)message; + PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage storeLifeCycleReplicationMessage = (PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage)message; byte[] encodedLifeCycleMsg = Util.marshall(storeLifeCycleReplicationMessage.getMessage()); - encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + MESSAGE_ID_SIZE + encodedLifeCycleMsg.length); + encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + encodedLifeCycleMsg.length); encodedMsg.put(message.getOpCode()); - encodedMsg.put(ClusteredEhcacheIdentity.serialize(message.getClientId())); - encodedMsg.putLong(message.getId()); encodedMsg.put(encodedLifeCycleMsg); return encodedMsg.array(); default: @@ -86,7 +84,7 @@ public byte[] encode(PassiveReplicationMessage message) { public EhcacheEntityMessage decode(byte[] payload) { ByteBuffer byteBuffer = ByteBuffer.wrap(payload); - ReplicationOp replicationOp = ReplicationOp.getReplicationOp(byteBuffer.get()); + PassiveReplicationMessage.ReplicationOp replicationOp = PassiveReplicationMessage.ReplicationOp.getReplicationOp(byteBuffer.get()); UUID clientId; long msgId; String cacheId; @@ -101,25 +99,23 @@ public EhcacheEntityMessage decode(byte[] payload) { byte[] encodedChain = new byte[byteBuffer.remaining()]; byteBuffer.get(encodedChain); Chain chain = chainCodec.decode(encodedChain); - return new ChainReplicationMessage(cacheId, key, chain, msgId, clientId); + return new 
PassiveReplicationMessage.ChainReplicationMessage(cacheId, key, chain, msgId, clientId); case CLIENTID_TRACK_OP: clientId = getClientId(byteBuffer); msgId = byteBuffer.getLong(); - return new ClientIDTrackerMessage(msgId, clientId); + return new PassiveReplicationMessage.ClientIDTrackerMessage(msgId, clientId); case CLEAR_INVALIDATION_COMPLETE: cacheId = CodecUtil.getStringFromBuffer(byteBuffer, byteBuffer.remaining()/2); - return new ClearInvalidationCompleteMessage(cacheId); + return new PassiveReplicationMessage.ClearInvalidationCompleteMessage(cacheId); case INVALIDATION_COMPLETE: key = byteBuffer.getLong(); cacheId = CodecUtil.getStringFromBuffer(byteBuffer, byteBuffer.remaining()/2); - return new InvalidationCompleteMessage(cacheId, key); + return new PassiveReplicationMessage.InvalidationCompleteMessage(cacheId, key); case SERVER_STORE_LIFECYCLE_REPLICATION_OP: - clientId = getClientId(byteBuffer); - msgId = byteBuffer.getLong(); byte[] encodedLifeCycle = new byte[byteBuffer.remaining()]; byteBuffer.get(encodedLifeCycle); LifecycleMessage lifecycleMessage = (LifecycleMessage)Util.unmarshall(encodedLifeCycle); - return new ServerStoreLifeCycleReplicationMessage(msgId, clientId, lifecycleMessage); + return new PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage(lifecycleMessage); default: throw new UnsupportedOperationException("This operation code is not supported : " + replicationOp); } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java index 27bcb4a0f2..d8a807b471 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java @@ -191,6 +191,44 @@ public void testValidateReplication() throws Exception { service.stop(); } + @Test + public void testDestroyServerStoreIsNotReplicatedIfFailsOnActive() throws Exception { + ClusteringServiceConfiguration configuration = + ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI()) + .autoCreate() + .build(); + + ClusteringService service1 = new ClusteringServiceFactory().create(configuration); + + ClusteringService service2 = new ClusteringServiceFactory().create(configuration); + + service1.start(null); + service2.start(null); + + EhcacheClientEntity clientEntity1 = getEntity(service1); + EhcacheClientEntity clientEntity2 = getEntity(service2); + + clientEntity1.createCache("testCache", getServerStoreConfiguration("primary-server-resource")); + clientEntity2.validateCache("testCache", getServerStoreConfiguration("primary-server-resource")); + + clientEntity1.releaseCache("testCache"); + try { + clientEntity1.destroyCache("testCache"); + fail("ClusteredTierDestructionException Expected"); + } catch (ClusteredTierDestructionException e) { + //nothing to do + } + + CLUSTER.getClusterControl().terminateActive(); + + clientEntity2.releaseCache("testCache"); + clientEntity2.destroyCache("testCache"); + + service1.stop(); + service2.stop(); + + } + private static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { Field entity = clusteringService.getClass().getDeclaredField("entity"); diff --git 
a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 2ebb96c5ff..5a9c84796d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -755,6 +755,11 @@ private void createServerStore(ClientDescriptor clientDescriptor, CreateServerSt serverStore.setEvictionListener(key -> invalidateHashAfterEviction(name, key)); attachStore(clientDescriptor, name); + try { + entityMessenger.messageSelfAndDeferRetirement(createServerStore, new ClientIDTrackerMessage.ServerStoreLifeCycleReplicationMessage(createServerStore)); + } catch (MessageCodecException e) { + throw new AssertionError("Codec error", e); + } } /** @@ -841,6 +846,11 @@ private void destroyServerStore(ClientDescriptor clientDescriptor, DestroyServer } storeClientMap.remove(name); + try { + entityMessenger.messageSelfAndDeferRetirement(destroyServerStore, new ClientIDTrackerMessage.ServerStoreLifeCycleReplicationMessage(destroyServerStore)); + } catch (MessageCodecException e) { + throw new AssertionError("Codec error", e); + } } private boolean isLifeCycleMessageDuplicate(LifecycleMessage message) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 519af8174a..71a0d65846 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -173,6 +173,7 @@ private void trackHashInvalidationForEventualCache(ChainReplicationMessage retir private void invokeRetiredServerStoreLifecycleMessage(ServerStoreLifeCycleReplicationMessage storeLifeCycleReplicationMessage) throws ClusterException { LifecycleMessage message = storeLifeCycleReplicationMessage.getMessage(); + ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); switch (message.operation()) { case CREATE_SERVER_STORE: createServerStore((CreateServerStore)message); @@ -280,8 +281,6 @@ private void createServerStore(CreateServerStore createServerStore) throws Clust throw new LifecycleException("Clustered tier can't be created with an Unknown resource pool"); } - trackAndApplyMessage(createServerStore); - final String name = createServerStore.getName(); // client cache identifier/name LOGGER.info("Creating new clustered tier '{}'", name); @@ -299,8 +298,6 @@ private void destroyServerStore(DestroyServerStore destroyServerStore) throws Cl throw new LifecycleException("Clustered Tier Manager is not configured"); } - trackAndApplyMessage(destroyServerStore); - String name = destroyServerStore.getName(); LOGGER.info("Destroying clustered tier '{}'", name); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index ac312d0c6d..4d52527364 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -21,9 +21,11 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import 
org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; import org.junit.Before; @@ -249,10 +251,12 @@ public void testCreateDedicatedServerStore() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); - passiveEntity.invoke(MESSAGE_FACTORY.createServerStore("cacheAlias", + EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("cacheAlias", new ServerStoreConfigBuilder() .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) - .build())); + .build()); + passiveEntity.invoke(createServerStore); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); @@ -287,11 +291,12 @@ public void testCreateSharedServerStore() throws Exception { .build())); passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("cacheAlias", - new ServerStoreConfigBuilder() - .shared("primary") - .build())); + EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("cacheAlias", + new ServerStoreConfigBuilder() + .shared("primary") + .build()); + passiveEntity.invoke(createServerStore); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("cacheAlias")); assertThat(registry.getStoreManagerService() @@ -322,11 +327,12 @@ public void testDestroyServerStore() throws Exception { .build())); passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("dedicatedCache", - new ServerStoreConfigBuilder() - .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) - .build())); + EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("dedicatedCache", + new ServerStoreConfigBuilder() + .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) + .build()); + passiveEntity.invoke(createServerStore); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); @@ -336,24 +342,29 @@ public void testDestroyServerStore() throws Exception { assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("sharedCache", - new ServerStoreConfigBuilder() - .shared("secondary") 
- .build())); + EhcacheEntityMessage sharedServerStore = MESSAGE_FACTORY.createServerStore("sharedCache", + new ServerStoreConfigBuilder() + .shared("secondary") + .build()); + passiveEntity.invoke(sharedServerStore); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)sharedServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); - passiveEntity.invoke(MESSAGE_FACTORY.destroyServerStore("sharedCache")); + EhcacheEntityMessage destroySharedCache = MESSAGE_FACTORY.destroyServerStore("sharedCache"); + passiveEntity.invoke(destroySharedCache); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)destroySharedCache)); assertThat(registry.getResource("serverResource1").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(4L + 4L))); assertThat(registry.getResource("serverResource2").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(8L))); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); - passiveEntity.invoke(MESSAGE_FACTORY.destroyServerStore("dedicatedCache")); + EhcacheEntityMessage destroyDedicatedCache = MESSAGE_FACTORY.destroyServerStore("dedicatedCache"); + passiveEntity.invoke(destroyDedicatedCache); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)destroyDedicatedCache)); assertThat(registry.getStoreManagerService().getStores(), is(Matchers.empty())); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), is(Matchers.empty())); @@ -384,31 +395,34 @@ public void testSharedPoolCacheNameCollision() throws Exception { passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); assertThat(registry.getStoreManagerService().getStores(), is(Matchers.empty())); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("dedicatedCache", - new ServerStoreConfigBuilder() - .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) - .build())); + EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("dedicatedCache", + new ServerStoreConfigBuilder() + .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) + .build()); + passiveEntity.invoke(createServerStore); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("sharedCache", - new ServerStoreConfigBuilder() - .shared("primary") - .build())); + EhcacheEntityMessage sharedServerStore = MESSAGE_FACTORY.createServerStore("sharedCache", + new ServerStoreConfigBuilder() + .shared("primary") + .build()); + passiveEntity.invoke(sharedServerStore); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)sharedServerStore)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), 
containsInAnyOrder("dedicatedCache")); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("primary", - new ServerStoreConfigBuilder() - .dedicated("serverResource2", 4, MemoryUnit.MEGABYTES) - .build())); + EhcacheEntityMessage createServerStore2 = MESSAGE_FACTORY.createServerStore("primary", + new ServerStoreConfigBuilder() + .dedicated("serverResource2", 4, MemoryUnit.MEGABYTES) + .build()); + passiveEntity.invoke(createServerStore2); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore2)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); assertThat(registry.getStoreManagerService() @@ -452,21 +466,23 @@ public void testDestroyWithStores() throws Exception { .build())); passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("dedicatedCache", - new ServerStoreConfigBuilder() - .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) - .build())); + EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("dedicatedCache", + new ServerStoreConfigBuilder() + .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) + .build()); + passiveEntity.invoke(createServerStore); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); assertThat(registry.getResource("serverResource1").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(4L + 4L))); assertThat(registry.getResource("serverResource2").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(8L))); - passiveEntity.invoke( - MESSAGE_FACTORY.createServerStore("sharedCache", - new ServerStoreConfigBuilder() - .shared("secondary") - .build())); + EhcacheEntityMessage sharedServerStore = MESSAGE_FACTORY.createServerStore("sharedCache", + new ServerStoreConfigBuilder() + .shared("secondary") + .build()); + passiveEntity.invoke(sharedServerStore); + passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)sharedServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); From 8b428a7a99aaa6e90483337b66e155bbeb7b000d Mon Sep 17 00:00:00 2001 From: Abhilash Date: Thu, 27 Oct 2016 17:33:10 +0530 Subject: [PATCH 101/218] Promoted Active avoids duplicate messages #1557 --- .../clustered/server/EhcacheActiveEntity.java | 38 +-- .../server/EhcacheActiveEntityTest.java | 225 +++++++++++++++++- 2 files changed, 244 insertions(+), 19 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 5a9c84796d..c2e7a4164a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -481,19 +481,25 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client return responseFactory.response(cacheStore.get(getMessage.getKey())); } case APPEND: { - ServerStoreOpMessage.AppendMessage appendMessage = 
(ServerStoreOpMessage.AppendMessage)message; - cacheStore.append(appendMessage.getKey(), appendMessage.getPayload()); - sendMessageToSelfAndDeferRetirement(appendMessage, cacheStore.get(appendMessage.getKey())); - invalidateHashForClient(clientDescriptor, appendMessage.getCacheId(), appendMessage.getKey()); + if (!isMessageDuplicate(message)) { + ServerStoreOpMessage.AppendMessage appendMessage = (ServerStoreOpMessage.AppendMessage)message; + cacheStore.getAndAppend(appendMessage.getKey(), appendMessage.getPayload()); + sendMessageToSelfAndDeferRetirement(appendMessage, cacheStore.get(appendMessage.getKey())); + invalidateHashForClient(clientDescriptor, appendMessage.getCacheId(), appendMessage.getKey()); + } return responseFactory.success(); } case GET_AND_APPEND: { ServerStoreOpMessage.GetAndAppendMessage getAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage)message; - Chain result = cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); - sendMessageToSelfAndDeferRetirement(getAndAppendMessage, cacheStore.get(getAndAppendMessage.getKey())); - EhcacheEntityResponse response = responseFactory.response(result); - invalidateHashForClient(clientDescriptor, getAndAppendMessage.getCacheId(), getAndAppendMessage.getKey()); - return response; + if (!isMessageDuplicate(message)) { + + Chain result = cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); + sendMessageToSelfAndDeferRetirement(getAndAppendMessage, cacheStore.get(getAndAppendMessage.getKey())); + EhcacheEntityResponse response = responseFactory.response(result); + invalidateHashForClient(clientDescriptor, getAndAppendMessage.getCacheId(), getAndAppendMessage.getKey()); + return response; + } + return responseFactory.response(cacheStore.get(getAndAppendMessage.getKey())); } case REPLACE: { ServerStoreOpMessage.ReplaceAtHeadMessage replaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage) message; @@ -509,9 +515,11 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client return responseFactory.success(); } case CLEAR: { - String cacheId = message.getCacheId(); - cacheStore.clear(); - invalidateAll(clientDescriptor, cacheId); + if (!isMessageDuplicate(message)) { + String cacheId = message.getCacheId(); + cacheStore.clear(); + invalidateAll(clientDescriptor, cacheId); + } return responseFactory.success(); } default: @@ -737,7 +745,7 @@ private void createServerStore(ClientDescriptor clientDescriptor, CreateServerSt if(createServerStore.getStoreConfiguration().getPoolAllocation() instanceof PoolAllocation.Unknown) { throw new LifecycleException("Clustered tier can't be created with an Unknown resource pool"); } - boolean isDuplicate = isLifeCycleMessageDuplicate(createServerStore); + boolean isDuplicate = isMessageDuplicate(createServerStore); final String name = createServerStore.getName(); // client cache identifier/name ServerStoreImpl serverStore; if (!isDuplicate) { @@ -837,7 +845,7 @@ private void destroyServerStore(ClientDescriptor clientDescriptor, DestroyServer throw new ResourceBusyException("Cannot destroy clustered tier '" + name + "': in use by " + clients.size() + " other client(s)"); } - boolean isDuplicate = isLifeCycleMessageDuplicate(destroyServerStore); + boolean isDuplicate = isMessageDuplicate(destroyServerStore); if (!isDuplicate) { LOGGER.info("Client {} destroying clustered tier '{}'", clientDescriptor, name); @@ -853,7 +861,7 @@ private void destroyServerStore(ClientDescriptor clientDescriptor, 
DestroyServer } } - private boolean isLifeCycleMessageDuplicate(LifecycleMessage message) { + private boolean isMessageDuplicate(EhcacheEntityMessage message) { return ehcacheStateService.getClientMessageTracker().isDuplicate(message.getId(), message.getClientId()); } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 14538de24f..e93f23193c 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -21,6 +21,7 @@ import org.ehcache.clustered.common.ServerSideConfiguration.Pool; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.internal.exceptions.ClusterException; import org.ehcache.clustered.common.internal.exceptions.InvalidServerSideConfigurationException; import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; @@ -32,10 +33,17 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Failure; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponseFactory; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.server.internal.messages.EntityStateSyncMessage; +import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.InvalidationTracker; import org.hamcrest.Matchers; @@ -45,6 +53,7 @@ import org.terracotta.entity.ClientCommunicator; import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.IEntityMessenger; +import org.terracotta.entity.MessageCodecException; import org.terracotta.entity.PassiveSynchronizationChannel; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; @@ -63,6 +72,7 @@ import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; + import org.ehcache.clustered.common.PoolAllocation.Dedicated; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Type; @@ -77,7 +87,10 @@ import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static 
org.mockito.Mockito.times; import static org.mockito.Mockito.verify; public class EhcacheActiveEntityTest { @@ -2179,7 +2192,7 @@ public void testDestroyServerStore() throws Exception { /** * Tests the destroy server store operation before the use of either a - * {@link LifecycleMessage.CreateServerStore CreateServerStore} + * {@link CreateServerStore CreateServerStore} * {@link LifecycleMessage.ValidateServerStore ValidateServerStore} * operation. */ @@ -2642,7 +2655,7 @@ public void testSyncToPassive() throws Exception { @Test public void testLoadExistingRecoversInflightInvalidationsForEventualCache() { final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); - registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); @@ -2680,6 +2693,191 @@ public void testLoadExistingRecoversInflightInvalidationsForEventualCache() { } + @Test + public void testCreateServerStoreSendsPassiveReplicationMessageIfSuccessful() throws MessageCodecException { + + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + IEntityMessenger entityMessenger = registry.getEntityMessenger(); + + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + try { + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("test", + new ServerStoreConfigBuilder() + .shared("primary1") + .build())); + } catch (Exception e) { + //nothing to do + } + + verify(entityMessenger, times(0)).messageSelf(any()); + verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(ValidateStoreManager.class), any(ClientIDTrackerMessage.class)); + + reset(entityMessenger); + + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("test", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + verify(entityMessenger, times(0)).messageSelf(any()); + verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(CreateServerStore.class), any(ServerStoreLifeCycleReplicationMessage.class)); + + } + + @Test + public void testDestroyServerStoreSendsPassiveReplicationMessageIfSuccessful() throws MessageCodecException { + + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + IEntityMessenger entityMessenger = registry.getEntityMessenger(); + + ClientDescriptor client1 = new TestClientDescriptor(); + ClientDescriptor client2 = new TestClientDescriptor(); + activeEntity.connected(client1); + activeEntity.connected(client2); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, 
MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client1, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client1, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client1, + MESSAGE_FACTORY.createServerStore("test", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + UUID client2Id = UUID.randomUUID(); + MESSAGE_FACTORY.setClientId(client2Id); + + activeEntity.invoke(client2, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client2, + MESSAGE_FACTORY.validateServerStore("test", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + MESSAGE_FACTORY.setClientId(CLIENT_ID); + try { + activeEntity.invoke(client1, + MESSAGE_FACTORY.destroyServerStore("test")); + } catch (Exception e) { + //nothing to do + } + + verify(entityMessenger, times(0)).messageSelf(any()); + verify(entityMessenger, times(3)).messageSelfAndDeferRetirement(any(), any()); + + reset(entityMessenger); + + MESSAGE_FACTORY.setClientId(client2Id); + + activeEntity.invoke(client2, + MESSAGE_FACTORY.releaseServerStore("test")); + + MESSAGE_FACTORY.setClientId(CLIENT_ID); + activeEntity.invoke(client1, + MESSAGE_FACTORY.releaseServerStore("test")); + activeEntity.invoke(client1, + MESSAGE_FACTORY.destroyServerStore("test")); + + verify(entityMessenger, times(0)).messageSelf(any()); + verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(DestroyServerStore.class), any(ServerStoreLifeCycleReplicationMessage.class)); + + } + + @Test + public void testPromotedActiveIgnoresDuplicateMessages() throws MessageCodecException, ClusterException { + + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + IEntityMessenger entityMessenger = registry.getEntityMessenger(); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .build(); + + EhcacheStateService ehcacheStateService = registry.getStoreManagerService(); + ehcacheStateService.configure(serverSideConfiguration); + + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfigBuilder() + .shared("primary") + .setActualKeyType(Long.class) + .setActualValueType(Long.class) + .build(); + + ehcacheStateService.createStore("test", serverStoreConfiguration); + + ClientMessageTracker clientMessageTracker = ehcacheStateService.getClientMessageTracker(); + clientMessageTracker.add(CLIENT_ID); + + Random random = new Random(); + Set msgIds = new HashSet<>(); + random.longs(100).distinct().forEach(x -> { + msgIds.add(x); + clientMessageTracker.track(x, CLIENT_ID); + }); + + Set applied = new HashSet<>(); + msgIds.stream().limit(80).forEach(x -> { + applied.add(x); + clientMessageTracker.applied(x, CLIENT_ID); + }); + + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, MESSAGE_FACTORY.validateServerStore("test", serverStoreConfiguration)); + + reset(entityMessenger); + ServerStoreMessageFactory serverStoreMessageFactory = new ServerStoreMessageFactory("test", CLIENT_ID); + EhcacheEntityResponseFactory 
entityResponseFactory = new EhcacheEntityResponseFactory(); + applied.forEach(y -> { + EhcacheEntityMessage message = serverStoreMessageFactory.appendOperation(y, createPayload(y)); + message.setId(y); + assertThat(activeEntity.invoke(client, message), is(entityResponseFactory.success())); + }); + + verify(entityMessenger, times(0)).messageSelfAndDeferRetirement(any(), any()); + } + private void assertSuccess(EhcacheEntityResponse response) throws Exception { if (!response.equals(EhcacheEntityResponse.Success.INSTANCE)) { throw ((Failure) response).getCause(); @@ -2815,6 +3013,10 @@ private static final class OffHeapIdentifierRegistry implements ServiceRegistry private EhcacheStateServiceImpl storeManagerService; + private IEntityMessenger entityMessenger; + + private ClientCommunicator clientCommunicator; + private final Map pools = new HashMap(); @@ -2857,6 +3059,15 @@ private EhcacheStateServiceImpl getStoreManagerService() { return this.storeManagerService; } + + private IEntityMessenger getEntityMessenger() { + return entityMessenger; + } + + private ClientCommunicator getClientCommunicator() { + return clientCommunicator; + } + private static Set getIdentifiers(Set pools) { Set names = new HashSet(); for (OffHeapResourceIdentifier identifier: pools) { @@ -2873,7 +3084,10 @@ public T getService(ServiceConfiguration serviceConfiguration) { final OffHeapResourceIdentifier resourceIdentifier = (OffHeapResourceIdentifier) serviceConfiguration; return (T) this.pools.get(resourceIdentifier); } else if (serviceConfiguration.getServiceType().equals(ClientCommunicator.class)) { - return (T) mock(ClientCommunicator.class); + if (this.clientCommunicator == null) { + this.clientCommunicator = mock(ClientCommunicator.class); + } + return (T) this.clientCommunicator; } else if(serviceConfiguration.getServiceType().equals(OffHeapResources.class)) { return (T) new OffHeapResources() { @Override @@ -2887,7 +3101,10 @@ public Set getAllIdentifiers() { } return (T) (this.storeManagerService); } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { - return (T) mock(IEntityMessenger.class); + if (this.entityMessenger == null) { + this.entityMessenger = mock(IEntityMessenger.class); + } + return (T) this.entityMessenger; } else if(serviceConfiguration instanceof ConsumerManagementRegistryConfiguration) { return (T) mock(ConsumerManagementRegistry.class); } From fe354b157b83d206ab021a35ca256feea2850b99 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 28 Oct 2016 14:29:33 +0200 Subject: [PATCH 102/218] :bug: Fix #1569 Proper ByteBuffer usage * Set limit based on known limit instead of capacity * rewind instead of flip after decoding --- .../internal/store/operations/BaseKeyValueOperation.java | 3 ++- .../client/internal/store/operations/ChainResolver.java | 2 +- .../store/operations/ConditionalReplaceOperation.java | 5 +++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java index 3059e109af..b2ecae09eb 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/BaseKeyValueOperation.java @@ -46,10 +46,11 @@ abstract class BaseKeyValueOperation implements Operation { } this.timeStamp 
= buffer.getLong(); int keySize = buffer.getInt(); + int maxLimit = buffer.limit(); buffer.limit(buffer.position() + keySize); ByteBuffer keyBlob = buffer.slice(); buffer.position(buffer.limit()); - buffer.limit(buffer.capacity()); + buffer.limit(maxLimit); try { this.key = keySerializer.read(keyBlob); } catch (ClassNotFoundException e) { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java index c705f9b4ff..66a8966609 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ChainResolver.java @@ -122,7 +122,7 @@ public V value() { } } } else { - payload.flip(); + payload.rewind(); chainBuilder = chainBuilder.add(payload); } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java index afa68fb3e7..96245e926d 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java @@ -53,16 +53,17 @@ public ConditionalReplaceOperation(final K key, final V oldValue, final V newVal } this.timeStamp = buffer.getLong(); int keySize = buffer.getInt(); + int maxLimit = buffer.limit(); buffer.limit(buffer.position() + keySize); ByteBuffer keyBlob = buffer.slice(); buffer.position(buffer.limit()); - buffer.limit(buffer.capacity()); + buffer.limit(maxLimit); int oldValueSize = buffer.getInt(); buffer.limit(buffer.position() + oldValueSize); ByteBuffer oldValueBlob = buffer.slice(); buffer.position(buffer.limit()); - buffer.limit(buffer.capacity()); + buffer.limit(maxLimit); ByteBuffer valueBlob = buffer.slice(); From 4b498a403de1fa3791eb4a7ba3f5f369c7a29da6 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Thu, 27 Oct 2016 11:27:32 -0400 Subject: [PATCH 103/218] :bug: a stat observer was missing and one was duplicated --- .../org/ehcache/clustered/server/EhcacheStateServiceImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index 8330a8a694..b7bdfb09a6 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -74,7 +74,7 @@ public class EhcacheStateServiceImpl implements EhcacheStateService { static { STAT_STORE_METHOD_REFERENCES.put("allocatedMemory", ServerStoreImpl::getAllocatedMemory); - STAT_STORE_METHOD_REFERENCES.put("dataOccupiedMemory", ServerStoreImpl::getDataOccupiedMemory); + STAT_STORE_METHOD_REFERENCES.put("dataAllocatedMemory", ServerStoreImpl::getDataAllocatedMemory); STAT_STORE_METHOD_REFERENCES.put("occupiedMemory", ServerStoreImpl::getOccupiedMemory); STAT_STORE_METHOD_REFERENCES.put("dataOccupiedMemory", ServerStoreImpl::getDataOccupiedMemory); STAT_STORE_METHOD_REFERENCES.put("entries", ServerStoreImpl::getSize); From 
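[Editorial note] A side note on the ByteBuffer fix in patch 102 above (#1569): when a key or value blob is sliced out of a shared buffer, the limit must afterwards be restored to the previously saved limit, not to capacity(), because the logical end of the encoded data can sit well before the end of the backing storage. Likewise, rewind() resets the position without touching the limit, while flip() would set the limit from the current position and could truncate the payload. The class below is a minimal standalone illustration of the save-and-restore pattern, not the project's code.

import java.nio.ByteBuffer;

public class LengthPrefixedBlobs {

  // Slices the next length-prefixed blob out of 'buffer' and leaves the buffer
  // positioned just after it. The original limit is saved and restored; restoring
  // to capacity() instead would wrongly expose whatever lies between the logical
  // end of the encoded data and the end of the backing storage.
  static ByteBuffer nextBlob(ByteBuffer buffer) {
    int size = buffer.getInt();             // length prefix
    int savedLimit = buffer.limit();        // the known limit, not capacity()
    buffer.limit(buffer.position() + size);
    ByteBuffer blob = buffer.slice();       // view over exactly 'size' bytes
    buffer.position(buffer.limit());        // step over the blob
    buffer.limit(savedLimit);               // restore the real limit
    return blob;
  }

  public static void main(String[] args) {
    ByteBuffer buffer = ByteBuffer.allocate(64);      // capacity larger than the content
    buffer.putInt(3).put(new byte[] {1, 2, 3});       // "key" blob
    buffer.putInt(2).put(new byte[] {9, 9});          // "value" blob
    buffer.flip();                                    // limit = 13, capacity = 64

    ByteBuffer key = nextBlob(buffer);
    ByteBuffer value = nextBlob(buffer);
    System.out.println(key.remaining() + " key bytes, " + value.remaining() + " value bytes");
    // Restoring the limit to capacity() after the key slice would have left
    // 51 bytes of unwritten storage inside the buffer's apparent bounds,
    // corrupting any later slicing of the same buffer.
  }
}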
f4b024208e69af005153116cd936b043f9324004 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Thu, 27 Oct 2016 11:29:23 -0400 Subject: [PATCH 104/218] :art: filtering exposed objects and put allocation type in lowercase --- .../management/ClusteringManagementServiceTest.java | 6 +++--- .../management/PoolSettingsManagementProvider.java | 9 ++++++++- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index b126d6d8d4..5f3b8c433e 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -141,21 +141,21 @@ public void test_D_server_capabilities_exposed() throws Exception { assertThat(settings.get("type"), equalTo("PoolBinding")); assertThat(settings.get("serverResource"), equalTo("primary-server-resource")); assertThat(settings.get("size"), equalTo(16 * 1024 * 1024L)); - assertThat(settings.get("allocationType"), equalTo("SHARED")); + assertThat(settings.get("allocationType"), equalTo("shared")); settings = (Settings) descriptors.get(1); assertThat(settings.get("alias"), equalTo("resource-pool-a")); assertThat(settings.get("type"), equalTo("PoolBinding")); assertThat(settings.get("serverResource"), equalTo("secondary-server-resource")); assertThat(settings.get("size"), equalTo(28 * 1024 * 1024L)); - assertThat(settings.get("allocationType"), equalTo("SHARED")); + assertThat(settings.get("allocationType"), equalTo("shared")); settings = (Settings) descriptors.get(2); assertThat(settings.get("alias"), equalTo("dedicated-cache-1")); assertThat(settings.get("type"), equalTo("PoolBinding")); assertThat(settings.get("serverResource"), equalTo("primary-server-resource")); assertThat(settings.get("size"), equalTo(4 * 1024 * 1024L)); - assertThat(settings.get("allocationType"), equalTo("DEDICATED")); + assertThat(settings.get("allocationType"), equalTo("dedicated")); settings = (Settings) descriptors.get(3); assertThat(settings.get("type"), equalTo("PoolSettingsManagementProvider")); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java index c19723a021..db1bec6a0b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java @@ -19,12 +19,14 @@ import org.terracotta.management.model.capabilities.descriptors.Descriptor; import org.terracotta.management.model.capabilities.descriptors.Settings; import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.ExposedObject; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; import java.util.Collection; import java.util.Collections; +import java.util.stream.Collectors; @Named("PoolSettings") @RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) @@ -46,6 +48,11 @@ public Collection 
getDescriptors() { return descriptors; } + @Override + public Collection> getExposedObjects() { + return super.getExposedObjects().stream().filter(e -> e.getTarget() != PoolBinding.ALL_SHARED).collect(Collectors.toList()); + } + @Override protected ExposedPoolBinding wrap(PoolBinding managedObject) { return new ExposedPoolBinding(managedObject, getConsumerId()); @@ -69,7 +76,7 @@ public Collection getDescriptors() { Collections.singleton(new Settings(getContext()) .set("serverResource", getBinding().getValue().getServerResource()) .set("size", getBinding().getValue().getSize()) - .set("allocationType", getBinding().getAllocationType())); + .set("allocationType", getBinding().getAllocationType().name().toLowerCase())); } } From e6930586fe44e3c67bbeae17f7bbb0cbe1adb244 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Thu, 27 Oct 2016 11:32:49 -0400 Subject: [PATCH 105/218] :art: rename method to match other ones --- .../java/org/ehcache/clustered/server/EhcacheActiveEntity.java | 2 +- .../org/ehcache/clustered/server/management/Management.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index c2e7a4164a..8ffca46d91 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -821,7 +821,7 @@ private void releaseServerStore(ClientDescriptor clientDescriptor, ReleaseServer if (!removedFromClient) { throw new InvalidStoreException("Clustered tier '" + name + "' is not in use by client"); } else { - management.releaseStore(clientDescriptor, clientStateMap.get(clientDescriptor), name); + management.storeReleased(clientDescriptor, clientStateMap.get(clientDescriptor), name); } } else { throw new InvalidStoreException("Clustered tier '" + name + "' does not exist"); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java index 79b1239c0d..5569551909 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -132,7 +132,7 @@ public void storeAttached(ClientDescriptor clientDescriptor, ClientState clientS } } - public void releaseStore(ClientDescriptor clientDescriptor, ClientState clientState, String storeName) { + public void storeReleased(ClientDescriptor clientDescriptor, ClientState clientState, String storeName) { if (managementRegistry != null) { managementRegistry.refresh(); managementRegistry.pushServerEntityNotification(new ClientBinding(clientDescriptor, clientState), "EHCACHE_SERVER_STORE_RELEASED", Context.create("storeName", storeName)); From 68ede97cc1de28b6fd8eb44f08270cc047860716 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Thu, 27 Oct 2016 11:34:36 -0400 Subject: [PATCH 106/218] :art: add some logging --- .../server/management/Management.java | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java index 5569551909..fa258ed248 100644 --- 
a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -19,6 +19,8 @@ import org.ehcache.clustered.server.ClientState; import org.ehcache.clustered.server.ServerStoreImpl; import org.ehcache.clustered.server.state.EhcacheStateService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.ServiceRegistry; import org.terracotta.management.model.context.Context; @@ -32,6 +34,8 @@ public class Management { + private static final Logger LOGGER = LoggerFactory.getLogger(Management.class); + private final ConsumerManagementRegistry managementRegistry; private final ServiceRegistry services; private final EhcacheStateService ehcacheStateService; @@ -43,10 +47,13 @@ public Management(ServiceRegistry services, EhcacheStateService ehcacheStateServ this.ehcacheStateService = ehcacheStateService; this.offHeapResourceIdentifiers = offHeapResourceIdentifiers; if (managementRegistry != null) { - // add some providers to describe and compute stats + // expose settings about attached stores managementRegistry.addManagementProvider(new ClientStateSettingsManagementProvider()); + // expose settings about off-heap server service managementRegistry.addManagementProvider(new OffHeapResourceSettingsManagementProvider()); + // expose settings about server stores managementRegistry.addManagementProvider(new ServerStoreSettingsManagementProvider()); + // expose settings about pools managementRegistry.addManagementProvider(new PoolSettingsManagementProvider(ehcacheStateService)); } } @@ -54,6 +61,8 @@ public Management(ServiceRegistry services, EhcacheStateService ehcacheStateServ // the goal of the following code is to send the management metadata from the entity into the monitoring tre AFTER the entity creation public void init() { if (managementRegistry != null) { + LOGGER.trace("init()"); + managementRegistry.register(ehcacheStateService); managementRegistry.register(PoolBinding.ALL_SHARED); @@ -69,12 +78,14 @@ public void init() { public void close() { if (managementRegistry != null) { + LOGGER.trace("close()"); managementRegistry.close(); } } public void clientConnected(ClientDescriptor clientDescriptor, ClientState clientState) { if (managementRegistry != null) { + LOGGER.trace("clientConnected({})", clientDescriptor); managementRegistry.registerAndRefresh(new ClientStateBinding(clientDescriptor, clientState)); } } @@ -82,12 +93,14 @@ public void clientConnected(ClientDescriptor clientDescriptor, ClientState clien public void clientDisconnected(ClientDescriptor clientDescriptor, ClientState clientState) { if (managementRegistry != null) { + LOGGER.trace("clientDisconnected({})", clientDescriptor); managementRegistry.unregisterAndRefresh(new ClientStateBinding(clientDescriptor, clientState)); } } public void clientReconnected(ClientDescriptor clientDescriptor, ClientState clientState) { if (managementRegistry != null) { + LOGGER.trace("clientReconnected({})", clientDescriptor); managementRegistry.refresh(); managementRegistry.pushServerEntityNotification(new ClientStateBinding(clientDescriptor, clientState), "EHCACHE_CLIENT_RECONNECTED"); } @@ -95,6 +108,7 @@ public void clientReconnected(ClientDescriptor clientDescriptor, ClientState cli public void sharedPoolsConfigured() { if (managementRegistry != null) { + LOGGER.trace("sharedPoolsConfigured()"); ehcacheStateService.getSharedResourcePools() 
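[Editorial note] A small aside on the logging added in this patch: the new calls use SLF4J parameterized messages, so the message is only rendered when TRACE is actually enabled. The snippet below is a standalone sketch of that idiom; the class and method names are illustrative, not taken from the codebase.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TraceLoggingExample {

  private static final Logger LOGGER = LoggerFactory.getLogger(TraceLoggingExample.class);

  void clientConnected(Object clientDescriptor) {
    // The {} placeholder defers string construction until TRACE is enabled.
    LOGGER.trace("clientConnected({})", clientDescriptor);
  }

  void expensiveDetail(Object state) {
    // An explicit guard is only needed when computing the argument itself is costly.
    if (LOGGER.isTraceEnabled()) {
      LOGGER.trace("state dump: {}", buildExpensiveDump(state));
    }
  }

  private String buildExpensiveDump(Object state) {
    return String.valueOf(state); // stand-in for a costly rendering
  }
}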
.entrySet() .stream() @@ -106,6 +120,7 @@ public void sharedPoolsConfigured() { public void clientValidated(ClientDescriptor clientDescriptor, ClientState clientState) { if (managementRegistry != null) { + LOGGER.trace("clientValidated({})", clientDescriptor); managementRegistry.refresh(); managementRegistry.pushServerEntityNotification(new ClientStateBinding(clientDescriptor, clientState), "EHCACHE_CLIENT_VALIDATED"); } @@ -113,6 +128,7 @@ public void clientValidated(ClientDescriptor clientDescriptor, ClientState clien public void serverStoreCreated(String name) { if (managementRegistry != null) { + LOGGER.trace("serverStoreCreated({})", name); ServerStoreImpl serverStore = ehcacheStateService.getStore(name); ServerStoreBinding serverStoreBinding = new ServerStoreBinding(name, serverStore); managementRegistry.register(serverStoreBinding); @@ -127,6 +143,7 @@ public void serverStoreCreated(String name) { public void storeAttached(ClientDescriptor clientDescriptor, ClientState clientState, String storeName) { if (managementRegistry != null) { + LOGGER.trace("storeAttached({}, {})", clientDescriptor, storeName); managementRegistry.refresh(); managementRegistry.pushServerEntityNotification(new ClientBinding(clientDescriptor, clientState), "EHCACHE_SERVER_STORE_ATTACHED", Context.create("storeName", storeName)); } @@ -134,6 +151,7 @@ public void storeAttached(ClientDescriptor clientDescriptor, ClientState clientS public void storeReleased(ClientDescriptor clientDescriptor, ClientState clientState, String storeName) { if (managementRegistry != null) { + LOGGER.trace("storeReleased({}, {})", clientDescriptor, storeName); managementRegistry.refresh(); managementRegistry.pushServerEntityNotification(new ClientBinding(clientDescriptor, clientState), "EHCACHE_SERVER_STORE_RELEASED", Context.create("storeName", storeName)); } @@ -142,6 +160,7 @@ public void storeReleased(ClientDescriptor clientDescriptor, ClientState clientS public void serverStoreDestroyed(String name) { ServerStoreImpl serverStore = ehcacheStateService.getStore(name); if (managementRegistry != null && serverStore != null) { + LOGGER.trace("serverStoreDestroyed({})", name); ServerStoreBinding managedObject = new ServerStoreBinding(name, serverStore); managementRegistry.pushServerEntityNotification(managedObject, "EHCACHE_SERVER_STORE_DESTROYED"); managementRegistry.unregister(managedObject); From 803379267579bf5f1d26abfe272b5aa061c00a23 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Thu, 27 Oct 2016 11:38:20 -0400 Subject: [PATCH 107/218] :heavy_plus_sign: Exposing server-side stats (memory usage) - Close #1551 - Close #1530 --- build.gradle | 2 +- .../ClusteringManagementServiceTest.java | 93 +++++++- clustered/server/build.gradle | 3 + .../server/EhcacheStateServiceImpl.java | 67 ++---- .../management/AbstractExposedStatistics.java | 202 ++++++++++++++++++ .../AbstractStatisticsManagementProvider.java | 90 ++++++++ .../server/management/Management.java | 92 ++++++++ .../PoolStatisticsManagementProvider.java | 87 ++++++++ ...rverStoreStatisticsManagementProvider.java | 73 +++++++ .../StatisticCollectorManagementProvider.java | 166 ++++++++++++++ .../management/StatisticConfiguration.java | 148 +++++++++++++ .../server/state/EhcacheStateService.java | 4 + .../server/state/ResourcePageSource.java | 72 +++++++ 13 files changed, 1035 insertions(+), 64 deletions(-) create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java create mode 100644 
clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticConfiguration.java create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java diff --git a/build.gradle b/build.gradle index 1d99bfbdca..c34a190ca0 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.8.beta6' + terracottaPlatformVersion = '5.0.9.beta' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.9.beta' terracottaCoreVersion = '5.0.9-beta2' diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 5f3b8c433e..6fd0c0889a 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -18,7 +18,6 @@ import org.ehcache.Cache; import org.ehcache.config.units.EntryUnit; import org.ehcache.config.units.MemoryUnit; -import org.hamcrest.CoreMatchers; import org.junit.BeforeClass; import org.junit.FixMethodOrder; import org.junit.Ignore; @@ -40,8 +39,8 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.TreeSet; import java.util.stream.Collectors; @@ -58,11 +57,13 @@ @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class ClusteringManagementServiceTest extends AbstractClusteringManagementTest { - private static final Collection ONHEAP_DESCRIPTORS = new ArrayList<>(); - private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList<>(); - private static final Collection DISK_DESCRIPTORS = new ArrayList<>(); - private static final Collection CLUSTERED_DESCRIPTORS = new ArrayList<>(); - private static final Collection CACHE_DESCRIPTORS = new ArrayList<>(); + private static final Collection ONHEAP_DESCRIPTORS = new ArrayList<>(); + private static final Collection OFFHEAP_DESCRIPTORS = new ArrayList<>(); + private static final Collection DISK_DESCRIPTORS = new ArrayList<>(); + private static final Collection CLUSTERED_DESCRIPTORS = new ArrayList<>(); + private static final Collection CACHE_DESCRIPTORS = new ArrayList<>(); + private static final Collection POOL_DESCRIPTORS = new ArrayList<>(); + private static final Collection SERVER_STORE_DESCRIPTORS = new ArrayList<>(); @Test @Ignore("This is not a test, but something useful to show a json print of a cluster topology with all management metadata inside") @@ -115,16 +116,26 @@ public void test_C_client_capabilities_exposed() throws Exception { public void test_D_server_capabilities_exposed() throws Exception { Capability[] capabilities = 
consumer.readTopology().getSingleStripe().getActiveServerEntity(serverEntityIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); - assertThat(capabilities.length, equalTo(4)); + assertThat(capabilities.length, equalTo(7)); assertThat(capabilities[0].getName(), equalTo("ClientStateSettings")); assertThat(capabilities[1].getName(), equalTo("OffHeapResourceSettings")); assertThat(capabilities[2].getName(), equalTo("ServerStoreSettings")); assertThat(capabilities[3].getName(), equalTo("PoolSettings")); + assertThat(capabilities[4].getName(), equalTo("ServerStoreStatistics")); + assertThat(capabilities[5].getName(), equalTo("PoolStatistics")); + assertThat(capabilities[6].getName(), equalTo("StatisticCollector")); assertThat(capabilities[1].getDescriptors(), hasSize(3)); // time + 2 resources assertThat(capabilities[2].getDescriptors(), hasSize(4)); // time descriptor + 3 dedicated store + // stats + + assertThat(capabilities[4].getDescriptors(), containsInAnyOrder(SERVER_STORE_DESCRIPTORS.toArray())); + assertThat(capabilities[4].getDescriptors(), hasSize(SERVER_STORE_DESCRIPTORS.size())); + assertThat(capabilities[5].getDescriptors(), containsInAnyOrder(POOL_DESCRIPTORS.toArray())); + assertThat(capabilities[5].getDescriptors(), hasSize(POOL_DESCRIPTORS.size())); + // ClientStateSettings assertThat(capabilities[0].getDescriptors(), hasSize(1)); @@ -207,7 +218,7 @@ public void test_G_stats_collection() throws Exception { cache1.get("key1"); cache1.get("key2"); - + List allStats = new ArrayList<>(); long val = 0; // it could be several seconds before the sampled stats could become available @@ -216,6 +227,8 @@ public void test_G_stats_collection() throws Exception { // get the stats (we are getting the primitive counter, not the sample history) List stats = waitForNextStats(); + allStats.addAll(stats); + for (ContextualStatistics stat : stats) { if (stat.getContext().get("cacheName").equals("dedicated-cache-1")) { Sample[] samples = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); @@ -233,6 +246,8 @@ public void test_G_stats_collection() throws Exception { do { List stats = waitForNextStats(); + allStats.addAll(stats); + for (ContextualStatistics stat : stats) { if (stat.getContext().get("cacheName").equals("dedicated-cache-1")) { Sample[] samples = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); @@ -244,7 +259,51 @@ public void test_G_stats_collection() throws Exception { } while(val != 4); - + // wait until we have some stats coming from the server entity + while (!allStats.stream().filter(statistics -> statistics.getContext().contains("consumerId")).findFirst().isPresent()) { + allStats.addAll(waitForNextStats()); + } + List serverStats = allStats.stream().filter(statistics -> statistics.getContext().contains("consumerId")).collect(Collectors.toList()); + + // server-side stats + + assertThat( + serverStats.stream() + .map(ContextualStatistics::getCapability) + .collect(Collectors.toSet()), + equalTo(new HashSet<>(Arrays.asList("PoolStatistics", "ServerStoreStatistics")))); + + // ensure we collect stats from all registered objects (pools and stores) + + assertThat( + serverStats.stream() + .filter(statistics -> statistics.getCapability().equals("PoolStatistics")) + .map(statistics -> statistics.getContext().get("alias")) + .collect(Collectors.toSet()), + equalTo(new HashSet<>(Arrays.asList("resource-pool-b", "resource-pool-a", "dedicated-cache-1", "cache-2")))); + + assertThat( + serverStats.stream() + 
.filter(statistics -> statistics.getCapability().equals("ServerStoreStatistics")) + .map(statistics -> statistics.getContext().get("alias")) + .collect(Collectors.toSet()), + equalTo(new HashSet<>(Arrays.asList("shared-cache-3", "shared-cache-2", "dedicated-cache-1", "cache-2")))); + + // ensure we collect all the stat names + + assertThat( + serverStats.stream() + .filter(statistics -> statistics.getCapability().equals("PoolStatistics")) + .flatMap(statistics -> statistics.getStatistics().keySet().stream()) + .collect(Collectors.toSet()), + equalTo(POOL_DESCRIPTORS.stream().map(StatisticDescriptor::getName).collect(Collectors.toSet()))); + + assertThat( + serverStats.stream() + .filter(statistics -> statistics.getCapability().equals("ServerStoreStatistics")) + .flatMap(statistics -> statistics.getStatistics().keySet().stream()) + .collect(Collectors.toSet()), + equalTo(SERVER_STORE_DESCRIPTORS.stream().map(StatisticDescriptor::getName).collect(Collectors.toSet()))); } @BeforeClass @@ -353,6 +412,20 @@ public static void initDescriptors() throws ClassNotFoundException { CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", StatisticType.COUNTER_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatioRatio", StatisticType.RATIO_HISTORY)); + POOL_DESCRIPTORS.add(new StatisticDescriptor("Pool:AllocatedSize", StatisticType.SIZE_HISTORY)); + + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:AllocatedMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataAllocatedMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:OccupiedMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataOccupiedMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:Entries", StatisticType.COUNTER_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:UsedSlotCount", StatisticType.COUNTER_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataVitalMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:VitalMemory", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:ReprobeLength", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:RemovedSlotCount", StatisticType.COUNTER_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataSize", StatisticType.SIZE_HISTORY)); + SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:TableCapacity", StatisticType.SIZE_HISTORY)); } } diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index 5721a1dc46..ced76398ae 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -34,6 +34,9 @@ dependencies { exclude group: 'org.terracotta.management', module: 'management-registry' exclude group: 'org.terracotta.management', module: 'management-model' } + compile ("org.terracotta:statistics:$parent.statisticVersion") { + exclude group:'org.slf4j', module:'slf4j-api' + } compile"org.terracotta.management.dist:management-common:$parent.managementVersion" provided "org.terracotta:entity-server-api:$parent.entityApiVersion" provided "org.terracotta:standard-cluster-services:$parent.terracottaApisVersion" diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java 
b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index b7bdfb09a6..ffc15c3035 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -25,6 +25,7 @@ import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.InvalidationTracker; +import org.ehcache.clustered.server.state.ResourcePageSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; @@ -37,11 +38,7 @@ import org.terracotta.entity.ServiceRegistry; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; -import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; -import org.terracotta.offheapstore.paging.OffHeapStorageArea; -import org.terracotta.offheapstore.paging.Page; import org.terracotta.offheapstore.paging.PageSource; -import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; import org.terracotta.statistics.StatisticsManager; import java.util.Collections; @@ -56,15 +53,13 @@ import java.util.function.Function; import static java.util.stream.Collectors.toMap; -import static org.terracotta.offheapstore.util.MemoryUnit.GIGABYTES; -import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; public class EhcacheStateServiceImpl implements EhcacheStateService { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheStateServiceImpl.class); - private static final String STATISTICS_STORE_TAG = "ServerStore"; + private static final String STATISTICS_STORE_TAG = "Store"; private static final String STATISTICS_POOL_TAG = "Pool"; private static final String PROPERTY_STORE_KEY = "storeName"; private static final String PROPERTY_POOL_KEY = "poolName"; @@ -139,7 +134,7 @@ public Set getStores() { } Set getSharedResourcePoolIds() { - return sharedResourcePools == null ? new HashSet<>() : Collections.unmodifiableSet(sharedResourcePools.keySet()); + return Collections.unmodifiableSet(sharedResourcePools.keySet()); } Set getDedicatedResourcePoolIds() { @@ -152,7 +147,12 @@ public String getDefaultServerResource() { @Override public Map getSharedResourcePools() { - return sharedResourcePools == null ? Collections.emptyMap() : sharedResourcePools.entrySet().stream().collect(toMap(Map.Entry::getKey, e -> e.getValue().getPool())); + return sharedResourcePools.entrySet().stream().collect(toMap(Map.Entry::getKey, e -> e.getValue().getPool())); + } + + @Override + public ResourcePageSource getSharedResourcePageSource(String name) { + return sharedResourcePools.get(name); } @Override @@ -161,6 +161,11 @@ public ServerSideConfiguration.Pool getDedicatedResourcePool(String name) { return resourcePageSource == null ? null : resourcePageSource.getPool(); } + @Override + public ResourcePageSource getDedicatedResourcePageSource(String name) { + return dedicatedResourcePools.get(name); + } + public void validate(ServerSideConfiguration configuration) throws ClusterException { if (!isConfigured()) { throw new LifecycleException("Clustered Tier Manager is not configured"); @@ -475,48 +480,4 @@ private static boolean nullSafeEquals(Object s1, Object s2) { return (s1 == null ? 
s2 == null : s1.equals(s2)); } - /** - * Pairs a {@link ServerSideConfiguration.Pool} and an {@link UpfrontAllocatingPageSource} instance providing storage - * for the pool. - */ - private static class ResourcePageSource implements PageSource{ - /** - * A description of the resource allocation underlying this {@code PageSource}. - */ - private final ServerSideConfiguration.Pool pool; - private final UpfrontAllocatingPageSource delegatePageSource; - - private ResourcePageSource(ServerSideConfiguration.Pool pool) { - this.pool = pool; - this.delegatePageSource = new UpfrontAllocatingPageSource(new OffHeapBufferSource(), pool.getSize(), GIGABYTES.toBytes(1), MEGABYTES.toBytes(128)); - } - - public ServerSideConfiguration.Pool getPool() { - return pool; - } - - public long getAllocatedSize() { - return delegatePageSource.getAllocatedSizeUnSync(); - } - - @Override - public Page allocate(int size, boolean thief, boolean victim, OffHeapStorageArea owner) { - return delegatePageSource.allocate(size, thief, victim, owner); - } - - @Override - public void free(Page page) { - delegatePageSource.free(page); - } - - @Override - public String toString() { - final StringBuilder sb = new StringBuilder("ResourcePageSource{"); - sb.append("pool=").append(pool); - sb.append(", delegatePageSource=").append(delegatePageSource); - sb.append('}'); - return sb.toString(); - } - } - } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java new file mode 100644 index 0000000000..4b88ec1a93 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java @@ -0,0 +1,202 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
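[Editorial note] The inner ResourcePageSource removed above is promoted to a top-level class (org.ehcache.clustered.server.state.ResourcePageSource) so the new statistics providers can obtain it through EhcacheStateService. The sketch below illustrates the underlying pattern with hypothetical types rather than the offheap-store API: a pool description paired with a delegate that performs the real allocation, while the wrapper exposes the accounting figure the management layer samples.

// Hypothetical types for illustration only; the real code delegates to
// org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource.
interface Allocator {
  long allocate(int size);   // returns a handle to the allocation
  void free(long handle);
  long allocatedBytes();
}

final class TrackedPool implements Allocator {

  private final String poolName;
  private final long poolSize;
  private final Allocator delegate;

  TrackedPool(String poolName, long poolSize, Allocator delegate) {
    this.poolName = poolName;
    this.poolSize = poolSize;
    this.delegate = delegate;
  }

  @Override public long allocate(int size) { return delegate.allocate(size); }
  @Override public void free(long handle) { delegate.free(handle); }

  // The figure a "Pool:AllocatedSize"-style statistic would be derived from.
  @Override public long allocatedBytes() { return delegate.allocatedBytes(); }

  @Override public String toString() {
    return "TrackedPool{" + poolName + ", size=" + poolSize + ", allocated=" + allocatedBytes() + '}';
  }
}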
+ */ +package org.ehcache.clustered.server.management; + +import org.terracotta.context.extended.RegisteredCompoundStatistic; +import org.terracotta.context.extended.RegisteredCounterStatistic; +import org.terracotta.context.extended.RegisteredRatioStatistic; +import org.terracotta.context.extended.RegisteredSizeStatistic; +import org.terracotta.context.extended.RegisteredStatistic; +import org.terracotta.context.extended.StatisticsRegistry; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; +import org.terracotta.management.model.stats.MemoryUnit; +import org.terracotta.management.model.stats.NumberUnit; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.Statistic; +import org.terracotta.management.model.stats.StatisticType; +import org.terracotta.management.model.stats.history.AverageHistory; +import org.terracotta.management.model.stats.history.CounterHistory; +import org.terracotta.management.model.stats.history.DurationHistory; +import org.terracotta.management.model.stats.history.RateHistory; +import org.terracotta.management.model.stats.history.RatioHistory; +import org.terracotta.management.model.stats.history.SizeHistory; +import org.terracotta.management.service.registry.provider.AliasBinding; +import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; +import org.terracotta.offheapstore.util.FindbugsSuppressWarnings; +import org.terracotta.statistics.extended.CompoundOperation; +import org.terracotta.statistics.extended.SampledStatistic; + +import java.io.Closeable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +@FindbugsSuppressWarnings("EQ_DOESNT_OVERRIDE_EQUALS") +class AbstractExposedStatistics extends AliasBindingManagementProvider.ExposedAliasBinding implements Closeable { + + protected final StatisticsRegistry statisticsRegistry; + + AbstractExposedStatistics(long consumerId, T binding, StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor, Object statisticContextObject) { + super(binding, consumerId); + if(statisticContextObject == null) { + this.statisticsRegistry = null; + + } else { + this.statisticsRegistry = new StatisticsRegistry( + statisticContextObject, + executor, + statisticConfiguration.averageWindowDuration(), + statisticConfiguration.averageWindowUnit(), + statisticConfiguration.historySize(), + statisticConfiguration.historyInterval(), + statisticConfiguration.historyIntervalUnit(), + statisticConfiguration.timeToDisable(), + statisticConfiguration.timeToDisableUnit()); + } + } + + void init() { + if (statisticsRegistry != null) { + Map registrations = statisticsRegistry.getRegistrations(); + for (RegisteredStatistic registeredStatistic : registrations.values()) { + registeredStatistic.getSupport().setAlwaysOn(true); + } + } + } + + @Override + public void close() { + if (statisticsRegistry != null) { + statisticsRegistry.clearRegistrations(); + } + } + + @SuppressWarnings("unchecked") + public Statistic queryStatistic(String statisticName, long since) { + if (statisticsRegistry != null) { + Map registrations = statisticsRegistry.getRegistrations(); + for (Entry entry : 
registrations.entrySet()) { + String name = entry.getKey(); + RegisteredStatistic registeredStatistic = entry.getValue(); + + if (registeredStatistic instanceof RegisteredCompoundStatistic) { + RegisteredCompoundStatistic registeredCompoundStatistic = (RegisteredCompoundStatistic) registeredStatistic; + CompoundOperation compoundOperation = registeredCompoundStatistic.getCompoundOperation(); + + if ((name + "Count").equals(statisticName)) { + SampledStatistic count = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).count(); + return new CounterHistory(buildHistory(count, since), NumberUnit.COUNT); + } else if ((name + "Rate").equals(statisticName)) { + SampledStatistic rate = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).rate(); + return new RateHistory(buildHistory(rate, since), TimeUnit.SECONDS); + + } else if ((name + "LatencyMinimum").equals(statisticName)) { + SampledStatistic minimum = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().minimum(); + return new DurationHistory(buildHistory(minimum, since), TimeUnit.NANOSECONDS); + + } else if ((name + "LatencyMaximum").equals(statisticName)) { + SampledStatistic maximum = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().maximum(); + return new DurationHistory(buildHistory(maximum, since), TimeUnit.NANOSECONDS); + + } else if ((name + "LatencyAverage").equals(statisticName)) { + SampledStatistic average = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().average(); + return new AverageHistory(buildHistory(average, since), TimeUnit.NANOSECONDS); + } + } else if (registeredStatistic instanceof RegisteredRatioStatistic) { + RegisteredRatioStatistic registeredRatioStatistic = (RegisteredRatioStatistic) registeredStatistic; + CompoundOperation compoundOperation = registeredRatioStatistic.getCompoundOperation(); + + if (name.equals(statisticName)) { + SampledStatistic ratio = (SampledStatistic) compoundOperation.ratioOf((Set) registeredRatioStatistic.getNumerator(), (Set) registeredRatioStatistic.getDenominator()); + return new RatioHistory(buildHistory(ratio, since), NumberUnit.RATIO); + } + } else if (registeredStatistic instanceof RegisteredSizeStatistic) { + RegisteredSizeStatistic registeredSizeStatistic = (RegisteredSizeStatistic) registeredStatistic; + if (name.equals(statisticName)) { + SampledStatistic count = (SampledStatistic) registeredSizeStatistic.getSampledStatistic(); + return new SizeHistory(buildHistory(count, since), MemoryUnit.B); + } + } else if (registeredStatistic instanceof RegisteredCounterStatistic) { + RegisteredCounterStatistic registeredCounterStatistic = (RegisteredCounterStatistic) registeredStatistic; + if (name.equals(statisticName)) { + SampledStatistic count = (SampledStatistic) registeredCounterStatistic.getSampledStatistic(); + return new CounterHistory(buildHistory(count, since), NumberUnit.COUNT); + } + } else { + throw new UnsupportedOperationException("Cannot handle registered statistic type : " + registeredStatistic); + } + } + } + + throw new IllegalArgumentException("No registered statistic named '" + statisticName + "'"); + } + + @Override + public Collection getDescriptors() { + Set capabilities = new HashSet<>(); + capabilities.addAll(queryStatisticsRegistry()); + return capabilities; + } + + private Set queryStatisticsRegistry() { + Set capabilities = new HashSet<>(); + + if (statisticsRegistry != null) { + Map registrations = 
statisticsRegistry.getRegistrations(); + + for (Entry entry : registrations.entrySet()) { + String statisticName = entry.getKey(); + RegisteredStatistic registeredStatistic = registrations.get(statisticName); + + if (registeredStatistic instanceof RegisteredCompoundStatistic) { + List statistics = new ArrayList<>(); + statistics.add(new StatisticDescriptor(entry.getKey() + "Count", StatisticType.COUNTER_HISTORY)); + statistics.add(new StatisticDescriptor(entry.getKey() + "Rate", StatisticType.RATE_HISTORY)); + statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyMinimum", StatisticType.DURATION_HISTORY)); + statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyMaximum", StatisticType.DURATION_HISTORY)); + statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyAverage", StatisticType.AVERAGE_HISTORY)); + + capabilities.addAll(statistics); + } else if (registeredStatistic instanceof RegisteredRatioStatistic) { + capabilities.add(new StatisticDescriptor(entry.getKey() + "Ratio", StatisticType.RATIO_HISTORY)); + } else if (registeredStatistic instanceof RegisteredCounterStatistic) { + capabilities.add(new StatisticDescriptor(statisticName, StatisticType.COUNTER_HISTORY)); + } else if (registeredStatistic instanceof RegisteredSizeStatistic) { + capabilities.add(new StatisticDescriptor(statisticName, StatisticType.SIZE_HISTORY)); + } + } + } + + return capabilities; + } + + private static List> buildHistory(SampledStatistic sampledStatistic, long since) { + return sampledStatistic.history() + .stream() + .filter(timestamped -> timestamped.getTimestamp() >= since) + .map(timestamped -> new Sample<>(timestamped.getTimestamp(), timestamped.getSample())) + .collect(Collectors.toList()); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java new file mode 100644 index 0000000000..f168a88d0a --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java @@ -0,0 +1,90 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
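[Editorial note] Worth spelling out from AbstractExposedStatistics above: a single registered compound operation fans out into several derived statistic names, and queryStatistic() resolves a requested name by matching those suffixes back off. The snippet below is a tiny standalone sketch of that naming convention only; "SomeOperation" is a placeholder, not a real statistic.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class CompoundStatisticNames {

  // One registered compound operation is exposed as five derived history
  // statistics, mirroring the descriptors built in queryStatisticsRegistry().
  static List<String> derivedNames(String operationName) {
    return Stream.of("Count", "Rate", "LatencyMinimum", "LatencyMaximum", "LatencyAverage")
        .map(suffix -> operationName + suffix)
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    derivedNames("SomeOperation").forEach(System.out::println);
  }
}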
+ */ +package org.ehcache.clustered.server.management; + +import org.terracotta.management.model.capabilities.Capability; +import org.terracotta.management.model.capabilities.StatisticsCapability; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.stats.Statistic; +import org.terracotta.management.registry.action.ExposedObject; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.service.registry.provider.AliasBinding; +import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +@Named("ServerStoreSettings") +@RequiredContext({@Named("consumerId")}) +abstract class AbstractStatisticsManagementProvider extends AliasBindingManagementProvider { + + private final StatisticConfiguration statisticConfiguration; + + public AbstractStatisticsManagementProvider(Class type, StatisticConfiguration statisticConfiguration) { + super(type); + this.statisticConfiguration = statisticConfiguration; + } + + public StatisticConfiguration getStatisticConfiguration() { + return statisticConfiguration; + } + + @Override + protected void dispose(ExposedObject exposedObject) { + ((AbstractExposedStatistics) exposedObject).close(); + } + + @Override + public Capability getCapability() { + StatisticsCapability.Properties properties = new StatisticsCapability.Properties( + statisticConfiguration.averageWindowDuration(), + statisticConfiguration.averageWindowUnit(), + statisticConfiguration.historySize(), + statisticConfiguration.historyInterval(), + statisticConfiguration.historyIntervalUnit(), + statisticConfiguration.timeToDisable(), + statisticConfiguration.timeToDisableUnit()); + return new StatisticsCapability(getCapabilityName(), properties, getDescriptors(), getCapabilityContext()); + } + + @Override + public Map> collectStatistics(Context context, Collection statisticNames, long since) { + Map> statistics = new HashMap>(statisticNames.size()); + AbstractExposedStatistics ehcacheStatistics = (AbstractExposedStatistics) findExposedObject(context); + if (ehcacheStatistics != null) { + for (String statisticName : statisticNames) { + try { + statistics.put(statisticName, ehcacheStatistics.queryStatistic(statisticName, since)); + } catch (IllegalArgumentException ignored) { + // ignore when statisticName does not exist and throws an exception + } + } + } + return statistics; + } + + @Override + protected AbstractExposedStatistics wrap(T managedObject) { + AbstractExposedStatistics exposed = internalWrap(managedObject); + exposed.init(); + return exposed; + } + + protected abstract AbstractExposedStatistics internalWrap(T managedObject); + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java index fa258ed248..1f401c59d6 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -23,6 +23,7 @@ import org.slf4j.LoggerFactory; import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.ServiceRegistry; +import org.terracotta.management.model.call.Parameter; import org.terracotta.management.model.context.Context; import 
org.terracotta.management.service.registry.ConsumerManagementRegistry; import org.terracotta.management.service.registry.ConsumerManagementRegistryConfiguration; @@ -30,12 +31,26 @@ import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; +import java.util.Collection; import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.atomic.AtomicLong; + +import static java.util.Arrays.asList; public class Management { private static final Logger LOGGER = LoggerFactory.getLogger(Management.class); + // TODO FIXME: the following things are just temporary and should be removed/changed asap + // - scheduling should be done by using a voltron service (not yet available: see https://github.com/Terracotta-OSS/terracotta-apis/issues/158) + // - stats config should be given when configuring the entities (https://github.com/ehcache/ehcache3/issues/1567) + private static final AtomicLong managementSchedulerCount = new AtomicLong(); + private ScheduledExecutorService managementScheduler; + private final StatisticConfiguration statisticConfiguration = new StatisticConfiguration(); + private final ConsumerManagementRegistry managementRegistry; private final ServiceRegistry services; private final EhcacheStateService ehcacheStateService; @@ -55,6 +70,19 @@ public Management(ServiceRegistry services, EhcacheStateService ehcacheStateServ managementRegistry.addManagementProvider(new ServerStoreSettingsManagementProvider()); // expose settings about pools managementRegistry.addManagementProvider(new PoolSettingsManagementProvider(ehcacheStateService)); + + managementScheduler = Executors.unconfigurableScheduledExecutorService(Executors.newSingleThreadScheduledExecutor( + r -> { + Thread t = Executors.defaultThreadFactory().newThread(r); + t.setDaemon(true); + t.setName("ManagementScheduler-" + managementSchedulerCount.incrementAndGet()); + return t; + })); + + // expose stats about server stores + managementRegistry.addManagementProvider(new ServerStoreStatisticsManagementProvider(statisticConfiguration, managementScheduler)); + // expose stats about pools + managementRegistry.addManagementProvider(new PoolStatisticsManagementProvider(ehcacheStateService, statisticConfiguration, managementScheduler)); } } @@ -65,14 +93,77 @@ public void init() { managementRegistry.register(ehcacheStateService); + // PoolBinding.ALL_SHARED is a marker so that we can send events not specifically related to 1 pool + // this object is ignored from the stats and descriptors managementRegistry.register(PoolBinding.ALL_SHARED); + // exposes available offheap service resources for (String identifier : offHeapResourceIdentifiers) { OffHeapResource offHeapResource = services.getService(OffHeapResourceIdentifier.identifier(identifier)); managementRegistry.register(new OffHeapResourceBinding(identifier, offHeapResource)); } + // expose management calls on statistic collector + StatisticCollectorManagementProvider collectorManagementProvider = new StatisticCollectorManagementProvider( + managementRegistry, + statisticConfiguration, + managementScheduler, + new String[]{"PoolStatistics", "ServerStoreStatistics"}); + + managementRegistry.addManagementProvider(collectorManagementProvider); + + // start collecting stats + collectorManagementProvider.start(); + + // expose the management registry inside voltorn managementRegistry.refresh(); + + //TODO 
FIXME: following code should be triggered by a remote management call (https://github.com/Terracotta-OSS/terracotta-apis/issues/168) + try { + LOGGER.trace("init() - activating statistics"); + + Context entityContext = Context.create(managementRegistry.getContextContainer().getName(), managementRegistry.getContextContainer().getValue()); + + managementRegistry + .withCapability("StatisticCollector") + .call("updateCollectedStatistics", + new Parameter("PoolStatistics"), + new Parameter(asList( + "Pool:AllocatedSize" + ), Collection.class.getName())) + .on(entityContext) + .build() + .execute() + .getSingleResult() + .getValue(); + + managementRegistry + .withCapability("StatisticCollector") + .call("updateCollectedStatistics", + new Parameter("ServerStoreStatistics"), + new Parameter(asList( + "Store:AllocatedMemory", + "Store:DataAllocatedMemory", + "Store:OccupiedMemory", + "Store:DataOccupiedMemory", + "Store:Entries", + "Store:UsedSlotCount", + "Store:DataVitalMemory", + "Store:VitalMemory", + "Store:ReprobeLength", + "Store:RemovedSlotCount", + "Store:DataSize", + "Store:TableCapacity" + ), Collection.class.getName())) + .on(entityContext) + .build() + .execute() + .getSingleResult() + .getValue(); + + } catch (ExecutionException e) { + throw new RuntimeException(e.getCause()); + } } } @@ -80,6 +171,7 @@ public void close() { if (managementRegistry != null) { LOGGER.trace("close()"); managementRegistry.close(); + managementScheduler.shutdown(); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java new file mode 100644 index 0000000000..df1b1aa894 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java @@ -0,0 +1,87 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.ehcache.clustered.server.state.EhcacheStateService; +import org.ehcache.clustered.server.state.ResourcePageSource; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.ExposedObject; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.ScheduledExecutorService; +import java.util.stream.Collectors; + +import static java.util.Arrays.asList; +import static org.terracotta.context.extended.ValueStatisticDescriptor.descriptor; + +@Named("PoolStatistics") +@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) +class PoolStatisticsManagementProvider extends AbstractStatisticsManagementProvider { + + private final EhcacheStateService ehcacheStateService; + private final ScheduledExecutorService executor; + + PoolStatisticsManagementProvider(EhcacheStateService ehcacheStateService, StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor) { + super(PoolBinding.class, statisticConfiguration); + this.ehcacheStateService = ehcacheStateService; + this.executor = executor; + } + + @Override + public Collection> getExposedObjects() { + return super.getExposedObjects().stream().filter(e -> e.getTarget() != PoolBinding.ALL_SHARED).collect(Collectors.toList()); + } + + @Override + protected AbstractExposedStatistics internalWrap(PoolBinding managedObject) { + ResourcePageSource resourcePageSource = null; + + if (managedObject != PoolBinding.ALL_SHARED) { + String poolName = managedObject.getAlias(); + resourcePageSource = managedObject.getAllocationType() == PoolBinding.AllocationType.DEDICATED ? + ehcacheStateService.getDedicatedResourcePageSource(poolName) : + ehcacheStateService.getSharedResourcePageSource(poolName); + Objects.requireNonNull(resourcePageSource, "Unable to locale pool " + poolName); + } + + return new PoolExposedStatistics(getConsumerId(), managedObject, getStatisticConfiguration(), executor, resourcePageSource); + } + + private static class PoolExposedStatistics extends AbstractExposedStatistics { + + PoolExposedStatistics(long consumerId, PoolBinding binding, StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor, ResourcePageSource resourcePageSource) { + super(consumerId, binding, statisticConfiguration, executor, resourcePageSource); + + if (resourcePageSource != null) { + statisticsRegistry.registerSize("AllocatedSize", descriptor("allocatedSize", tags("tier", "Pool"))); + } + } + + @Override + public Context getContext() { + return super.getContext().with("type", "PoolBinding"); + } + + } + + private static Set tags(String... tags) {return new HashSet<>(asList(tags));} + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java new file mode 100644 index 0000000000..fdfe831aea --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java @@ -0,0 +1,73 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; + +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.ScheduledExecutorService; + +import static java.util.Arrays.asList; +import static org.terracotta.context.extended.ValueStatisticDescriptor.descriptor; + +@Named("ServerStoreStatistics") +@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) +class ServerStoreStatisticsManagementProvider extends AbstractStatisticsManagementProvider { + + private final ScheduledExecutorService executor; + + ServerStoreStatisticsManagementProvider(StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor) { + super(ServerStoreBinding.class, statisticConfiguration); + this.executor = executor; + } + + @Override + protected AbstractExposedStatistics internalWrap(ServerStoreBinding managedObject) { + return new ServerStoreExposedStatistics(getConsumerId(), managedObject, getStatisticConfiguration(), executor); + } + + private static class ServerStoreExposedStatistics extends AbstractExposedStatistics { + + ServerStoreExposedStatistics(long consumerId, ServerStoreBinding binding, StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor) { + super(consumerId, binding, statisticConfiguration, executor, binding.getValue()); + + statisticsRegistry.registerSize("AllocatedMemory", descriptor("allocatedMemory", tags("tier", "Store"))); + statisticsRegistry.registerSize("DataAllocatedMemory", descriptor("dataAllocatedMemory", tags("tier", "Store"))); + statisticsRegistry.registerSize("OccupiedMemory", descriptor("occupiedMemory", tags("tier", "Store"))); + statisticsRegistry.registerSize("DataOccupiedMemory", descriptor("dataOccupiedMemory", tags("tier", "Store"))); + statisticsRegistry.registerCounter("Entries", descriptor("entries", tags("tier", "Store"))); + statisticsRegistry.registerCounter("UsedSlotCount", descriptor("usedSlotCount", tags("tier", "Store"))); + statisticsRegistry.registerSize("DataVitalMemory", descriptor("dataVitalMemory", tags("tier", "Store"))); + statisticsRegistry.registerSize("VitalMemory", descriptor("vitalMemory", tags("tier", "Store"))); + statisticsRegistry.registerSize("ReprobeLength", descriptor("reprobeLength", tags("tier", "Store"))); + statisticsRegistry.registerCounter("RemovedSlotCount", descriptor("removedSlotCount", tags("tier", "Store"))); + statisticsRegistry.registerSize("DataSize", descriptor("dataSize", tags("tier", "Store"))); + statisticsRegistry.registerSize("TableCapacity", descriptor("tableCapacity", tags("tier", "Store"))); + } + + @Override + public Context getContext() { + return super.getContext().with("type", "ServerStore"); + } + + } + + private static Set tags(String... 
tags) {return new HashSet<>(asList(tags));} + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java new file mode 100644 index 0000000000..407fa040de --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java @@ -0,0 +1,166 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.management; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.registry.ManagementRegistry; +import org.terracotta.management.registry.StatisticQuery; +import org.terracotta.management.registry.action.ExposedObject; +import org.terracotta.management.registry.action.Named; +import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.registry.collect.StatisticCollector; +import org.terracotta.management.registry.collect.StatisticCollectorProvider; +import org.terracotta.management.service.registry.MonitoringResolver; +import org.terracotta.management.service.registry.provider.ConsumerManagementProvider; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; + +@Named("StatisticCollector") +@RequiredContext({@Named("consumerId")}) +class StatisticCollectorManagementProvider extends StatisticCollectorProvider implements ConsumerManagementProvider { + + private static final Logger LOGGER = LoggerFactory.getLogger(StatisticCollectorManagementProvider.class); + + private final ManagementRegistry managementRegistry; + private final StatisticConfiguration statisticConfiguration; + private final ScheduledExecutorService scheduledExecutorService; + private final String[] statsCapabilitynames; + private final ConcurrentMap selectedStatsPerCapability = new ConcurrentHashMap<>(); + + private volatile MonitoringResolver resolver; + + StatisticCollectorManagementProvider(ManagementRegistry managementRegistry, StatisticConfiguration statisticConfiguration, ScheduledExecutorService scheduledExecutorService, String[] statsCapabilitynames) { + super(StatisticCollector.class, null); + this.managementRegistry = managementRegistry; + this.statisticConfiguration = statisticConfiguration; + this.scheduledExecutorService = scheduledExecutorService; + this.statsCapabilitynames = statsCapabilitynames; + } + + @Override + public void 
accept(MonitoringResolver resolver) { + this.resolver = resolver; + } + + @Override + public boolean pushServerEntityNotification(StatisticCollector managedObjectSource, String type, Map attrs) { + return false; + } + + @Override + protected ExposedObject wrap(StatisticCollector managedObject) { + return new StatisticCollectorProvider.ExposedStatisticCollector<>(managedObject, Context.create("consumerId", String.valueOf(resolver.getConsumerId()))); + } + + @Override + protected void dispose(ExposedObject exposedObject) { + exposedObject.getTarget().stopStatisticCollector(); + } + + void start() { + StatisticCollector managedObject = new StatisticCollector() { + + private volatile ScheduledFuture task; + + @Override + public void startStatisticCollector() { + if (task == null) { + LOGGER.trace("startStatisticCollector()"); + + long timeToDisableMs = TimeUnit.MILLISECONDS.convert(statisticConfiguration.timeToDisable(), statisticConfiguration.timeToDisableUnit()); + long pollingIntervalMs = Math.round(timeToDisableMs * 0.75); // we poll at 75% of the time to disable (before the time to disable happens) + final AtomicLong lastPoll = new AtomicLong(getTimeMs()); + + task = scheduledExecutorService.scheduleWithFixedDelay(() -> { + try { + if (task != null && !selectedStatsPerCapability.isEmpty()) { + Collection statistics = new ArrayList<>(); + long since = lastPoll.get(); + + selectedStatsPerCapability.entrySet() + .stream() + .filter(entry -> Arrays.binarySearch(statsCapabilitynames, entry.getKey()) >= 0) + .forEach(entry -> { + AbstractStatisticsManagementProvider provider = (AbstractStatisticsManagementProvider) managementRegistry.getManagementProvidersByCapability(entry.getKey()) + .iterator().next(); + // note: .iterator().next() because the management registry is not shared, so there cannot be more than 1 capability with the same name. 
+ Collection allContexts = provider.getExposedObjects().stream().map(ExposedObject::getContext).collect(Collectors.toList()); + for (ContextualStatistics contextualStatistics : entry.getValue().since(since).on(allContexts).build().execute()) { + statistics.add(contextualStatistics); + } + }); + + // next time, only poll history from this time + lastPoll.set(getTimeMs()); + + if (task != null && !statistics.isEmpty() && resolver != null) { + resolver.pushServerEntityStatistics(statistics.toArray(new ContextualStatistics[statistics.size()])); + } + } + } catch (RuntimeException e) { + LOGGER.error("StatisticCollector: " + e.getMessage(), e); + } + }, pollingIntervalMs, pollingIntervalMs, TimeUnit.MILLISECONDS); + } + } + + @Override + public void stopStatisticCollector() { + if (task != null) { + LOGGER.trace("stopStatisticCollector()"); + ScheduledFuture _task = task; + task = null; + _task.cancel(false); + } + } + + @Override + public void updateCollectedStatistics(String capabilityName, Collection statisticNames) { + if (!statisticNames.isEmpty()) { + LOGGER.trace("updateCollectedStatistics({}, {})", capabilityName, statisticNames); + StatisticQuery.Builder builder = managementRegistry.withCapability(capabilityName).queryStatistics(statisticNames); + selectedStatsPerCapability.put(capabilityName, builder); + } else { + // we clear the stats set + selectedStatsPerCapability.remove(capabilityName); + } + } + }; + + register(managedObject); + + managedObject.startStatisticCollector(); + } + + private long getTimeMs() { + // TODO FIXME: there is no timesource service in voltron: https://github.com/Terracotta-OSS/terracotta-apis/issues/167 + return System.currentTimeMillis(); + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticConfiguration.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticConfiguration.java new file mode 100644 index 0000000000..cad3ebcf20 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticConfiguration.java @@ -0,0 +1,148 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.server.management; + +import org.terracotta.management.model.Objects; +import org.terracotta.management.registry.ManagementProvider; + +import java.util.concurrent.TimeUnit; + +class StatisticConfiguration { + + private long averageWindowDuration = 60; + private TimeUnit averageWindowUnit = TimeUnit.SECONDS; + private int historySize = 100; + private long historyInterval = 1; + private TimeUnit historyIntervalUnit = TimeUnit.SECONDS; + private long timeToDisable = 30; + private TimeUnit timeToDisableUnit = TimeUnit.SECONDS; + + StatisticConfiguration() { + } + + StatisticConfiguration(long averageWindowDuration, TimeUnit averageWindowUnit, int historySize, long historyInterval, TimeUnit historyIntervalUnit, long timeToDisable, TimeUnit timeToDisableUnit) { + this.averageWindowDuration = averageWindowDuration; + this.averageWindowUnit = Objects.requireNonNull(averageWindowUnit); + this.historySize = historySize; + this.historyInterval = historyInterval; + this.historyIntervalUnit = Objects.requireNonNull(historyIntervalUnit); + this.timeToDisable = timeToDisable; + this.timeToDisableUnit = Objects.requireNonNull(timeToDisableUnit); + } + + public long averageWindowDuration() { + return averageWindowDuration; + } + + public TimeUnit averageWindowUnit() { + return averageWindowUnit; + } + + public int historySize() { + return historySize; + } + + public long historyInterval() { + return historyInterval; + } + + public TimeUnit historyIntervalUnit() { + return historyIntervalUnit; + } + + public long timeToDisable() { + return timeToDisable; + } + + public TimeUnit timeToDisableUnit() { + return timeToDisableUnit; + } + + + @Override + public String toString() { + return "{averageWindowDuration=" + averageWindowDuration() + + ", averageWindowUnit=" + averageWindowUnit() + + ", historyInterval=" + historyInterval() + + ", historyIntervalUnit=" + historyIntervalUnit() + + ", historySize=" + historySize() + + ", timeToDisable=" + timeToDisable() + + ", timeToDisableUnit=" + timeToDisableUnit() + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StatisticConfiguration that = (StatisticConfiguration) o; + if (averageWindowDuration != that.averageWindowDuration) return false; + if (historySize != that.historySize) return false; + if (historyInterval != that.historyInterval) return false; + if (timeToDisable != that.timeToDisable) return false; + if (averageWindowUnit != that.averageWindowUnit) return false; + if (historyIntervalUnit != that.historyIntervalUnit) return false; + return timeToDisableUnit == that.timeToDisableUnit; + } + + @Override + public int hashCode() { + int result = (int) (averageWindowDuration ^ (averageWindowDuration >>> 32)); + result = 31 * result + averageWindowUnit.hashCode(); + result = 31 * result + historySize; + result = 31 * result + (int) (historyInterval ^ (historyInterval >>> 32)); + result = 31 * result + historyIntervalUnit.hashCode(); + result = 31 * result + (int) (timeToDisable ^ (timeToDisable >>> 32)); + result = 31 * result + timeToDisableUnit.hashCode(); + return result; + } + + public StatisticConfiguration setAverageWindowDuration(long averageWindowDuration) { + this.averageWindowDuration = averageWindowDuration; + return this; + } + + public StatisticConfiguration setAverageWindowUnit(TimeUnit averageWindowUnit) { + this.averageWindowUnit = averageWindowUnit; + return this; + } + + public StatisticConfiguration 
setHistoryInterval(long historyInterval) { + this.historyInterval = historyInterval; + return this; + } + + public StatisticConfiguration setHistoryIntervalUnit(TimeUnit historyIntervalUnit) { + this.historyIntervalUnit = historyIntervalUnit; + return this; + } + + public StatisticConfiguration setHistorySize(int historySize) { + this.historySize = historySize; + return this; + } + + public StatisticConfiguration setTimeToDisable(long timeToDisable) { + this.timeToDisable = timeToDisable; + return this; + } + + public StatisticConfiguration setTimeToDisableUnit(TimeUnit timeToDisableUnit) { + this.timeToDisableUnit = timeToDisableUnit; + return this; + } + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index 0a21291d23..19eec35f8a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -34,8 +34,12 @@ public interface EhcacheStateService { Map getSharedResourcePools(); + ResourcePageSource getSharedResourcePageSource(String name); + ServerSideConfiguration.Pool getDedicatedResourcePool(String name); + ResourcePageSource getDedicatedResourcePageSource(String name); + ServerStoreImpl getStore(String name); Set getStores(); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java new file mode 100644 index 0000000000..9827e45474 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ResourcePageSource.java @@ -0,0 +1,72 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server.state; + +import com.tc.classloader.CommonComponent; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; +import org.terracotta.offheapstore.paging.OffHeapStorageArea; +import org.terracotta.offheapstore.paging.Page; +import org.terracotta.offheapstore.paging.PageSource; +import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; + +import static org.terracotta.offheapstore.util.MemoryUnit.GIGABYTES; +import static org.terracotta.offheapstore.util.MemoryUnit.MEGABYTES; + +/** + * Pairs a {@link ServerSideConfiguration.Pool} and an {@link UpfrontAllocatingPageSource} instance providing storage + * for the pool. + */ +@CommonComponent +public class ResourcePageSource implements PageSource { + /** + * A description of the resource allocation underlying this {@code PageSource}. 
+ */ + private final ServerSideConfiguration.Pool pool; + private final UpfrontAllocatingPageSource delegatePageSource; + + public ResourcePageSource(ServerSideConfiguration.Pool pool) { + this.pool = pool; + this.delegatePageSource = new UpfrontAllocatingPageSource(new OffHeapBufferSource(), pool.getSize(), GIGABYTES.toBytes(1), MEGABYTES.toBytes(128)); + } + + public ServerSideConfiguration.Pool getPool() { + return pool; + } + + public long getAllocatedSize() { + return delegatePageSource.getAllocatedSizeUnSync(); + } + + @Override + public Page allocate(int size, boolean thief, boolean victim, OffHeapStorageArea owner) { + return delegatePageSource.allocate(size, thief, victim, owner); + } + + @Override + public void free(Page page) { + delegatePageSource.free(page); + } + + @Override + public String toString() { + final StringBuilder sb = new StringBuilder("ResourcePageSource{"); + sb.append("pool=").append(pool); + sb.append(", delegatePageSource=").append(delegatePageSource); + sb.append('}'); + return sb.toString(); + } +} From e0885a76091325e988d10f62a3d6a15922cfbff9 Mon Sep 17 00:00:00 2001 From: geoff gibson Date: Wed, 5 Oct 2016 14:40:17 -0700 Subject: [PATCH 108/218] adds HitCount and MissCount unit tests --- .../providers/statistics/HitCountTest.java | 151 ++++++++++++++++++ .../providers/statistics/MissCountTest.java | 144 +++++++++++++++++ .../providers/statistics/StatsUtil.java | 37 +++++ 3 files changed, 332 insertions(+) create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java new file mode 100755 index 0000000000..3588c0731c --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java @@ -0,0 +1,151 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.hamcrest.CoreMatchers.is; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.ResourcePools; +import org.ehcache.core.EhcacheManager; +import org.ehcache.core.config.DefaultConfiguration; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.ehcache.spi.service.Service; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +/** + * + * + */ +@RunWith(Parameterized.class) +public class HitCountTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private static final long CACHE_HIT_TOTAL = 4; + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + private final Long cacheExpectedValue; + + @Parameterized.Parameters + public static Collection data() { + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), Arrays.asList("OnHeap:HitCount"), Arrays.asList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, + { newResourcePoolsBuilder().offheap(1, MB), Arrays.asList("OffHeap:HitCount"), Arrays.asList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, + { newResourcePoolsBuilder().disk(1, MB), Arrays.asList("Disk:HitCount"), Arrays.asList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, + + //2 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount"), Arrays.asList(2L,2L), CACHE_HIT_TOTAL}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:HitCount","Disk:HitCount"), Arrays.asList(2L,2L), CACHE_HIT_TOTAL}, + //offheap and disk configuration below is not valid. 
Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + //{ newResourcePoolsBuilder().offheap(1, MB).disk(2, MB), Arrays.asList("OffHeap:HitCount","Disk:HitCount"), Arrays.asList(1L,2L), CACHE_HIT_TOTAL}, + + //3 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount","Disk:HitCount"), Arrays.asList(2L,0L,2L), CACHE_HIT_TOTAL}, + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount","Disk:HitCount"), Arrays.asList(1L,1L,2L), CACHE_HIT_TOTAL}, + }); + } + + public HitCountTest(Builder resources, List statNames, List tierExpectedValues, Long cacheExpectedValue) { + this.resources = resources.build(); + this.statNames = statNames; + this.tierExpectedValues = tierExpectedValues; + this.cacheExpectedValue = cacheExpectedValue; + } + + @Test + public void test() throws InterruptedException, IOException { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + Configuration cacheConfiguration = new DefaultConfiguration(EvictionTest.class.getClassLoader(), + new DefaultPersistenceConfiguration(diskPath.newFolder())); + + Collection services = new ArrayList(); + services.add(managementRegistry); + + CacheManager cacheManager = null; + + try { + cacheManager = new EhcacheManager(cacheConfiguration, services); + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager.init(); + Cache cache = cacheManager.createCache("myCache", cacheConfig); + + cache.put(1L, "1");//put in lowest tier + cache.put(2L, "2");//put in lowest tier + cache.put(3L, "3");//put in lowest tier + + + cache.get(1L);//HIT lowest tier + cache.get(2L);//HIT lowest tier + cache.get(2L);//HIT highest tier + + cache.get(1L);//HIT middle/highest tier. Depends on tier configuration. + + Context context = StatsUtil.createContext(managementRegistry); + + long tierHitCountSum = 0; + for (int i = 0; i < statNames.size(); i++) { + tierHitCountSum += StatsUtil.getStatValue(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + } + + long cacheHitCount = StatsUtil.getStatValue("Cache:HitCount", context, managementRegistry, cacheExpectedValue); + Assert.assertThat(tierHitCountSum, is(cacheHitCount)); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } + +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java new file mode 100755 index 0000000000..249e8f2562 --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java @@ -0,0 +1,144 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.CoreMatchers.is; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.Configuration; +import org.ehcache.config.ResourcePools; +import org.ehcache.core.EhcacheManager; +import org.ehcache.core.config.DefaultConfiguration; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.ehcache.spi.service.Service; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +/** + * + * + */ +@RunWith(Parameterized.class) +public class MissCountTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + private final Long cacheExpectedValue; + + @Parameterized.Parameters + public static Collection data() { + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), Arrays.asList("OnHeap:MissCount"), Arrays.asList(2L), 2L }, + { newResourcePoolsBuilder().offheap(1, MB), Arrays.asList("OffHeap:MissCount"), Arrays.asList(2L), 2L }, + { newResourcePoolsBuilder().disk(1, MB), Arrays.asList("Disk:MissCount"), Arrays.asList(2L), 2L }, + + //2 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:MissCount","OffHeap:MissCount"), Arrays.asList(2L,2L), 2L}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:MissCount","Disk:MissCount"), Arrays.asList(2L,2L), 2L}, + //offheap and disk configuration below is not valid. 
Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + //{ newResourcePoolsBuilder().offheap(1, MB).disk(2, MB), Arrays.asList("OffHeap:MissCount","Disk:MissCount"), Arrays.asList(1L,2L), CACHE_HIT_TOTAL}, + + //3 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:MissCount","OffHeap:MissCount","Disk:MissCount"), Arrays.asList(2L,2L,2L), 2L} + }); + } + + public MissCountTest(Builder resources, List statNames, List tierExpectedValues, Long cacheExpectedValue) { + this.resources = resources.build(); + this.statNames = statNames; + this.tierExpectedValues = tierExpectedValues; + this.cacheExpectedValue = cacheExpectedValue; + } + + @Test + public void test() throws InterruptedException, IOException { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + Configuration cacheConfiguration = new DefaultConfiguration(EvictionTest.class.getClassLoader(), + new DefaultPersistenceConfiguration(diskPath.newFolder())); + + Collection services = new ArrayList(); + services.add(managementRegistry); + + CacheManager cacheManager = null; + + try { + cacheManager = new EhcacheManager(cacheConfiguration, services); + CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager.init(); + Cache cache = cacheManager.createCache("myCache", cacheConfig); + + cache.put(1L, "1");//put in lowest tier + cache.put(2L, "2");//put in lowest tier + cache.put(3L, "3");//put in lowest tier + + cache.get(4L);//MISS + cache.get(5L);//MISS + + Context context = StatsUtil.createContext(managementRegistry); + + long tierMissCountSum = 0; + for (int i = 0; i < statNames.size(); i++) { + tierMissCountSum += StatsUtil.getStatValue(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + } + + long cacheMissCount = StatsUtil.getStatValue("Cache:MissCount", context, managementRegistry, cacheExpectedValue); + //A cache.get() checks every tier, so there is one miss per tier. However the cache miss count only counts 1 miss regardless of the number of tiers. 
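+      // (clarifying note, not in the original patch) e.g. in the 3-tier case above: tierMissCountSum = 2 + 2 + 2 = 6, and 6 / 3 tier statistics = 2, which equals the cache-level miss count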
+ Assert.assertThat(tierMissCountSum/statNames.size(), is(cacheMissCount)); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } + +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java index 2d5bf24425..0667db5258 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java @@ -15,10 +15,17 @@ */ package org.ehcache.management.providers.statistics; +import java.util.Arrays; import org.ehcache.management.ManagementRegistryService; +import org.hamcrest.Matchers; +import org.junit.Assert; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.context.ContextContainer; import org.terracotta.management.model.stats.AbstractStatisticHistory; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.history.CounterHistory; +import org.terracotta.management.registry.ResultSet; +import org.terracotta.management.registry.StatisticQuery; /** * @@ -68,4 +75,34 @@ public static Context createContext(ManagementRegistryService managementRegistry .with(cacheManagerCtx.getName(), cacheManagerCtx.getValue()) .with(firstCacheCtx.getName(), firstCacheCtx.getValue()); } + + public static long getStatValue(String statName, Context context, ManagementRegistryService managementRegistry, long expectedResult) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + long value = 0; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + Assert.assertThat(counters.size(), Matchers.is(1)); + + CounterHistory counterHistory = statisticsContext.getStatistic(CounterHistory.class, statName); + + if (counterHistory.getValue().length > 0) { + int mostRecentIndex = counterHistory.getValue().length - 1; + value = counterHistory.getValue()[mostRecentIndex].getValue(); + System.out.println("statName: " + statName + " value: " + value + " expectedResult: " + expectedResult); + } + + }while(value != expectedResult); + + Assert.assertThat(value, Matchers.is(expectedResult)); + + return value; + } } From d94ae9f1457b4bc6071068362dd3057542bd61d9 Mon Sep 17 00:00:00 2001 From: "U-EUR\\GGIB" Date: Tue, 1 Nov 2016 10:24:53 -0700 Subject: [PATCH 109/218] Issue #1454 adds count and ratio testing for all tiers and cache level PR fixes, removes bad comments and changes unit tests to use the traditional api builder adds comments in regards to adding timeouts when using some of the StatsUtil functions --- .../ClusteredStatisticsCountTest.java | 101 ++++++++++ .../ClusteredStatisticsRatioTest.java | 102 ++++++++++ .../providers/statistics/HitCountTest.java | 47 ++--- .../providers/statistics/HitRatioTest.java | 174 ++++++++++++++++++ .../providers/statistics/MissCountTest.java | 46 ++--- .../providers/statistics/MissRatioTest.java | 168 +++++++++++++++++ .../StandardEhcacheStatisticsTest.java | 171 ----------------- .../providers/statistics/StatsUtil.java | 57 +++++- 8 files changed, 631 insertions(+), 235 deletions(-) create mode 100755 clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java create mode 100755 
clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java new file mode 100755 index 0000000000..b36616a8df --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java @@ -0,0 +1,101 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.management; + +import static org.ehcache.clustered.management.AbstractClusteringManagementTest.cacheManager; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; + +import java.util.List; +import org.ehcache.Cache; +import org.junit.Assert; +import org.junit.Test; +import org.terracotta.management.model.call.ContextualReturn; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.history.CounterHistory; + +public class ClusteredStatisticsCountTest extends AbstractClusteringManagementTest { + + private static final long CACHE_HIT_COUNT = 2L; + private static final long CLUSTERED_HIT_COUNT = 2L; + private static final long CACHE_MISS_COUNT = 2L; + private static final long CLUSTERED_MISS_COUNT = 2L; + + @Test + public void countTest() throws Exception { + ContextualReturn contextualReturn = sendManagementCallToCollectStats("Cache:HitCount","Clustered:HitCount","Cache:MissCount","Clustered:MissCount"); + assertThat(contextualReturn.hasExecuted(), is(true)); + + Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); + cache.put("one", "val1"); + cache.put("two", "val2"); + + cache.get("one"); //hit + cache.get("two"); //hit + + cache.get("three"); //miss + cache.get("four"); //miss + + + long cacheHitCount = 0; + long clusteredHitCount = 0; + long cacheMissCount = 0; + long clusteredMissCount = 0; + + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct values + do { + + // get the stats (we are getting the primitive counter, not the sample history) + List stats = waitForNextStats(); + for (ContextualStatistics stat : stats) { + if (stat.getContext().get("cacheName").equals("dedicated-cache-1")) { + + Sample[] samplesCacheHitCount = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + if(samplesCacheHitCount.length > 0) { + cacheHitCount = samplesCacheHitCount[samplesCacheHitCount.length - 1].getValue(); + } + + Sample[] samplesClusteredHitCount = 
stat.getStatistic(CounterHistory.class, "Clustered:HitCount").getValue(); + if(samplesClusteredHitCount.length > 0) { + clusteredHitCount = samplesClusteredHitCount[samplesClusteredHitCount.length - 1].getValue(); + } + + Sample[] samplesClusteredMissCount = stat.getStatistic(CounterHistory.class, "Clustered:MissCount").getValue(); + if(samplesClusteredMissCount.length > 0) { + clusteredMissCount = samplesClusteredMissCount[samplesClusteredMissCount.length - 1].getValue(); + } + + Sample[] samplesCacheMissCount = stat.getStatistic(CounterHistory.class, "Cache:MissCount").getValue(); + if(samplesCacheMissCount.length > 0) { + cacheMissCount = samplesCacheMissCount[samplesCacheMissCount.length - 1].getValue(); + } + } + } + } while( (cacheHitCount != CACHE_HIT_COUNT) && (clusteredHitCount != CLUSTERED_HIT_COUNT) && + (cacheMissCount != CACHE_MISS_COUNT) && (clusteredMissCount != CLUSTERED_MISS_COUNT)); + + Assert.assertThat(cacheHitCount,is(CACHE_HIT_COUNT)); + Assert.assertThat(clusteredHitCount,is(CLUSTERED_HIT_COUNT)); + Assert.assertThat(cacheMissCount,is(CACHE_MISS_COUNT)); + Assert.assertThat(clusteredMissCount,is(CLUSTERED_MISS_COUNT)); + + } + + + +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java new file mode 100755 index 0000000000..292d93ea6b --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java @@ -0,0 +1,102 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.management; + +import static org.ehcache.clustered.management.AbstractClusteringManagementTest.cacheManager; +import static org.ehcache.clustered.management.AbstractClusteringManagementTest.sendManagementCallToCollectStats; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; + +import java.util.List; +import org.ehcache.Cache; +import org.junit.Assert; +import org.junit.Test; +import org.terracotta.management.model.call.ContextualReturn; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.history.RatioHistory; + +public class ClusteredStatisticsRatioTest extends AbstractClusteringManagementTest { + + private static final double CACHE_HIT_RATIO = .5d; + private static final double CLUSTERED_HIT_RATIO = .5d; + private static final double CACHE_MISS_RATIO = .5d; + private static final double CLUSTERED_MISS_RATIO = .5d; + + @Test + public void ratioTest() throws Exception { + ContextualReturn contextualReturn = sendManagementCallToCollectStats("Cache:HitRatio","Clustered:HitRatio","Cache:MissRatio","Clustered:MissRatio"); + assertThat(contextualReturn.hasExecuted(), is(true)); + + Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); + cache.put("one", "val1"); + cache.put("two", "val2"); + + cache.get("one"); //hit + cache.get("two"); //hit + + cache.get("three"); //miss + cache.get("four"); //miss + + + double cacheHitRatio = 0; + double clusteredHitRatio = 0; + double cacheMissRatio = 0; + double clusteredMissRatio = 0; + + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct values + do { + + // get the stats (we are getting the primitive counter, not the sample history) + List stats = waitForNextStats(); + for (ContextualStatistics stat : stats) { + if (stat.getContext().get("cacheName").equals("dedicated-cache-1")) { + + Sample[] samplesCacheHitRatio = stat.getStatistic(RatioHistory.class, "Cache:HitRatio").getValue(); + if(samplesCacheHitRatio.length > 0) { + cacheHitRatio = samplesCacheHitRatio[samplesCacheHitRatio.length - 1].getValue(); + } + + Sample[] samplesClusteredHitRatio = stat.getStatistic(RatioHistory.class, "Clustered:HitRatio").getValue(); + if(samplesClusteredHitRatio.length > 0) { + clusteredHitRatio = samplesClusteredHitRatio[samplesClusteredHitRatio.length - 1].getValue(); + } + + Sample[] samplesClusteredMissRatio = stat.getStatistic(RatioHistory.class, "Clustered:MissRatio").getValue(); + if(samplesClusteredMissRatio.length > 0) { + clusteredMissRatio = samplesClusteredMissRatio[samplesClusteredMissRatio.length - 1].getValue(); + } + + Sample[] samplesCacheMissRatio = stat.getStatistic(RatioHistory.class, "Cache:MissRatio").getValue(); + if(samplesCacheMissRatio.length > 0) { + cacheMissRatio = samplesCacheMissRatio[samplesCacheMissRatio.length - 1].getValue(); + } + } + } + } while( (cacheHitRatio != CACHE_HIT_RATIO) && (clusteredHitRatio != CLUSTERED_HIT_RATIO) && + (cacheMissRatio != CACHE_MISS_RATIO) && (clusteredMissRatio != CLUSTERED_MISS_RATIO)); + + Assert.assertThat(cacheHitRatio,is(CACHE_HIT_RATIO)); + Assert.assertThat(clusteredHitRatio,is(CLUSTERED_HIT_RATIO)); + Assert.assertThat(cacheMissRatio,is(CACHE_MISS_RATIO)); + Assert.assertThat(clusteredMissRatio,is(CLUSTERED_MISS_RATIO)); + + } + + + +} diff --git 
a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java index 3588c0731c..80c4209356 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java @@ -16,14 +16,12 @@ package org.ehcache.management.providers.statistics; import static java.util.Arrays.asList; -import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.MemoryUnit.MB; import static org.ehcache.config.units.EntryUnit.ENTRIES; import static org.hamcrest.CoreMatchers.is; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -32,16 +30,14 @@ import org.ehcache.CacheManager; import org.ehcache.config.Builder; import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.Configuration; import org.ehcache.config.ResourcePools; -import org.ehcache.core.EhcacheManager; -import org.ehcache.core.config.DefaultConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; -import org.ehcache.spi.service.Service; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -51,10 +47,6 @@ import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; -/** - * - * - */ @RunWith(Parameterized.class) public class HitCountTest { @@ -79,13 +71,12 @@ public static Collection data() { { newResourcePoolsBuilder().offheap(1, MB), Arrays.asList("OffHeap:HitCount"), Arrays.asList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, { newResourcePoolsBuilder().disk(1, MB), Arrays.asList("Disk:HitCount"), Arrays.asList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, - //2 tier + //2 tiers { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount"), Arrays.asList(2L,2L), CACHE_HIT_TOTAL}, { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:HitCount","Disk:HitCount"), Arrays.asList(2L,2L), CACHE_HIT_TOTAL}, //offheap and disk configuration below is not valid. 
Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] - //{ newResourcePoolsBuilder().offheap(1, MB).disk(2, MB), Arrays.asList("OffHeap:HitCount","Disk:HitCount"), Arrays.asList(1L,2L), CACHE_HIT_TOTAL}, - //3 tier + //3 tiers { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount","Disk:HitCount"), Arrays.asList(2L,0L,2L), CACHE_HIT_TOTAL}, { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount","Disk:HitCount"), Arrays.asList(1L,1L,2L), CACHE_HIT_TOTAL}, }); @@ -100,24 +91,24 @@ public HitCountTest(Builder resources, List sta @Test public void test() throws InterruptedException, IOException { - DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); - registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); - - Configuration cacheConfiguration = new DefaultConfiguration(EvictionTest.class.getClassLoader(), - new DefaultPersistenceConfiguration(diskPath.newFolder())); - - Collection services = new ArrayList(); - services.add(managementRegistry); CacheManager cacheManager = null; try { - cacheManager = new EhcacheManager(cacheConfiguration, services); - CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Long.class, String.class, resources).build(); - cacheManager.init(); - Cache cache = cacheManager.createCache("myCache", cacheConfig); + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); cache.put(1L, "1");//put in lowest tier cache.put(2L, "2");//put in lowest tier @@ -134,10 +125,10 @@ public void test() throws InterruptedException, IOException { long tierHitCountSum = 0; for (int i = 0; i < statNames.size(); i++) { - tierHitCountSum += StatsUtil.getStatValue(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + tierHitCountSum += StatsUtil.getExpectedValueFromCounterHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); } - long cacheHitCount = StatsUtil.getStatValue("Cache:HitCount", context, managementRegistry, cacheExpectedValue); + long cacheHitCount = StatsUtil.getExpectedValueFromCounterHistory("Cache:HitCount", context, managementRegistry, cacheExpectedValue); Assert.assertThat(tierHitCountSum, is(cacheHitCount)); } diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java 
b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java new file mode 100755 index 0000000000..38ff18fe15 --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java @@ -0,0 +1,174 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.CoreMatchers.is; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +@RunWith(Parameterized.class) +public class HitRatioTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + private final List getKeys; + private final Double cacheExpectedValue; + + @Parameterized.Parameters + public static Collection data() { + + List statNamesOnHeap = Arrays.asList("OnHeap:HitRatio"); + List statNamesOffHeap = Arrays.asList("OffHeap:HitRatio"); + List statNamesDisk = Arrays.asList("Disk:HitRatio"); + List statNamesOnHeapOffHeap = Arrays.asList("OnHeap:HitRatio","OffHeap:HitRatio"); + List statNamesOnHeapDisk = Arrays.asList("OnHeap:HitRatio","Disk:HitRatio"); + List statNamesThreeTiers = Arrays.asList("OnHeap:HitRatio","OffHeap:HitRatio","Disk:HitRatio"); + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1l,2l,3l) , Arrays.asList(1d), 1d }, //3 hits, 0 misses + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 hits, 2 misses + { 
newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(4l,5l) , Arrays.asList(0d), 0d }, //0 hits, 2 misses + + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1l,2l,3l), Arrays.asList(1d), 1d }, //3 hits, 0 misses + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 hits, 2 misses + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(4l,5l) , Arrays.asList(0d), 0d }, //0 hits, 2 misses + + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1l,2l,3l) , Arrays.asList(1d), 1d }, //3 hits, 0 misses + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 hits, 2 misses + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(4l,5l) , Arrays.asList(0d), 0d }, //0 hits, 2 misses + + //2 tiers + + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1l,2l,3l) , Arrays.asList(0d,1d), 1d }, //3 offheap hits, 0 misses + + /* + explanation of ratio calc: + + Each get checks the heap first. For every get there is a hit/miss on the heap tier. This test checks the heap 4 times. + The first 3 gets are misses and the last get is a hit. + Thus heapHitRatio = 1 hit / 4 attempts = .25 + + If the get key is not in the heap then it checks the tier below. In this case it checks offheap. + This test checks the offheap tier on the first 3 gets, and finds the key on each check. So there are 3 hits. + Thus offHeapHitRatio = 3 hits / 3 attempts = 1 + */ + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1l,2l,3l,1L) , Arrays.asList(.25d,1d), 1d },//3 offheap hits, 1 heap hit, 0 misses + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(0d,.5), .5d }, //2 offheap hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(4l,5l) , Arrays.asList(0d,0d), 0d }, //0 hits, 2 misses + + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1l,2l,3l,1L) , Arrays.asList(.25d,1d), 1d }, //3 disk hits, 1 heap hit, 0 misses + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(0d,.5), .5d }, //2 disk hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(4l,5l) , Arrays.asList(0d,0d), 0d }, //0 hits, 2 misses + //offheap and disk configuration is not valid. 
Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tiers + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1l,2l,3l,1L) , Arrays.asList(.25d,0d,1d), 1d }, //3 disk hits, 1 heap hit, 0 misses + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1l,2l,2L,1L), Arrays.asList(.25d,(1d/3d),1d), 1d},//3 disk hits, 1 offheap hit, 1 heap hit, 0 misses + }); + } + + public HitRatioTest(Builder resources, List statNames, List getKeys, List tierExpectedValues, Double cacheExpectedValue) { + this.resources = resources.build(); + this.statNames = statNames; + this.getKeys = getKeys; + this.tierExpectedValues = tierExpectedValues; + this.cacheExpectedValue = cacheExpectedValue; + } + + @Test + public void test() throws InterruptedException, IOException { + + CacheManager cacheManager = null; + + try { + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + cache.put(1L, "1");//put in lowest tier + cache.put(2L, "2");//put in lowest tier + cache.put(3L, "3");//put in lowest tier + + for(Long key : getKeys) { + cache.get(key); + } + + Context context = StatsUtil.createContext(managementRegistry); + + double tierHitRatio = 0; + for (int i = 0; i < statNames.size(); i++) { + tierHitRatio = StatsUtil.getExpectedValueFromRatioHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + Assert.assertThat(tierHitRatio, is(tierExpectedValues.get(i))); + } + + double hitRatio = StatsUtil.getExpectedValueFromRatioHistory("Cache:HitRatio", context, managementRegistry, cacheExpectedValue); + Assert.assertThat(hitRatio, is(cacheExpectedValue)); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java index 249e8f2562..553b4751fe 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java @@ -16,13 +16,11 @@ package org.ehcache.management.providers.statistics; import static java.util.Arrays.asList; -import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.MemoryUnit.MB; import static org.hamcrest.CoreMatchers.is; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collection; import java.util.List; @@ -31,16 +29,14 @@ import org.ehcache.CacheManager; import org.ehcache.config.Builder; import org.ehcache.config.CacheConfiguration; -import org.ehcache.config.Configuration; import org.ehcache.config.ResourcePools; -import org.ehcache.core.EhcacheManager; -import org.ehcache.core.config.DefaultConfiguration; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; -import org.ehcache.spi.service.Service; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -50,10 +46,6 @@ import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; -/** - * - * - */ @RunWith(Parameterized.class) public class MissCountTest { @@ -76,13 +68,12 @@ public static Collection data() { { newResourcePoolsBuilder().offheap(1, MB), Arrays.asList("OffHeap:MissCount"), Arrays.asList(2L), 2L }, { newResourcePoolsBuilder().disk(1, MB), Arrays.asList("Disk:MissCount"), Arrays.asList(2L), 2L }, - //2 tier + //2 tiers { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:MissCount","OffHeap:MissCount"), Arrays.asList(2L,2L), 2L}, { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:MissCount","Disk:MissCount"), Arrays.asList(2L,2L), 2L}, //offheap and disk configuration below is not valid. Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] - //{ newResourcePoolsBuilder().offheap(1, MB).disk(2, MB), Arrays.asList("OffHeap:MissCount","Disk:MissCount"), Arrays.asList(1L,2L), CACHE_HIT_TOTAL}, - //3 tier + //3 tiers { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:MissCount","OffHeap:MissCount","Disk:MissCount"), Arrays.asList(2L,2L,2L), 2L} }); } @@ -96,24 +87,23 @@ public MissCountTest(Builder resources, List st @Test public void test() throws InterruptedException, IOException { - DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); - registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); - - Configuration cacheConfiguration = new DefaultConfiguration(EvictionTest.class.getClassLoader(), - new DefaultPersistenceConfiguration(diskPath.newFolder())); - - Collection services = new ArrayList(); - services.add(managementRegistry); CacheManager cacheManager = null; try { - cacheManager = new EhcacheManager(cacheConfiguration, services); - CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService 
managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); - cacheManager.init(); - Cache cache = cacheManager.createCache("myCache", cacheConfig); + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); cache.put(1L, "1");//put in lowest tier cache.put(2L, "2");//put in lowest tier @@ -126,10 +116,10 @@ public void test() throws InterruptedException, IOException { long tierMissCountSum = 0; for (int i = 0; i < statNames.size(); i++) { - tierMissCountSum += StatsUtil.getStatValue(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + tierMissCountSum += StatsUtil.getExpectedValueFromCounterHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); } - long cacheMissCount = StatsUtil.getStatValue("Cache:MissCount", context, managementRegistry, cacheExpectedValue); + long cacheMissCount = StatsUtil.getExpectedValueFromCounterHistory("Cache:MissCount", context, managementRegistry, cacheExpectedValue); //A cache.get() checks every tier, so there is one miss per tier. However the cache miss count only counts 1 miss regardless of the number of tiers. Assert.assertThat(tierMissCountSum/statNames.size(), is(cacheMissCount)); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java new file mode 100755 index 0000000000..9cecfe962b --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java @@ -0,0 +1,168 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.CoreMatchers.is; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +/** + * + * + */ +@RunWith(Parameterized.class) +public class MissRatioTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + private final List getKeys; + private final Double cacheExpectedValue; + + @Parameterized.Parameters + public static Collection data() { + + List statNamesOnHeap = Arrays.asList("OnHeap:MissRatio"); + List statNamesOffHeap = Arrays.asList("OffHeap:MissRatio"); + List statNamesDisk = Arrays.asList("Disk:MissRatio"); + List statNamesOnHeapOffHeap = Arrays.asList("OnHeap:MissRatio","OffHeap:MissRatio"); + List statNamesOnHeapDisk = Arrays.asList("OnHeap:MissRatio","Disk:MissRatio"); + List statNamesThreeTiers = Arrays.asList("OnHeap:MissRatio","OffHeap:MissRatio","Disk:MissRatio"); + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1l,2l,3l) , Arrays.asList(0d), 0d }, //0 misses, 3 hits + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 misses, 2 hits + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(4l,5l) , Arrays.asList(1d), 1d }, //0 hits, 2 misses + + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1l,2l,3l), Arrays.asList(0d), 0d }, //0 misses, 3 hits + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 misses, 2 hits + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(4l,5l) , Arrays.asList(1d), 1d }, //2 misses, 0 hits + + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1l,2l,3l) , Arrays.asList(0d), 0d }, //0 misses, 3 hits + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 misses, 2 hits + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, 
Arrays.asList(4l,5l) , Arrays.asList(1d), 1d }, //2 misses, 0 hits + + //2 tiers + + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1l,2l,3l) , Arrays.asList(1d,0d), 0d }, //3 heap misses, 0 offheap misses, 3 hits + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1l,2l,3l,4L) , Arrays.asList(1d,.25d), .25d },//4 heap misses, 1 offheap miss, 3 hits + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(4L,5L) , Arrays.asList(1d,1d), 1d }, //2 heap misses, 2 offheap misses, 0 hits + + + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1l,2l,3l) , Arrays.asList(1d,0d), 0d }, //3 heap misses, 0 disk misses, 3 hits + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1l,2l,3l,4L) , Arrays.asList(1d,.25d), .25d },//4 heap misses, 1 disk miss, 3 hits + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(4L,5L) , Arrays.asList(1d,1d), 1d }, //2 heap misses, 2 disk misses, 0 hits + + //offheap and disk configuration is not valid. Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tiers + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1l,2l,3l,1L) , Arrays.asList(.75d,1d,0d), 0d }, //3 heap misses, 3 offheap misses, 0 disk misses, 4 hits + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1l,2l,2L,4L), Arrays.asList(.75d,1d,1d/3d), 1d/4d},//3 heap misses, 3 offheap misses, 1 disk miss, 3 hits + }); + + } + + public MissRatioTest(Builder resources, List statNames, List getKeys, List tierExpectedValues, Double cacheExpectedValue) { + this.resources = resources.build(); + this.statNames = statNames; + this.getKeys = getKeys; + this.tierExpectedValues = tierExpectedValues; + this.cacheExpectedValue = cacheExpectedValue; + } + + @Test + public void test() throws InterruptedException, IOException { + + CacheManager cacheManager = null; + + try { + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + cache.put(1L, "1");//put in lowest tier + cache.put(2L, "2");//put in lowest tier + cache.put(3L, "3");//put in lowest tier + + for(Long key : getKeys) { + cache.get(key); + } + + Context context = StatsUtil.createContext(managementRegistry); + + double tierMissRatio = 0; + for (int i = 0; i < statNames.size(); i++) { + tierMissRatio = StatsUtil.getExpectedValueFromRatioHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + Assert.assertThat(tierMissRatio, 
is(tierExpectedValues.get(i))); + } + + double hitRatio = StatsUtil.getExpectedValueFromRatioHistory("Cache:MissRatio", context, managementRegistry, cacheExpectedValue); + Assert.assertThat(hitRatio, is(cacheExpectedValue)); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } + +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java index cb9485f800..d69149ae6e 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java @@ -35,8 +35,6 @@ import org.terracotta.management.model.context.Context; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.history.CounterHistory; -import org.terracotta.management.model.stats.history.RatioHistory; -import org.terracotta.management.model.stats.history.SizeHistory; import static org.junit.Assert.assertThat; @@ -48,175 +46,6 @@ public class StandardEhcacheStatisticsTest { private final EhcacheStatisticsProviderConfiguration EHCACHE_STATS_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); - @Test - public void statsCacheMissTest() throws InterruptedException { - DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager1"); - registryConfiguration.addConfiguration(EHCACHE_STATS_CONFIG); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder( - Long.class, String.class, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, MemoryUnit.MB).offheap(10, MemoryUnit.MB)) - .build(); - - CacheManager cacheManager = null; - - try { - cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("aCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - Cache cache = cacheManager.getCache("aCache", Long.class, String.class); - cache.put(1L, "one"); - - cache.get(1L);//HIT - cache.get(1L);//HIT - cache.get(2L);//MISS - cache.get(3L);//MISS - - Thread.sleep(1000); - - Context context = StatsUtil.createContext(managementRegistry); - - ContextualStatistics missCounter = managementRegistry.withCapability("StatisticsCapability") - .queryStatistics(Arrays.asList(/*"Cache:MissRate",*/ "Cache:MissCount", "Cache:MissRatio"))//TODO add Cache:MissRate once understand how to verify correct - .on(context) - .build() - .execute() - .getSingleResult(); - - assertThat(missCounter.size(), Matchers.is(2)); - - CounterHistory missCountCounterHistory = missCounter.getStatistic(CounterHistory.class, "Cache:MissCount"); - while(!StatsUtil.isHistoryReady(missCountCounterHistory, 0L)) {} - int mostRecentIndex = missCountCounterHistory.getValue().length - 1; - assertThat(missCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(2L)); - - RatioHistory ratioHistory = missCounter.getStatistic(RatioHistory.class, "Cache:MissRatio"); - mostRecentIndex = ratioHistory.getValue().length - 1; - // 2 hits, 2 misses -> HitRatio is 0.5 - assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0.5d)); - 
} - finally { - if(cacheManager != null) { - cacheManager.close(); - } - } - } - - @Test - public void statsCacheHitTest() throws InterruptedException { - DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager2"); - registryConfiguration.addConfiguration(EHCACHE_STATS_CONFIG); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); - - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, - ResourcePoolsBuilder.newResourcePoolsBuilder().heap(1, MemoryUnit.MB).offheap(10, MemoryUnit.MB)) - .build(); - - CacheManager cacheManager = null; - - try { - cacheManager = CacheManagerBuilder.newCacheManagerBuilder() - .withCache("bCache", cacheConfiguration) - .using(managementRegistry) - .build(true); - - Cache cache = cacheManager.getCache("bCache", Long.class, String.class); - cache.put(1L, "1"); - cache.put(2L, "2"); - cache.put(3L, "3"); - - Thread.sleep(1000); - - Context context = StatsUtil.createContext(managementRegistry); - - ContextualStatistics contextualStatistics = managementRegistry.withCapability("StatisticsCapability") - .queryStatistics(Arrays.asList(/*"Cache:HitRate",*/ "Cache:HitCount", "Cache:HitRatio"))//TODO add Cache:HitRate once understand how to verify correct - .on(context) - .build() - .execute() - .getSingleResult(); - - assertThat(contextualStatistics.size(), Matchers.is(2)); - - /////////////////////// - // NO HITS, NO MISSES// - /////////////////////// - - CounterHistory hitCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:HitCount"); - int mostRecentIndex = hitCountCounterHistory.getValue().length - 1; - assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0L)); - - RatioHistory ratioHistory = contextualStatistics.getStatistic(RatioHistory.class, "Cache:HitRatio"); - mostRecentIndex = ratioHistory.getValue().length - 1; - // no hits, no misses -> HitRatio is NaN - assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(Double.NaN)); - - /////////////////////// - // 3 HITS, NO MISSES // - /////////////////////// - - cache.get(1L);//HIT - cache.get(2L);//HIT - cache.get(3L);//HIT - - Thread.sleep(1000); - - contextualStatistics = managementRegistry.withCapability("StatisticsCapability") - .queryStatistics(Arrays.asList("Cache:HitCount", "Cache:HitRatio")) - .on(context) - .build() - .execute() - .getSingleResult(); - - hitCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:HitCount"); - while(!StatsUtil.isHistoryReady(hitCountCounterHistory, 0L)) {} - mostRecentIndex = hitCountCounterHistory.getValue().length - 1; - assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3L)); - - ratioHistory = contextualStatistics.getStatistic(RatioHistory.class, "Cache:HitRatio"); - mostRecentIndex = ratioHistory.getValue().length - 1; - // 3 hits, no misses -> HitRatio is 1 - assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(1.0)); - - /////////////////////// - // 3 HITS, 1 MISS // - /////////////////////// - - cache.get(4L);//MISS - - Thread.sleep(1000); - - contextualStatistics = managementRegistry.withCapability("StatisticsCapability") - .queryStatistics(Arrays.asList("Cache:MissCount", "Cache:HitCount", "Cache:HitRatio")) - .on(context) - .build() - .execute() - 
.getSingleResult(); - - CounterHistory missCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:MissCount"); - mostRecentIndex = missCountCounterHistory.getValue().length - 1; - while(!StatsUtil.isHistoryReady(missCountCounterHistory, 0L)) {} - assertThat(missCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(1L)); - - ratioHistory = contextualStatistics.getStatistic(RatioHistory.class, "Cache:HitRatio"); - mostRecentIndex = ratioHistory.getValue().length - 1; - // 3 hits, 1 misses -> HitRatio is 0.75 - assertThat(ratioHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(0.75)); - - hitCountCounterHistory = contextualStatistics.getStatistic(CounterHistory.class, "Cache:HitCount"); - mostRecentIndex = hitCountCounterHistory.getValue().length - 1; - - assertThat(hitCountCounterHistory.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(3L)); - } - finally { - if(cacheManager != null) { - cacheManager.close(); - } - } - } - @Test public void statsClearCacheTest() throws InterruptedException { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java index 0667db5258..7005e16578 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java @@ -24,6 +24,7 @@ import org.terracotta.management.model.stats.AbstractStatisticHistory; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.history.CounterHistory; +import org.terracotta.management.model.stats.history.RatioHistory; import org.terracotta.management.registry.ResultSet; import org.terracotta.management.registry.StatisticQuery; @@ -76,7 +77,12 @@ public static Context createContext(ManagementRegistryService managementRegistry .with(firstCacheCtx.getName(), firstCacheCtx.getValue()); } - public static long getStatValue(String statName, Context context, ManagementRegistryService managementRegistry, long expectedResult) { + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your expectedResult, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong expectedResult. 
+ */ + public static long getExpectedValueFromCounterHistory(String statName, Context context, ManagementRegistryService managementRegistry, long expectedResult) { StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") .queryStatistics(Arrays.asList(statName)) @@ -89,15 +95,50 @@ public static long getStatValue(String statName, Context context, ManagementRegi ContextualStatistics statisticsContext = counters.getResult(context); - Assert.assertThat(counters.size(), Matchers.is(1)); + Assert.assertThat(counters.size(), Matchers.is(1)); - CounterHistory counterHistory = statisticsContext.getStatistic(CounterHistory.class, statName); + CounterHistory counterHistory = statisticsContext.getStatistic(CounterHistory.class, statName); - if (counterHistory.getValue().length > 0) { - int mostRecentIndex = counterHistory.getValue().length - 1; - value = counterHistory.getValue()[mostRecentIndex].getValue(); - System.out.println("statName: " + statName + " value: " + value + " expectedResult: " + expectedResult); - } + if (counterHistory.getValue().length > 0) { + int mostRecentIndex = counterHistory.getValue().length - 1; + value = counterHistory.getValue()[mostRecentIndex].getValue(); + System.out.println("statName: " + statName + " value: " + value + " expectedResult: " + expectedResult); + } + + }while(value != expectedResult); + + Assert.assertThat(value, Matchers.is(expectedResult)); + + return value; + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your expectedResult, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong expectedResult. 
+ */ + public static double getExpectedValueFromRatioHistory(String statName, Context context, ManagementRegistryService managementRegistry, double expectedResult) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + double value = 0; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + Assert.assertThat(counters.size(), Matchers.is(1)); + + RatioHistory ratioHistory = statisticsContext.getStatistic(RatioHistory.class, statName); + + if (ratioHistory.getValue().length > 0) { + int mostRecentIndex = ratioHistory.getValue().length - 1; + value = ratioHistory.getValue()[mostRecentIndex].getValue(); + System.out.println("statName: " + statName + " value: " + value + " expectedResult: " + expectedResult); + } }while(value != expectedResult); From c18727f356dc02c836c6e6d085917f0197b6e8d2 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Thu, 3 Nov 2016 17:05:26 +0530 Subject: [PATCH 110/218] Closes #1580 Active crashes on sync when the cache manager is not configured with a default server resource --- .../clustered/server/EhcacheActiveEntity.java | 8 ++++++-- .../server/EhcacheActiveEntityTest.java | 19 +++++++++++++++++++ 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 8ffca46d91..229492049d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -340,8 +340,12 @@ private void addInflightInvalidationsForStrongCache(ClientDescriptor clientDescr public void synchronizeKeyToPassive(PassiveSynchronizationChannel syncChannel, int concurrencyKey) { LOGGER.info("Sync started for concurrency key {}.", concurrencyKey); if (concurrencyKey == DEFAULT_KEY) { - ServerSideConfiguration configuration = - new ServerSideConfiguration(ehcacheStateService.getDefaultServerResource(), ehcacheStateService.getSharedResourcePools()); + ServerSideConfiguration configuration; + if (ehcacheStateService.getDefaultServerResource() == null) { + configuration = new ServerSideConfiguration(ehcacheStateService.getSharedResourcePools()); + } else { + configuration = new ServerSideConfiguration(ehcacheStateService.getDefaultServerResource(), ehcacheStateService.getSharedResourcePools()); + } Map storeConfigs = new HashMap<>(); for (String storeName : ehcacheStateService.getStores()) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index e93f23193c..f88e71a942 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -2652,6 +2652,25 @@ public void testSyncToPassive() throws Exception { } + @Test + public void testSyncToPassiveWithoutDefaultServerResource() throws Exception { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new 
EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder().build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, 1); + } + @Test public void testLoadExistingRecoversInflightInvalidationsForEventualCache() { final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); From 090790c3086cf3f6c3bacb15c2633f6c308fbf8e Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Mon, 7 Nov 2016 12:10:05 +0530 Subject: [PATCH 111/218] Closes #1577 Align the concurrency keys of server store operation messages and replicated messages --- .../messages/PassiveReplicationMessage.java | 2 +- .../server/EhcacheActiveEntityTest.java | 41 +++++++++++++++++++ 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java index ad2da535a1..cb20bb3b25 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java @@ -132,7 +132,7 @@ public ReplicationOp operation() { @Override public long concurrencyKey() { - return (this.cacheId.hashCode() + key); + return key; } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index f88e71a942..bbfde482cc 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -30,6 +30,7 @@ import org.ehcache.clustered.common.internal.exceptions.ResourceBusyException; import org.ehcache.clustered.common.internal.exceptions.ResourceConfigurationException; import org.ehcache.clustered.common.internal.exceptions.ServerMisconfigurationException; +import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Failure; @@ -39,6 +40,7 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; @@ -2897,6 +2899,45 @@ public void testPromotedActiveIgnoresDuplicateMessages() throws MessageCodecExce verify(entityMessenger, 
times(0)).messageSelfAndDeferRetirement(any(), any()); } + @Test + public void testReplicationMessageAndOriginalServerStoreOpMessageHasSameConcurrency() throws MessageCodecException { + + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + IEntityMessenger entityMessenger = registry.getEntityMessenger(); + + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .build(); + activeEntity.invoke(client, MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + activeEntity.invoke(client, MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfigBuilder() + .shared("primary") + .build(); + activeEntity.invoke(client, MESSAGE_FACTORY.createServerStore("testCache", serverStoreConfiguration)); + + reset(entityMessenger); + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("testCache", CLIENT_ID); + EhcacheEntityMessage getAndAppend = messageFactory.getAndAppendOperation(1L, createPayload(1L)); + activeEntity.invoke(client, getAndAppend); + + ArgumentCaptor captor = ArgumentCaptor.forClass(PassiveReplicationMessage.ChainReplicationMessage.class); + verify(entityMessenger).messageSelfAndDeferRetirement(any(), captor.capture()); + PassiveReplicationMessage.ChainReplicationMessage replicatedMessage = captor.getValue(); + + assertThat(replicatedMessage.concurrencyKey(), is(((ConcurrentEntityMessage) getAndAppend).concurrencyKey())); + } + + + private void assertSuccess(EhcacheEntityResponse response) throws Exception { if (!response.equals(EhcacheEntityResponse.Success.INSTANCE)) { throw ((Failure) response).getCause(); From 1f06ccbd0351fbc7a8355bf789520e3d0e608ab4 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 3 Nov 2016 11:58:42 +0000 Subject: [PATCH 112/218] :memo: Fix #1566 Replaced obsolete use of fixed with dedicated --- docs/src/docs/asciidoc/user/clustered-cache.adoc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/src/docs/asciidoc/user/clustered-cache.adoc b/docs/src/docs/asciidoc/user/clustered-cache.adoc index 8360c4c0cd..14e6d32695 100644 --- a/docs/src/docs/asciidoc/user/clustered-cache.adoc +++ b/docs/src/docs/asciidoc/user/clustered-cache.adoc @@ -134,9 +134,9 @@ include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/clie Since the server resource identifier is not explicitly passed, this resource pool will be consumed out of the default server resource provided in Step 3. This demonstrates that a cache manager with clustering support can have multiple resource pools created out of several server off-heap resources. <4> Provide the cache configuration to be created. -<5> `ClusteredResourcePoolBuilder.fixed(String , long , MemoryUnit)` allocates a fixed pool of storage to the cache from the specified server off-heap resource. - In this example, a fixed pool of 32MB is allocated for `clustered-cache` from `primary-server-resource`. 
-<6> `ClusteredResourcePoolBuilder.shared(String)`, passing the name of the resource pool specifies that `shared-cache-1` shares the storage resources with other caches using the same resource pool (`resource-pool-a`). +<5> `ClusteredResourcePoolBuilder.clusteredDedicated(String , long , MemoryUnit)` allocates a dedicated pool of storage to the cache from the specified server off-heap resource. + In this example, a dedicated pool of 32MB is allocated for `clustered-cache` from `primary-server-resource`. +<6> `ClusteredResourcePoolBuilder.clusteredShared(String)`, passing the name of the resource pool specifies that `shared-cache-1` shares the storage resources with other caches using the same resource pool (`resource-pool-a`). <7> Configures another cache (`shared-cache-2`) that shares the resource pool (`resource-pool-a`) with `shared-cache-1`. <8> Creates a fully initialized cache manager with the clustered caches. @@ -166,7 +166,7 @@ include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/clie ---- <1> Configuring the heap tier for cache. -<2> Configuring the clustered tier of fixed size from the server off-heap resource using `ClusteredResourcePoolBuilder`. +<2> Configuring the clustered tier of dedicated size from the server off-heap resource using `ClusteredResourcePoolBuilder`. The equivalent XML configuration is as follows: From 3b1ad7fb87eac943e2fa8ab2582e979cdc13eceb Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 3 Nov 2016 14:05:18 +0000 Subject: [PATCH 113/218] :memo: Fix #1564 CacheManager level configuration for JCache * Fixed return types in methods of EhcacheCachingProvider as they referenced a non public type. --- .../jsr107/EhcacheCachingProvider.java | 4 +-- ...che107ConfigurationIntegrationDocTest.java | 27 +++++++++++++++++++ docs/src/docs/asciidoc/user/107.adoc | 16 +++++++++++ 3 files changed, 45 insertions(+), 2 deletions(-) diff --git a/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java b/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java index f9b679424e..0a416d4f2b 100644 --- a/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java +++ b/107/src/main/java/org/ehcache/jsr107/EhcacheCachingProvider.java @@ -98,7 +98,7 @@ public CacheManager getCacheManager(URI uri, ClassLoader classLoader, Properties * * @return a cache manager */ - public Eh107CacheManager getCacheManager(URI uri, Configuration config) { + public CacheManager getCacheManager(URI uri, Configuration config) { return getCacheManager(uri, config, new Properties()); } @@ -112,7 +112,7 @@ public Eh107CacheManager getCacheManager(URI uri, Configuration config) { * * @return a cache manager */ - public Eh107CacheManager getCacheManager(URI uri, Configuration config, Properties properties) { + public CacheManager getCacheManager(URI uri, Configuration config, Properties properties) { Eh107CacheManager cacheManager; ConcurrentMap byURI; ClassLoader classLoader = config.getClassLoader(); diff --git a/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java b/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java index bc4ef947b8..5d087bda1c 100644 --- a/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java +++ b/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java @@ -21,17 +21,23 @@ import org.ehcache.config.CacheRuntimeConfiguration; import org.ehcache.config.ResourceType; import org.ehcache.config.builders.ResourcePoolsBuilder; 
+import org.ehcache.core.config.DefaultConfiguration; import org.ehcache.core.internal.util.ValueSuppliers; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; import org.ehcache.jsr107.Eh107Configuration; +import org.ehcache.jsr107.EhcacheCachingProvider; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pany.domain.Client; import com.pany.domain.Product; +import java.io.File; import java.util.Random; import java.util.concurrent.TimeUnit; @@ -66,6 +72,8 @@ public class EhCache107ConfigurationIntegrationDocTest { private CacheManager cacheManager; private CachingProvider cachingProvider; + @Rule + public TemporaryFolder tempFolder = new TemporaryFolder(); @Before public void setUp() throws Exception { @@ -267,4 +275,23 @@ public void testCacheThroughAtomicsXMLValid() throws Exception { getClass().getResource("/org/ehcache/docs/ehcache-jsr107-cache-through.xml").toURI(), getClass().getClassLoader()); } + + @Test + public void testCacheManagerLevelConfiguration() throws Exception { + // tag::ehcacheCacheManagerConfigurationExample[] + CachingProvider cachingProvider = Caching.getCachingProvider(); + EhcacheCachingProvider ehcacheProvider = (EhcacheCachingProvider) cachingProvider; // <1> + + DefaultConfiguration configuration = new DefaultConfiguration(ehcacheProvider.getDefaultClassLoader(), + new DefaultPersistenceConfiguration(getPersistenceDirectory())); // <2> + + CacheManager cacheManager = ehcacheProvider.getCacheManager(ehcacheProvider.getDefaultURI(), configuration); // <3> + // end::ehcacheCacheManagerConfigurationExample[] + + assertThat(cacheManager, notNullValue()); + } + + private File getPersistenceDirectory() { + return tempFolder.getRoot(); + } } diff --git a/docs/src/docs/asciidoc/user/107.adoc b/docs/src/docs/asciidoc/user/107.adoc index 7b444301ef..29eb2a75ed 100644 --- a/docs/src/docs/asciidoc/user/107.adoc +++ b/docs/src/docs/asciidoc/user/107.adoc @@ -80,6 +80,22 @@ include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107Configuratio === Building the configuration using Ehcache APIs +==== CacheManager level configuration + +If you need to configure features at the `CacheManager` level, like persistence directory, you will have to use provider specific APIs. + +The way you do this is as follows: + +[source,java,indent=0] +---- +include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheCacheManagerConfigurationExample] +---- +<1> Cast the `CachingProvider` into the Ehcache specific implementation `org.ehcache.jsr107.EhcacheCachingProvider`, +<2> Create a configuration using the specific Ehcache `DefaultConfiguration` and pass it some `CacheManager` level configurations, +<3> Create the `CacheManager` using the method that takes an Ehcache configuration in parameter. + +==== Cache level configuration + You can also create a JSR-107 `Cache` using an Ehcache `CacheConfiguration`. When using this mechanism, no JSR-107 `CompleteConfiguration` is used and so you cannot get to one. 
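
NOTE: The following sketch is illustrative only and is not part of the patch above. It shows, under stated assumptions, how the `CacheManager` level configuration introduced in this patch can combine with a cache-level Ehcache configuration wrapped for JSR-107. The persistence directory, cache alias, key/value types and pool sizes are assumptions, not values taken from the patch.

[source,java]
----
import java.io.File;

import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;

import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.config.units.EntryUnit;
import org.ehcache.config.units.MemoryUnit;
import org.ehcache.core.config.DefaultConfiguration;
import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration;
import org.ehcache.jsr107.Eh107Configuration;
import org.ehcache.jsr107.EhcacheCachingProvider;

public class PersistentJsr107Sketch {
  public static void main(String[] args) {
    // Provider-specific bootstrap: pass CacheManager-level configuration, here a persistence directory.
    // The directory under java.io.tmpdir is an assumption made for this sketch.
    EhcacheCachingProvider provider = (EhcacheCachingProvider) Caching.getCachingProvider();
    DefaultConfiguration configuration = new DefaultConfiguration(provider.getDefaultClassLoader(),
        new DefaultPersistenceConfiguration(new File(System.getProperty("java.io.tmpdir"), "jsr107-data")));
    CacheManager cacheManager = provider.getCacheManager(provider.getDefaultURI(), configuration);

    // Cache-level configuration expressed with the Ehcache API, then wrapped for the JSR-107 API.
    CacheConfiguration<Long, String> persistentConfig = CacheConfigurationBuilder
        .newCacheConfigurationBuilder(Long.class, String.class,
            ResourcePoolsBuilder.newResourcePoolsBuilder()
                .heap(100, EntryUnit.ENTRIES)
                .disk(10, MemoryUnit.MB, true)) // persistent disk tier, stored under the directory above
        .build();

    Cache<Long, String> cache = cacheManager.createCache("persistent-cache",
        Eh107Configuration.fromEhcacheCacheConfiguration(persistentConfig));

    cache.put(42L, "disk-backed");
    cacheManager.close();
  }
}
----

The disk tier in this sketch is only expected to work because a persistence directory was supplied at the `CacheManager` level; without that configuration, creating a disk-backed cache should fail.
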
From d5649714d9f97e0b3f78eb4866b298fd919e7411 Mon Sep 17 00:00:00 2001
From: Louis Jacomet
Date: Thu, 3 Nov 2016 12:13:28 +0000
Subject: [PATCH 114/218] :memo: Fix #1549 Document system property replacement for XML

---
 docs/src/docs/asciidoc/user/xml.adoc | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/docs/src/docs/asciidoc/user/xml.adoc b/docs/src/docs/asciidoc/user/xml.adoc
index 2d2956cd36..6c00ad7629 100644
--- a/docs/src/docs/asciidoc/user/xml.adoc
+++ b/docs/src/docs/asciidoc/user/xml.adoc
@@ -80,6 +80,25 @@ A `` element may contain all the same child elements as a `> to inspire you.
+== Property replacement in XML configuration files
+
+Java system properties can be referenced inside XML configuration files.
+The property value replaces the property reference during configuration parsing.
+
+This is done by using the `${prop.name}` syntax.
+It is supported in all attribute and element values that accept the `${}` characters as legal characters.
+This currently rules out all numeric values, which are mostly used for sizing, and identifiers, such as cache and template names.
+
+WARNING: If the system property does not exist, configuration parsing will fail.
+
+A classic use case for this feature is specifying the disk storage location in the `directory` attribute of the `persistence` tag:
+
+[source,xml]
+----
+<persistence directory="${user.home}/cache-data"/> <!--1-->
+----
+<1> Here `user.home` will be replaced by the value of the system property, something like `/home/user`
+
 == XML programmatic parsing
 
 NOTE: If you are obtaining your `CacheManager` through the JSR-107 API, what follows is done automatically

From 075c8b6d675bd9d6f9d8d6ecc4591f042dcaf8db Mon Sep 17 00:00:00 2001
From: Albin Suresh
Date: Mon, 7 Nov 2016 17:43:57 +0530
Subject: [PATCH 115/218] Closes #1590 Create eventual client invalidation trackers during lifecycle sync

---
 .../org/ehcache/clustered/server/EhcachePassiveEntity.java | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java
index 71a0d65846..1526721a06 100644
--- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java
+++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java
@@ -148,7 +148,8 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws
     }
   }
 
-  private void untrackHashInvalidationForEventualCache(InvalidationCompleteMessage message) {InvalidationCompleteMessage invalidationCompleteMessage = message;
+  private void untrackHashInvalidationForEventualCache(InvalidationCompleteMessage message) {
+    InvalidationCompleteMessage invalidationCompleteMessage = message;
     ehcacheStateService.getInvalidationTracker(invalidationCompleteMessage.getCacheId()).getInvalidationMap().computeIfPresent(invalidationCompleteMessage.getKey(), (key, count) -> {
       if (count == 1) {
         return null;
@@ -226,8 +227,12 @@ private void invokeSyncOperation(EntitySyncMessage message) throws ClusterExcept
     ehcacheStateService.configure(stateSyncMessage.getConfiguration());
     management.sharedPoolsConfigured();
+
+
     for (Map.Entry entry : stateSyncMessage.getStoreConfigs().entrySet()) {
       ehcacheStateService.createStore(entry.getKey(), entry.getValue());
+      if(entry.getValue().getConsistency() == Consistency.EVENTUAL) {
+        ehcacheStateService.addInvalidationtracker(entry.getKey());
+      }
       management.serverStoreCreated(entry.getKey());
     }
stateSyncMessage.getTrackedClients().stream().forEach(id -> ehcacheStateService.getClientMessageTracker().add(id)); From b074b6e33a049b8005770ed8249d99eaabe2b831 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 7 Nov 2016 14:18:31 +0100 Subject: [PATCH 116/218] :memo: Documentation setup changes for Ehcache 3.2 * Replace sourcedir31 with sourcedir32 for website generation * Bump a few 3.1 to 3.2 where needed --- .../clustered-dist/src/assemble/README.txt | 6 ++--- docs/src/docs/asciidoc/user/107.adoc | 24 ++++++++--------- .../asciidoc/user/cache-event-listeners.adoc | 10 +++---- .../docs/asciidoc/user/caching-concepts.adoc | 4 +-- .../docs/asciidoc/user/caching-patterns.adoc | 4 +-- .../src/docs/asciidoc/user/caching-terms.adoc | 4 +-- .../docs/asciidoc/user/clustered-cache.adoc | 22 ++++++++-------- docs/src/docs/asciidoc/user/common.adoc | 6 ++--- .../docs/asciidoc/user/eviction-advisor.adoc | 6 ++--- docs/src/docs/asciidoc/user/examples.adoc | 6 ++--- docs/src/docs/asciidoc/user/expiry.adoc | 14 +++++----- .../docs/asciidoc/user/getting-started.adoc | 26 +++++++++---------- docs/src/docs/asciidoc/user/index.adoc | 8 +++--- docs/src/docs/asciidoc/user/management.adoc | 12 ++++----- .../asciidoc/user/serializers-copiers.adoc | 8 +++--- docs/src/docs/asciidoc/user/thread-pools.adoc | 14 +++++----- docs/src/docs/asciidoc/user/usermanaged.adoc | 14 +++++----- docs/src/docs/asciidoc/user/writers.adoc | 8 +++--- docs/src/docs/asciidoc/user/xa.adoc | 20 +++++++------- docs/src/docs/asciidoc/user/xml.adoc | 10 +++---- docs/src/docs/asciidoc/user/xsds.adoc | 12 ++++----- 21 files changed, 119 insertions(+), 119 deletions(-) diff --git a/clustered/clustered-dist/src/assemble/README.txt b/clustered/clustered-dist/src/assemble/README.txt index 4ab05faa32..9be3256669 100644 --- a/clustered/clustered-dist/src/assemble/README.txt +++ b/clustered/clustered-dist/src/assemble/README.txt @@ -1,11 +1,11 @@ -Ehcache 3.1 +Ehcache 3.2 ========================= -Welcome to version 3.1 of Ehcache with Terracotta-based distributed caching. +Welcome to version 3.2 of Ehcache with Terracotta-based distributed caching. Contents --------------- - Included in this kit are the following: + Included in this kit are the following: README.txt -- This file server -- Directory containing libraries, executables, and other supporting files for the Terracotta Server └─plugins -- Directory containing libraries for the applications installed in the server diff --git a/docs/src/docs/asciidoc/user/107.adoc b/docs/src/docs/asciidoc/user/107.adoc index 29eb2a75ed..0653afeb9d 100644 --- a/docs/src/docs/asciidoc/user/107.adoc +++ b/docs/src/docs/asciidoc/user/107.adoc @@ -1,9 +1,9 @@ --- --- = The Ehcache 3.x JSR-107 Provider -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -41,7 +41,7 @@ Here is some sample code that demonstrates the usage of the basic JCache configu [source,java] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=basicConfigurationExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=basicConfigurationExample] ---- <1> Retrieves the default `CachingProvider` implementation from the application's classpath. This method will work if and only if there is only one JCache implementation jar in the classpath. 
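
NOTE: The `basicConfigurationExample` tag referenced above is pulled in from the test sources and is not shown in this patch. As a reminder of the bootstrap the callout describes, here is a rough, self-contained sketch using the plain JSR-107 API; the cache alias and the key/value types are illustrative assumptions.

[source,java]
----
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.spi.CachingProvider;

public class BasicJsr107Sketch {
  public static void main(String[] args) {
    // Resolves the provider; this works only if a single JCache implementation (here Ehcache) is on the classpath.
    CachingProvider provider = Caching.getCachingProvider();
    CacheManager cacheManager = provider.getCacheManager(); // default URI and class loader

    MutableConfiguration<Long, String> configuration = new MutableConfiguration<Long, String>()
        .setTypes(Long.class, String.class); // store-by-value by default

    Cache<Long, String> cache = cacheManager.createCache("jCache", configuration);
    cache.put(1L, "one");
    System.out.println(cache.get(1L));

    cacheManager.close();
  }
}
----
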
@@ -71,7 +71,7 @@ you can still get to the underlying Ehcache `RuntimeCacheConfiguration`: [source,java,indent=0] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=mutableConfigurationExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=mutableConfigurationExample] ---- <1> Create a JSR-107 cache using the `MutableConfiguration` from the specification <2> Get to the JSR-107 `CompleteConfiguration` @@ -88,7 +88,7 @@ The way you do this is as follows: [source,java,indent=0] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheCacheManagerConfigurationExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheCacheManagerConfigurationExample] ---- <1> Cast the `CachingProvider` into the Ehcache specific implementation `org.ehcache.jsr107.EhcacheCachingProvider`, <2> Create a configuration using the specific Ehcache `DefaultConfiguration` and pass it some `CacheManager` level configurations, @@ -101,7 +101,7 @@ When using this mechanism, no JSR-107 `CompleteConfiguration` is used and so you [source,java,indent=0] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheBasedConfigurationExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=ehcacheBasedConfigurationExample] ---- <1> Create an Ehcache `CacheConfiguration` - through a builder as shown here or alternatively use an XML configuration (as described in the following section). <2> Use the configuration with JSR-107 API by wrapping it @@ -118,12 +118,12 @@ Find below the XML configuration followed by the code to use it from JSR-107: [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml[] +include::{sourcedir32}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-config.xml[] ---- [source,java,indent=0] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107UsingXMLConfigExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107UsingXMLConfigExample] ---- <1> Invoking `javax.cache.spi.CachingProvider.getCacheManager(java.net.URI, java.lang.ClassLoader)` <2> and passing in a URI that resolves to an Ehcache XLM configuration file. @@ -147,7 +147,7 @@ You can do this at two different levels: [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml[lines=17..-1] +include::{sourcedir32}/107/src/test/resources/org/ehcache/docs/ehcache-107-mbeans-cache-manager-config.xml[lines=17..-1] ---- <1> Using the JSR-107 service extension, you can enable MBeans by default @@ -169,7 +169,7 @@ constraint. All that's needed is adding a `jsr107` service in your XML configura [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml[] +include::{sourcedir32}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-template-override.xml[] ---- <1> First, declare a namespace for the 107 extension, e.g. 
`jsr107` <2> Within a `service` element at the top of your configuration, add a `jsr107:defaults` element @@ -188,7 +188,7 @@ without modifying the application code. [source,java,indent=0] ---- -include::{sourcedir31}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107SupplementWithTemplatesExample] +include::{sourcedir32}/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java[tag=jsr107SupplementWithTemplatesExample] ---- <1> Assume existing JSR-107 configuration code, which is store-by-value by default <2> that creates JSR-107 `Cache` @@ -253,5 +253,5 @@ If you need _Ehcache through JCache_ behaviour, the following shows the relevant [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-cache-through.xml[tag=cacheThroughCAS] +include::{sourcedir32}/107/src/test/resources/org/ehcache/docs/ehcache-jsr107-cache-through.xml[tag=cacheThroughCAS] ---- diff --git a/docs/src/docs/asciidoc/user/cache-event-listeners.adoc b/docs/src/docs/asciidoc/user/cache-event-listeners.adoc index 02faf6a0ac..235b0e1c34 100644 --- a/docs/src/docs/asciidoc/user/cache-event-listeners.adoc +++ b/docs/src/docs/asciidoc/user/cache-event-listeners.adoc @@ -1,9 +1,9 @@ --- --- = Cache Event Listeners -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -20,7 +20,7 @@ registered with. [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cacheEventListener] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cacheEventListener] ---- <1> Create a `CacheEventListenerConfiguration` using the builder indicating the listener and the events to receive (in this case create and update events) @@ -59,7 +59,7 @@ Cache event listeners may also be added and removed while the cache is being use [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=registerListenerAtRuntime] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=registerListenerAtRuntime] ---- <1> Create a `CacheEventListener` implementation instance. 
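The `cacheEventListener` tag referenced above boils down to registering a listener configuration when the cache is built. A sketch, assuming the `CacheEventListenerConfigurationBuilder` API from `org.ehcache.config.builders`; the listener body, alias and sizes are invented.

[source,java,indent=0]
----
import org.ehcache.CacheManager;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder;
import org.ehcache.config.builders.CacheManagerBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.event.CacheEventListener;
import org.ehcache.event.EventType;

public class ListenerSketch {
  public static void main(String[] args) {
    CacheEventListener<Long, String> listener =
        event -> System.out.println(event.getType() + " - key " + event.getKey());

    CacheEventListenerConfigurationBuilder listenerConfiguration = CacheEventListenerConfigurationBuilder
        .newEventListenerConfiguration(listener, EventType.CREATED, EventType.UPDATED) // events to receive
        .unordered()
        .asynchronous();

    CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder()
        .withCache("listened-cache", CacheConfigurationBuilder
            .newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(100))
            .add(listenerConfiguration))
        .build(true);

    cacheManager.getCache("listened-cache", Long.class, String.class).put(1L, "one"); // fires a CREATED event
    cacheManager.close();
  }
}
----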
@@ -75,5 +75,5 @@ Advanced users may want to tune the level of concurrency which may be used for d [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=configuringEventProcessingQueues] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=configuringEventProcessingQueues] ---- diff --git a/docs/src/docs/asciidoc/user/caching-concepts.adoc b/docs/src/docs/asciidoc/user/caching-concepts.adoc index 34b64fcb57..ac068cc1b1 100644 --- a/docs/src/docs/asciidoc/user/caching-concepts.adoc +++ b/docs/src/docs/asciidoc/user/caching-concepts.adoc @@ -1,9 +1,9 @@ --- --- = Concepts Related to Caching -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] diff --git a/docs/src/docs/asciidoc/user/caching-patterns.adoc b/docs/src/docs/asciidoc/user/caching-patterns.adoc index 6f1801f06c..9f39cb1641 100644 --- a/docs/src/docs/asciidoc/user/caching-patterns.adoc +++ b/docs/src/docs/asciidoc/user/caching-patterns.adoc @@ -1,9 +1,9 @@ --- --- = Cache Usage Patterns -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] diff --git a/docs/src/docs/asciidoc/user/caching-terms.adoc b/docs/src/docs/asciidoc/user/caching-terms.adoc index 6eefba7e3f..dae27e66a0 100644 --- a/docs/src/docs/asciidoc/user/caching-terms.adoc +++ b/docs/src/docs/asciidoc/user/caching-terms.adoc @@ -1,9 +1,9 @@ --- --- = Terms Related to Caching -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] diff --git a/docs/src/docs/asciidoc/user/clustered-cache.adoc b/docs/src/docs/asciidoc/user/clustered-cache.adoc index 14e6d32695..60ecb57d42 100644 --- a/docs/src/docs/asciidoc/user/clustered-cache.adoc +++ b/docs/src/docs/asciidoc/user/clustered-cache.adoc @@ -1,9 +1,9 @@ --- --- = Clustered Cache -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -66,7 +66,7 @@ Detailed instructions on how to configure and start a Terracotta server array ca [source,xml] ---- -include::{sourcedir31}/clustered/client/src/test/resources/configs/docs/tc-config.xml[] +include::{sourcedir32}/clustered/client/src/test/resources/configs/docs/tc-config.xml[] ---- The above configuration defines two named _server off-heap resources_: @@ -106,7 +106,7 @@ Here is a code sample that shows how to configure a cache manager with clusterin [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] ---- <1> Returns the `org.ehcache.config.builders.CacheManagerBuilder` instance; @@ -123,7 +123,7 @@ This code sample demonstrates the usage of the concepts explained in the previou [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerWithServerSideConfigExample] 
+include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerWithServerSideConfigExample] ---- <1> `defaultServerResource(String)` on `ClusteringServiceConfigurationBuilder` instance sets the default server off-heap resource for the cache manager. @@ -145,7 +145,7 @@ include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/clie When configuring a cache manager to connect to a clustered tier manager there are three possible connection modes: [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerLifecycle] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerLifecycle] ---- <1> In auto-create mode if no clustered tier manager exists then one is created with the supplied configuration. If it exists and its configuration matches the supplied configuration then a connection is established. @@ -162,7 +162,7 @@ If it does not exist then the cache manager will fail to initialize. [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheTieredExample] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheTieredExample] ---- <1> Configuring the heap tier for cache. @@ -172,7 +172,7 @@ The equivalent XML configuration is as follows: [source,xml,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=tieringSample] +include::{sourcedir32}/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=tieringSample] ---- <1> Specify the heap tier for cache. @@ -193,7 +193,7 @@ This comes with a latency penalty on the write operation required to give this g [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheConsistency] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheConsistency] ---- <1> Specify the consistency level through the use of an additional service configuration, using _strong_ consistency here, @@ -203,7 +203,7 @@ The equivalent XML configuration is as follows: [source,xml,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=consistencySample] +include::{sourcedir32}/clustered/client/src/test/resources/configs/docs/ehcache-clustered.xml[tag=consistencySample] ---- <1> Specify the consistency level through a custom service configuration from the `clustered` namespace. @@ -230,7 +230,7 @@ Please review the example code below to see how this can be implemented. 
[source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=unspecifiedClusteredCacheExample] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=unspecifiedClusteredCacheExample] ---- <1> Configure the first cache manager with auto create <2> Build a cache configuration for a clustered _dedicated_ resource pool diff --git a/docs/src/docs/asciidoc/user/common.adoc b/docs/src/docs/asciidoc/user/common.adoc index 94b73f7f46..c4111a2e32 100644 --- a/docs/src/docs/asciidoc/user/common.adoc +++ b/docs/src/docs/asciidoc/user/common.adoc @@ -1,6 +1,6 @@ --- --- -ifndef::sourcedir31[] +ifndef::sourcedir32[] :notBuildingForSite: true ifdef::basebackend-html[:outfilesuffix: .html] :source-highlighter: coderay @@ -10,9 +10,9 @@ ifdef::basebackend-html[:outfilesuffix: .html] :icons: font :iconfont-remote!: :iconfont-name: font-awesome.min -:sourcedir31: ../../../../../ +:sourcedir32: ../../../../../ :imagesdir: images :sectanchors: :idprefix: :idseparator: - -endif::sourcedir31[] +endif::sourcedir32[] diff --git a/docs/src/docs/asciidoc/user/eviction-advisor.adoc b/docs/src/docs/asciidoc/user/eviction-advisor.adoc index e48f50b7c3..e712ded41b 100644 --- a/docs/src/docs/asciidoc/user/eviction-advisor.adoc +++ b/docs/src/docs/asciidoc/user/eviction-advisor.adoc @@ -1,9 +1,9 @@ --- --- = Eviction Advisor -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -24,7 +24,7 @@ preserving that entry in the cache, though there is no full guarantee of such. [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cacheEvictionAdvisor] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cacheEvictionAdvisor] ---- <1> Configure a constrained heap, as the eviction advisor is only relevant when mappings get evicted from the cache. 
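The eviction advisor callout above can be pictured with a small sketch; the advice rule, types and heap size are invented, and advice is only a hint, so the store may still evict advised-against mappings.

[source,java,indent=0]
----
import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.EvictionAdvisor;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;

public class EvictionAdvisorSketch {
  public static void main(String[] args) {
    // Advise against evicting entries whose value starts with "pinned".
    EvictionAdvisor<Long, String> advisor = (key, value) -> value.startsWith("pinned");

    CacheConfiguration<Long, String> configuration = CacheConfigurationBuilder
        .newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)) // small heap so eviction occurs
        .withEvictionAdvisor(advisor)
        .build();
  }
}
----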
diff --git a/docs/src/docs/asciidoc/user/examples.adoc b/docs/src/docs/asciidoc/user/examples.adoc index c919ba8da7..65660e5a4d 100644 --- a/docs/src/docs/asciidoc/user/examples.adoc +++ b/docs/src/docs/asciidoc/user/examples.adoc @@ -1,9 +1,9 @@ --- --- = Examples -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -104,5 +104,5 @@ Note the presence of the +Filling cache with peeps+, +Clearing peeps cache+, and [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/ehcache-example.xml[] +include::{sourcedir32}/107/src/test/resources/ehcache-example.xml[] ---- diff --git a/docs/src/docs/asciidoc/user/expiry.adoc b/docs/src/docs/asciidoc/user/expiry.adoc index b859f690f9..bd33a9e8b4 100644 --- a/docs/src/docs/asciidoc/user/expiry.adoc +++ b/docs/src/docs/asciidoc/user/expiry.adoc @@ -1,9 +1,9 @@ --- --- = Expiry -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -20,7 +20,7 @@ Expiry is configured at the cache level, in Java or in XML: [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] ---- <1> Expiry is configured at the cache level, so start by defining a cache configuration, @@ -28,7 +28,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t [source,xml,indent=0] ---- -include::{sourcedir31}/xml/src/test/resources/configs/docs/getting-started.xml[tags=expiry] +include::{sourcedir32}/xml/src/test/resources/configs/docs/getting-started.xml[tags=expiry] ---- <1> At the cache level, using the predefined _time-to-live_ again. @@ -50,7 +50,7 @@ Supporting your own expiration scheme simply means implementing the `Expiry` int [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/expiry/Expiry.java[lines=21..-1] +include::{sourcedir32}/api/src/main/java/org/ehcache/expiry/Expiry.java[lines=21..-1] ---- The main points to remember on the return value from these methods: @@ -71,7 +71,7 @@ In Java: [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=customExpiry] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=customExpiry] ---- <1> Simply pass your custom expiry instance into the cache builder. @@ -80,7 +80,7 @@ In XML: [source,xml,indent=0] ---- -include::{sourcedir31}/xml/src/test/resources/configs/docs/getting-started.xml[tags=customExpiry] +include::{sourcedir32}/xml/src/test/resources/configs/docs/getting-started.xml[tags=customExpiry] ---- <1> Simply pass the fully qualified class name of your custom expiry. diff --git a/docs/src/docs/asciidoc/user/getting-started.adoc b/docs/src/docs/asciidoc/user/getting-started.adoc index a4dc77b361..54eaf00e9a 100644 --- a/docs/src/docs/asciidoc/user/getting-started.adoc +++ b/docs/src/docs/asciidoc/user/getting-started.adoc @@ -1,9 +1,9 @@ --- --- -= Ehcache 3.1 Documentation -ifndef::sourcedir31[] += Ehcache 3.2 Documentation +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] We feel that the Ehcache 3.x API is a great improvement over the Ehcache 2.x API that has been used by millions of developers. We hope you enjoy this new generation of Ehcache! 
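The `expiry` tag updated in `expiry.adoc` above configures expiration on the cache configuration. A sketch of a time-to-live setup, assuming the `Expirations` and `Duration` types from `org.ehcache.expiry`; the 20 second TTL and the key/value types are arbitrary.

[source,java,indent=0]
----
import java.util.concurrent.TimeUnit;

import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.expiry.Duration;
import org.ehcache.expiry.Expirations;

public class ExpirySketch {
  public static void main(String[] args) {
    CacheConfiguration<Long, String> configuration = CacheConfigurationBuilder
        .newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(100))
        .withExpiry(Expirations.timeToLiveExpiration(new Duration(20, TimeUnit.SECONDS))) // expires 20s after creation
        .build();
  }
}
----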
ifdef::notBuildingForSite[] @@ -29,7 +29,7 @@ As with the previous versions of Ehcache, the canonical way of dealing with `Cac [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cachemanagerExample] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cachemanagerExample] ---- <1> Static method `org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder` that returns a new `org.ehcache.config.builders.CacheManagerBuilder` @@ -62,7 +62,7 @@ In addition, for creating the cache manager with clustering support, you will ne [source,java,indent=0] ---- -include::{sourcedir31}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] +include::{sourcedir32}/clustered/client/src/test/java/org/ehcache/clustered/client/docs/GettingStarted.java[tag=clusteredCacheManagerExample] ---- <1> Returns the `org.ehcache.config.builders.CacheManagerBuilder` instance; @@ -81,7 +81,7 @@ Ehcache 3 introduces the concept of `UserManagedCache`: [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] ---- <1> A new feature of Ehcache 3 is the ability to create `UserManagedCache` instances, i.e. ones not managed by a `CacheManager`, again you can either have the builder `init()` it for you, passing true, or @@ -105,7 +105,7 @@ the faster tiers. [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=offheapCacheManager] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=offheapCacheManager] ---- <1> If you wish to use off-heap, you'll have to define a resource pool, giving the memory size you want to allocate. @@ -120,7 +120,7 @@ Do not forget to define in the java options the `-XX:MaxDirectMemorySize` option [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=persistentCacheManager] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=persistentCacheManager] ---- <1> If you wish to use disk storage (like for persistent `Cache` instances), you'll have to provide a @@ -139,7 +139,7 @@ Note that Ehcache 3 only offers persistence in the case of clean shutdowns. [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=threeTiersCacheManager] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=threeTiersCacheManager] ---- <1> If you wish to use disk storage (like for persistent `Cache` instances), you'll have to provide a @@ -156,7 +156,7 @@ NOTE: Byte sizing has a runtime performance impact that depends on the size and [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=byteSizedTieredCache] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=byteSizedTieredCache] ---- <1> You can also size the heap tier in bytes. 
This will limit the amount of heap used by that tier for @@ -176,7 +176,7 @@ NOTE: Presently, `updateResourcePools()` only supports updating the heap tier an [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=updateResourcesAtRuntime] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=updateResourcesAtRuntime] ---- <1> You will need to create a new `ResourcePools` object with resources of required size, using `ResourcePoolsBuilder`. This object can @@ -191,7 +191,7 @@ The following illustrates how to configure a _time-to-live_ expiry. [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=expiry] ---- <1> Expiry is configured at the cache level, so start by defining a cache configuration, @@ -208,7 +208,7 @@ You can create an XML file to configure a `CacheManager`: [source,xml] ---- -include::{sourcedir31}/xml/src/test/resources/configs/docs/getting-started.xml[tags=gettingStarted] +include::{sourcedir32}/xml/src/test/resources/configs/docs/getting-started.xml[tags=gettingStarted] ---- <1> Declares a `Cache` aliased to `foo` diff --git a/docs/src/docs/asciidoc/user/index.adoc b/docs/src/docs/asciidoc/user/index.adoc index 86d60f8d88..95926edb6d 100644 --- a/docs/src/docs/asciidoc/user/index.adoc +++ b/docs/src/docs/asciidoc/user/index.adoc @@ -1,9 +1,9 @@ --- --- -= Ehcache 3.1 Documentation Overview -ifndef::sourcedir31[] += Ehcache 3.2 Documentation Overview +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -11,7 +11,7 @@ endif::notBuildingForSite[] == Table of Contents -The Table of Contents provides an overview of the Ehcache 3.1 documentation on this site. +The Table of Contents provides an overview of the Ehcache 3.2 documentation on this site. Each topic below corresponds to a menu item at the left. === Basic Topics diff --git a/docs/src/docs/asciidoc/user/management.adoc b/docs/src/docs/asciidoc/user/management.adoc index e1643d74b1..922aa14e7d 100644 --- a/docs/src/docs/asciidoc/user/management.adoc +++ b/docs/src/docs/asciidoc/user/management.adoc @@ -1,9 +1,9 @@ --- --- = Management and Monitoring -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -31,7 +31,7 @@ cache manager builder as a service: [source,java,indent=0] ---- -include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=usingManagementRegistry] +include::{sourcedir32}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=usingManagementRegistry] ---- <1> Optional: give a name to your cache manager by using a custom configuration <2> Create an instance of `org.ehcache.management.registry.DefaultManagementRegistryService`. This is only required because the service is used below. @@ -51,7 +51,7 @@ and a cache name to uniquely identify the cache on which you want to query stats [source,java,indent=0] ---- -include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=capabilitiesAndContexts] +include::{sourcedir32}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=capabilitiesAndContexts] ---- <1> Query the `ManagementRegistry` for the registered managed objects' capabilities. 
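The `updateResourcesAtRuntime` tag referenced above relies on the cache's mutable runtime configuration. A sketch, assuming a running cache manager that already holds a heap-only cache aliased `my-cache`; the alias, types and new size are made up.

[source,java,indent=0]
----
import org.ehcache.Cache;
import org.ehcache.CacheManager;
import org.ehcache.config.ResourcePools;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.config.units.EntryUnit;

public class RuntimeResizeSketch {
  static void growHeap(CacheManager cacheManager) {
    Cache<Long, String> cache = cacheManager.getCache("my-cache", Long.class, String.class);

    // Build a new ResourcePools holding only the tiers to resize (heap here, per the NOTE above).
    ResourcePools larger = ResourcePoolsBuilder.newResourcePoolsBuilder()
        .heap(200, EntryUnit.ENTRIES)
        .build();

    cache.getRuntimeConfiguration().updateResourcePools(larger);
  }
}
----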
<2> Each capability has a unique name you will need to refer to it. @@ -75,7 +75,7 @@ a managed object. Examples of actions could be: clear caches, get their configur [source,java,indent=0] ---- -include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=actionCall] +include::{sourcedir32}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=actionCall] ---- <1> Put something in a cache. <2> Call the 'clear' action on the managed cache. Refer to the descriptors of the provider to get the exact list of @@ -93,7 +93,7 @@ manager by default, but sometimes you may want one `ManagementRegistry` to manag [source,java,indent=0] ---- -include::{sourcedir31}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=managingMultipleCacheManagers] +include::{sourcedir32}/management/src/test/java/org/ehcache/docs/ManagementTest.java[tag=managingMultipleCacheManagers] ---- <1> Create an instance of `org.ehcache.management.SharedManagementService` <2> Pass it as a service to the first cache manager diff --git a/docs/src/docs/asciidoc/user/serializers-copiers.adoc b/docs/src/docs/asciidoc/user/serializers-copiers.adoc index d2e9a773c1..d95f25dbe3 100644 --- a/docs/src/docs/asciidoc/user/serializers-copiers.adoc +++ b/docs/src/docs/asciidoc/user/serializers-copiers.adoc @@ -1,9 +1,9 @@ --- --- = Serializers and Copiers -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -109,7 +109,7 @@ Implement the following interface, from package `org.ehcache.spi.serialization`: [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/spi/serialization/Serializer.java[lines=21..-1] +include::{sourcedir32}/api/src/main/java/org/ehcache/spi/serialization/Serializer.java[lines=21..-1] ---- As the javadoc states, there are some constructor rules, see the <> for that. @@ -231,7 +231,7 @@ Implement the following interface: [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/spi/copy/Copier.java[lines=19..-1] +include::{sourcedir32}/api/src/main/java/org/ehcache/spi/copy/Copier.java[lines=19..-1] ---- * `T copyForRead(T obj)` is invoked when a copy must be made upon a read operation (like a cache `get()`), diff --git a/docs/src/docs/asciidoc/user/thread-pools.adoc b/docs/src/docs/asciidoc/user/thread-pools.adoc index a0d8719fa3..b36c121f55 100644 --- a/docs/src/docs/asciidoc/user/thread-pools.adoc +++ b/docs/src/docs/asciidoc/user/thread-pools.adoc @@ -1,9 +1,9 @@ --- --- = Thread Pools -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -70,7 +70,7 @@ For instance, calling `CacheManagerBuilder.withDefaultDiskStoreThreadPool(String to calling `CacheManagerBuilder.using(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias))`. The thread pool can be assigned to a service with the builders by passing a -`threadPoolAlias` parameter to the ad-hoc method. When a service isn't told anything about what thread pool to use, +`threadPoolAlias` parameter to the ad-hoc method. When a service isn't told anything about what thread pool to use, the default thread pool is used. 
@@ -84,7 +84,7 @@ Following are examples of describing how to configure the thread pools the diffe [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=diskStore] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=diskStore] ---- <1> Configure the thread pools. Note that the default one (`dflt`) is required for the events even when no event @@ -96,7 +96,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag= [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=writeBehind] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=writeBehind] ---- <1> Configure the thread pools. Note that the default one (`dflt`) is required for the events even when no event @@ -109,7 +109,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag= [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=events] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag=events] ---- <1> Configure the thread pools. Note that there is no default one so all thread-using services must be configured @@ -122,7 +122,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/ThreadPools.java[tag= [source,xml] ---- -include::{sourcedir31}/xml/src/test/resources/configs/docs/thread-pools.xml[tags=threadPools] +include::{sourcedir32}/xml/src/test/resources/configs/docs/thread-pools.xml[tags=threadPools] ---- <1> Configure the thread pools. Note that there is no default one. diff --git a/docs/src/docs/asciidoc/user/usermanaged.adoc b/docs/src/docs/asciidoc/user/usermanaged.adoc index db61e594fa..23a2ef05a5 100644 --- a/docs/src/docs/asciidoc/user/usermanaged.adoc +++ b/docs/src/docs/asciidoc/user/usermanaged.adoc @@ -1,9 +1,9 @@ --- --- = User managed caches -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -29,7 +29,7 @@ While a `UserManagedCache` extends `Cache`, it offers additional methods: [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/UserManagedCache.java[lines=17..-1] +include::{sourcedir32}/api/src/main/java/org/ehcache/UserManagedCache.java[lines=17..-1] ---- As can be seen, these methods deal with the lifecycle of the cache and need to be called explicitly. @@ -38,7 +38,7 @@ There is also the following interface which comes into play when a user managed [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/PersistentUserManagedCache.java[lines=17..-1] +include::{sourcedir32}/api/src/main/java/org/ehcache/PersistentUserManagedCache.java[lines=17..-1] ---- @@ -48,7 +48,7 @@ include::{sourcedir31}/api/src/main/java/org/ehcache/PersistentUserManagedCache. [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] ---- <1> Create a `UserManagedCache` instance, again you can either have the builder `init()` it for you, passing true, or <2> pass false and it is up to you to `UserManagedCache.init()` them, prior to using them. 
@@ -73,7 +73,7 @@ If you want to use a disk persistent cache, you will need to create and lifecycl [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=persistentUserManagedCache] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=persistentUserManagedCache] ---- <1> Create the persistence service to be used by the cache for storing data on disk <2> Pass the persistence service to the builder as well as an id for the cache - note that this will make the builder produce a more specific type: `PersistentUserManagedCache` @@ -94,7 +94,7 @@ For more information on cache event listeners, see < Provide ExecutorService for ordered and unordered events delivery. <2> Provide listener configuration using CacheEventListenerConfigurationBuilder. diff --git a/docs/src/docs/asciidoc/user/writers.adoc b/docs/src/docs/asciidoc/user/writers.adoc index 5e9f294e6a..ea264f0bad 100644 --- a/docs/src/docs/asciidoc/user/writers.adoc +++ b/docs/src/docs/asciidoc/user/writers.adoc @@ -1,9 +1,9 @@ --- --- = Cache Writers -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -69,7 +69,7 @@ maximum write delay:: [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeThroughCache] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeThroughCache] ---- <1> We register a sample `CacheLoaderWriter` that knows about the mapping `(41L -> "zero")` @@ -81,7 +81,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t [source,java,indent=0] ---- -include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeBehindCache] +include::{sourcedir32}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=writeBehindCache] ---- <1> For write-behind you need a configured `CacheLoaderWriter`. diff --git a/docs/src/docs/asciidoc/user/xa.adoc b/docs/src/docs/asciidoc/user/xa.adoc index 1f4db9a36b..28cf6a560a 100644 --- a/docs/src/docs/asciidoc/user/xa.adoc +++ b/docs/src/docs/asciidoc/user/xa.adoc @@ -1,15 +1,15 @@ --- --- = XA transactional caches -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] Ehcache 3 supports caches that work within a XA transaction's context controlled by a JTA transaction manager, fully supporting the whole two-phase commit protocol, including crash recovery. [IMPORTANT] ========================== -Ehcache 3.1 jar no longer contains the transaction related code. +Ehcache 3.1+ jar no longer contains the transaction related code. This is now available through a different binary: [source,xml] ---- @@ -51,7 +51,7 @@ INFO org.ehcache.transactions.xa.txmgr.btm.BitronixTransactionManagerLookup - Us [source,java,indent=0] ---- -include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testSimpleXACache] +include::{sourcedir32}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testSimpleXACache] ---- <1> First start the Bitronix transaction manager. By default, Ehcache 3 will auto-detect it but will throw an exception @@ -83,7 +83,7 @@ transaction context. 
Nothing special needs to be configured for this to happen, [source,java,indent=0] ---- -include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithWriteThrough] +include::{sourcedir32}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithWriteThrough] ---- <1> First start the Bitronix transaction manager. By default, Ehcache 3 will auto-detect it but will throw an exception @@ -107,7 +107,7 @@ will result in `XACacheException` being thrown. [source,java,indent=0] ---- -include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testNonTransactionalAccess] +include::{sourcedir32}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testNonTransactionalAccess] ---- <1> First start the Bitronix transaction manager. By default, Ehcache 3 will auto-detect it but will throw an exception @@ -134,7 +134,7 @@ INFO o.e.t.x.j.DefaultJournalProvider - Using persistent XAStore journal [source,java,indent=0] ---- -include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithThreeTiers] +include::{sourcedir32}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithThreeTiers] ---- <1> First start the Bitronix transaction manager. By default, Ehcache 3 will auto-detect it but will throw an exception @@ -157,7 +157,7 @@ XA caches: [source,xml] ---- -include::{sourcedir31}/transactions/src/test/resources/docs/configs/xa-getting-started.xml[tags=gettingStarted] +include::{sourcedir32}/transactions/src/test/resources/docs/configs/xa-getting-started.xml[tags=gettingStarted] ---- <1> Declare a `TransactionManagerLookup` that will look up your transaction manager. @@ -168,7 +168,7 @@ In order to parse an XML configuration, you can use the `XmlConfiguration` type: [source,java] ---- -include::{sourcedir31}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithXMLConfig] +include::{sourcedir32}/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java[tag=testXACacheWithXMLConfig] ---- <1> The Bitronix transaction manager must be started before the cache manager is initialized. @@ -181,7 +181,7 @@ And here is what the `BitronixTransactionManagerLookup` implementation looks lik [source,java] ---- -include::{sourcedir31}/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java[tag=BitronixLookup] +include::{sourcedir32}/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/btm/BitronixTransactionManagerLookup.java[tag=BitronixLookup] ---- <1> The `TransactionManagerLookup` interface must be implemented and offer a no-arg constructor. 
diff --git a/docs/src/docs/asciidoc/user/xml.adoc b/docs/src/docs/asciidoc/user/xml.adoc index 6c00ad7629..15a71542d6 100644 --- a/docs/src/docs/asciidoc/user/xml.adoc +++ b/docs/src/docs/asciidoc/user/xml.adoc @@ -1,9 +1,9 @@ --- --- = XML Configuration -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -106,7 +106,7 @@ NOTE: If you are obtaining your `CacheManager` through the JSR-107 API, what fol [source,java,indent=0] ---- -include::{sourcedir31}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlConfig] +include::{sourcedir32}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlConfig] ---- <1> Obtain a `URL` to your XML file's location <2> Instantiate an `XmlConfiguration` passing the XML file's URL to it @@ -118,14 +118,14 @@ to use a `` element from an XML file, e.g. the `/my-config.xml` [source,xml,indent=0] ---- -include::{sourcedir31}/xml/src/test/resources/configs/docs/template-sample.xml[tag=templateSample] +include::{sourcedir32}/xml/src/test/resources/configs/docs/template-sample.xml[tag=templateSample] ---- Creating a `CacheConfigurationBuilder` of that `example` `` element, would be done as follows: [source,java,indent=0] ---- -include::{sourcedir31}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlTemplate] +include::{sourcedir32}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[tag=xmlTemplate] ---- <1> Creates a builder, inheriting the capacity constraint of 200 entries <2> The inherent properties can be overridden by simply providing a different value prior to building the `CacheConfiguration` diff --git a/docs/src/docs/asciidoc/user/xsds.adoc b/docs/src/docs/asciidoc/user/xsds.adoc index 16e20cf48b..8a76c40e36 100644 --- a/docs/src/docs/asciidoc/user/xsds.adoc +++ b/docs/src/docs/asciidoc/user/xsds.adoc @@ -1,9 +1,9 @@ --- --- = Ehcache XSDs -ifndef::sourcedir31[] +ifndef::sourcedir32[] include::common.adoc[] -endif::sourcedir31[] +endif::sourcedir32[] ifdef::notBuildingForSite[] include::menu.adoc[] @@ -25,7 +25,7 @@ endif::notBuildingForSite[] [source,xml,indent=0] ---- -include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/public-xsds-location.xml[tag=xsdLocations] +include::{sourcedir32}/107/src/test/resources/org/ehcache/docs/public-xsds-location.xml[tag=xsdLocations] ---- [[core]] @@ -33,7 +33,7 @@ include::{sourcedir31}/107/src/test/resources/org/ehcache/docs/public-xsds-locat [source,xsd,indent=0] ---- -include::{sourcedir31}/xml/src/main/resources/ehcache-core.xsd[lines=18..-1] +include::{sourcedir32}/xml/src/main/resources/ehcache-core.xsd[lines=18..-1] ---- [[jsr-107-extension]] @@ -41,12 +41,12 @@ include::{sourcedir31}/xml/src/main/resources/ehcache-core.xsd[lines=18..-1] [source,xsd,indent=0] ---- -include::{sourcedir31}/107/src/main/resources/ehcache-107ext.xsd[lines=18..-1] +include::{sourcedir32}/107/src/main/resources/ehcache-107ext.xsd[lines=18..-1] ---- == XA transactions extension [source,xsd,indent=0] ---- -include::{sourcedir31}/transactions/src/main/resources/ehcache-tx-ext.xsd[lines=18..-1] +include::{sourcedir32}/transactions/src/main/resources/ehcache-tx-ext.xsd[lines=18..-1] ---- From 3b877738ea51c4f03867f512d3dec95a4c8e4f56 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 7 Nov 2016 18:08:35 +0100 Subject: [PATCH 117/218] :bug: Fix release deploy issues * Do not escape sonatype URL from properties * Activate signing in transactions module --- gradle.properties | 2 +- 
transactions/build.gradle | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/gradle.properties b/gradle.properties index 95d91e9e83..ce670a6803 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,7 +1,7 @@ sonatypeUser = OVERRIDE_ME sonatypePwd = OVERRIDE_ME -deployUrl = 'https://oss.sonatype.org/service/local/staging/deploy/maven2/' +deployUrl = https://oss.sonatype.org/service/local/staging/deploy/maven2/ # Enable the daemon by adding org.gradle.daemon in USER_HOME/.gradle/gradle.properties org.gradle.parallel=true diff --git a/transactions/build.gradle b/transactions/build.gradle index 9f658db346..fe7c4d606e 100644 --- a/transactions/build.gradle +++ b/transactions/build.gradle @@ -31,3 +31,8 @@ dependencies { pomOnlyCompile "org.ehcache:ehcache:$parent.baseVersion" pomOnlyProvided 'javax.transaction:jta:1.1', 'org.codehaus.btm:btm:2.1.4' } + +project.signing { + required { project.isReleaseVersion && project.gradle.taskGraph.hasTask("uploadArchives") } + sign project.configurations.getByName('archives') +} From 06481a6154cd60d9376b1ebe91a9dd7ac2a3c61f Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Tue, 1 Nov 2016 17:01:29 -0400 Subject: [PATCH 118/218] :bug: Close #1574: Bugfixes in stats testing and stat initialization (ratio) - do not auto start stat (setAlwaysOne(true)), this will prevent the Time To Disable to work - do not use non-interruptible while loops - use timeouts to stop loops - also supports new better stats api with fixes and stat query methods --- build.gradle | 2 +- .../client/internal/store/ClusteredStore.java | 26 +- .../AbstractClusteringManagementTest.java | 12 +- .../ClusteredStatisticsCountTest.java | 7 +- .../ClusteredStatisticsRatioTest.java | 86 +++--- .../ClusteringManagementServiceTest.java | 38 +-- .../management/AbstractExposedStatistics.java | 156 ++++------- .../AbstractStatisticsManagementProvider.java | 1 - .../server/management/Management.java | 1 - .../core/statistics/StatisticMapper.java | 166 ------------ .../statistics/TierOperationOutcomes.java | 83 ++++++ .../statistics/TierOperationStatistic.java | 209 -------------- .../core/statistics/StatisticMapperTest.java | 256 ------------------ .../internal/store/disk/OffHeapDiskStore.java | 26 +- .../impl/internal/store/heap/OnHeapStore.java | 36 +-- .../internal/store/offheap/OffHeapStore.java | 36 +-- .../statistics/StandardEhcacheStatistics.java | 183 +++++-------- .../providers/statistics/EvictionTest.java | 52 ++-- .../providers/statistics/HitCountTest.java | 6 +- .../providers/statistics/HitRatioTest.java | 43 +-- .../providers/statistics/MissCountTest.java | 6 +- .../providers/statistics/MissRatioTest.java | 17 +- .../StandardEhcacheStatisticsTest.java | 19 +- .../providers/statistics/StatsUtil.java | 97 ++++--- .../DefaultManagementRegistryServiceTest.java | 16 +- .../DefaultSharedManagementServiceTest.java | 11 +- 26 files changed, 508 insertions(+), 1083 deletions(-) delete mode 100644 core/src/main/java/org/ehcache/core/statistics/StatisticMapper.java create mode 100644 core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java delete mode 100644 core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java delete mode 100644 core/src/test/java/org/ehcache/core/statistics/StatisticMapperTest.java diff --git a/build.gradle b/build.gradle index c34a190ca0..b1fe22eeda 100644 --- a/build.gradle +++ b/build.gradle @@ -22,7 +22,7 @@ ext { // Third parties offheapVersion = '2.3.1' - statisticVersion = '1.3.0' + statisticVersion = '1.4.0' 
jcacheVersion = '1.0.0' slf4jVersion = '1.7.7' sizeofVersion = '0.3.0' diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index 4ac6b504d3..e5f4b3b508 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -50,8 +50,7 @@ import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.core.spi.time.TimeSource; import org.ehcache.core.spi.time.TimeSourceService; -import org.ehcache.core.statistics.TierOperationStatistic; -import org.ehcache.core.statistics.TierOperationStatistic.TierOperationOutcomes; +import org.ehcache.core.statistics.TierOperationOutcomes; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.spi.persistence.StateRepository; @@ -63,6 +62,7 @@ import org.ehcache.spi.service.ServiceConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.terracotta.statistics.MappedOperationStatistic; import org.terracotta.statistics.StatisticsManager; import org.terracotta.statistics.observer.OperationObserver; @@ -562,21 +562,21 @@ public static class Provider implements Store.Provider, AuthoritativeTier.Provid private volatile ClusteringService clusteringService; private final Map, StoreConfig> createdStores = new ConcurrentWeakIdentityHashMap, StoreConfig>(); - private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); @Override public ClusteredStore createStore(final Configuration storeConfig, final ServiceConfiguration... serviceConfigs) { ClusteredStore store = createStoreInternal(storeConfig, serviceConfigs); - Collection> tieredOps = new ArrayList>(); + Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = - new TierOperationStatistic( + MappedOperationStatistic get = + new MappedOperationStatistic( store, TierOperationOutcomes.GET_TRANSLATION, "get", TIER_HEIGHT, "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); - TierOperationStatistic evict = - new TierOperationStatistic( + MappedOperationStatistic evict = + new MappedOperationStatistic( store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -741,16 +741,16 @@ public void stop() { @Override public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { ClusteredStore authoritativeTier = createStoreInternal(storeConfig, serviceConfigs); - Collection> tieredOps = new ArrayList>(); + Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = - new TierOperationStatistic( + MappedOperationStatistic get = + new MappedOperationStatistic( authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", TIER_HEIGHT, "getAndFault", STATISTICS_TAG); StatisticsManager.associate(get).withParent(authoritativeTier); tieredOps.add(get); - TierOperationStatistic evict = - new TierOperationStatistic( + MappedOperationStatistic evict = + new MappedOperationStatistic( authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", TIER_HEIGHT, "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(authoritativeTier); tieredOps.add(evict); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 537c243b5a..e59624bc92 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -45,6 +45,8 @@ import org.terracotta.management.model.message.Message; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Statistic; +import org.terracotta.management.model.stats.StatisticHistory; import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; @@ -52,7 +54,9 @@ import java.io.FileNotFoundException; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Properties; import java.util.Scanner; import java.util.concurrent.Exchanger; @@ -217,7 +221,10 @@ protected static ContextualReturn sendManagementCallToCollectStats(String... new Parameter("StatisticsCapability"), new Parameter(asList(statNames), Collection.class.getName()))); - return exchanger.exchange(null); + ContextualReturn contextualReturn = exchanger.exchange(null); + assertThat(contextualReturn.hasExecuted(), is(true)); + + return contextualReturn; } finally { managementConnection.close(); } @@ -225,7 +232,7 @@ protected static ContextualReturn sendManagementCallToCollectStats(String... 
protected static List waitForNextStats() { // uses the monitoring consumre entity to get the content of the stat buffer when some stats are collected - while (true) { + while (!Thread.currentThread().isInterrupted()) { List messages = consumer.drainMessageBuffer() .stream() .filter(message -> message.getType().equals("STATISTICS")) @@ -237,6 +244,7 @@ protected static List waitForNextStats() { return messages; } } + return Collections.emptyList(); } protected static List messageTypes(List messages) { diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java index b36616a8df..239008fd6d 100755 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java @@ -15,7 +15,6 @@ */ package org.ehcache.clustered.management; -import static org.ehcache.clustered.management.AbstractClusteringManagementTest.cacheManager; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; @@ -37,8 +36,7 @@ public class ClusteredStatisticsCountTest extends AbstractClusteringManagementTe @Test public void countTest() throws Exception { - ContextualReturn contextualReturn = sendManagementCallToCollectStats("Cache:HitCount","Clustered:HitCount","Cache:MissCount","Clustered:MissCount"); - assertThat(contextualReturn.hasExecuted(), is(true)); + sendManagementCallToCollectStats("Cache:HitCount","Clustered:HitCount","Cache:MissCount","Clustered:MissCount"); Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); cache.put("one", "val1"); @@ -86,7 +84,8 @@ public void countTest() throws Exception { } } } - } while( (cacheHitCount != CACHE_HIT_COUNT) && (clusteredHitCount != CLUSTERED_HIT_COUNT) && + } while(!Thread.currentThread().isInterrupted() && + (cacheHitCount != CACHE_HIT_COUNT) && (clusteredHitCount != CLUSTERED_HIT_COUNT) && (cacheMissCount != CACHE_MISS_COUNT) && (clusteredMissCount != CLUSTERED_MISS_COUNT)); Assert.assertThat(cacheHitCount,is(CACHE_HIT_COUNT)); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java index 292d93ea6b..0ddf774cb4 100755 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java @@ -15,31 +15,38 @@ */ package org.ehcache.clustered.management; -import static org.ehcache.clustered.management.AbstractClusteringManagementTest.cacheManager; -import static org.ehcache.clustered.management.AbstractClusteringManagementTest.sendManagementCallToCollectStats; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; - -import java.util.List; import org.ehcache.Cache; -import org.junit.Assert; import org.junit.Test; -import org.terracotta.management.model.call.ContextualReturn; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.Sample; import org.terracotta.management.model.stats.history.RatioHistory; -public class ClusteredStatisticsRatioTest extends 
AbstractClusteringManagementTest { +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; - private static final double CACHE_HIT_RATIO = .5d; - private static final double CLUSTERED_HIT_RATIO = .5d; - private static final double CACHE_MISS_RATIO = .5d; - private static final double CLUSTERED_MISS_RATIO = .5d; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.array; +import static org.junit.Assert.assertThat; + +public class ClusteredStatisticsRatioTest extends AbstractClusteringManagementTest { @Test public void ratioTest() throws Exception { - ContextualReturn contextualReturn = sendManagementCallToCollectStats("Cache:HitRatio","Clustered:HitRatio","Cache:MissRatio","Clustered:MissRatio"); - assertThat(contextualReturn.hasExecuted(), is(true)); + String[] statNames = {"Cache:HitRatio", "Clustered:HitRatio", "Cache:MissRatio", "Clustered:MissRatio"}; + sendManagementCallToCollectStats(statNames); + + // When testing ratios, we need to wait for the first computation (we do not have any choice) to happen because ratio depends on 2 other sampled statistics. + // If you do not wait, then you'll always get some NaN because the hits will be done within the 1st second, and the hits won't be done in the right "window". + // A ratio is computed by dividing a rate with another rate. See CompoundOperationImpl.ratioOf(). + // And a rate is computed with values aggregated into a EventRateSimpleMovingAverage. + // The call to EventRateSimpleMovingAverage.rateUsingSeconds() will return 0 during the fist second (until first computation did happen). + // So the hits must be after the first second so that values get accumulated into the partitions of EventRateSimpleMovingAverage. + + // Also, we have to take in consideration that in clustered, there is a collector that is scheduled at 75% of the TTD to collect and send stats. + // So the delay can be greater than just the duration of the first sampling. 
+ Thread.sleep(25000); Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); cache.put("one", "val1"); @@ -51,52 +58,29 @@ public void ratioTest() throws Exception { cache.get("three"); //miss cache.get("four"); //miss - - double cacheHitRatio = 0; - double clusteredHitRatio = 0; - double cacheMissRatio = 0; - double clusteredMissRatio = 0; + Double[] ratios = new Double[statNames.length]; // it could be several seconds before the sampled stats could become available // let's try until we find the correct values do { // get the stats (we are getting the primitive counter, not the sample history) - List stats = waitForNextStats(); + // only keep CM stats for the following checks + List stats = waitForNextStats() + .stream() + .filter(statistics -> "dedicated-cache-1".equals(statistics.getContext().get("cacheName"))) + .collect(Collectors.toList()); + for (ContextualStatistics stat : stats) { - if (stat.getContext().get("cacheName").equals("dedicated-cache-1")) { - - Sample[] samplesCacheHitRatio = stat.getStatistic(RatioHistory.class, "Cache:HitRatio").getValue(); - if(samplesCacheHitRatio.length > 0) { - cacheHitRatio = samplesCacheHitRatio[samplesCacheHitRatio.length - 1].getValue(); - } - - Sample[] samplesClusteredHitRatio = stat.getStatistic(RatioHistory.class, "Clustered:HitRatio").getValue(); - if(samplesClusteredHitRatio.length > 0) { - clusteredHitRatio = samplesClusteredHitRatio[samplesClusteredHitRatio.length - 1].getValue(); - } - - Sample[] samplesClusteredMissRatio = stat.getStatistic(RatioHistory.class, "Clustered:MissRatio").getValue(); - if(samplesClusteredMissRatio.length > 0) { - clusteredMissRatio = samplesClusteredMissRatio[samplesClusteredMissRatio.length - 1].getValue(); - } - - Sample[] samplesCacheMissRatio = stat.getStatistic(RatioHistory.class, "Cache:MissRatio").getValue(); - if(samplesCacheMissRatio.length > 0) { - cacheMissRatio = samplesCacheMissRatio[samplesCacheMissRatio.length - 1].getValue(); - } + for (int i = 0; i < statNames.length; i++) { + String statName = statNames[i]; + Sample[] samples = stat.getStatistic(RatioHistory.class, statName).getValue(); + ratios[i] = samples.length > 0 ? 
samples[samples.length - 1].getValue() : 0d; } } - } while( (cacheHitRatio != CACHE_HIT_RATIO) && (clusteredHitRatio != CLUSTERED_HIT_RATIO) && - (cacheMissRatio != CACHE_MISS_RATIO) && (clusteredMissRatio != CLUSTERED_MISS_RATIO)); - - Assert.assertThat(cacheHitRatio,is(CACHE_HIT_RATIO)); - Assert.assertThat(clusteredHitRatio,is(CLUSTERED_HIT_RATIO)); - Assert.assertThat(cacheMissRatio,is(CACHE_MISS_RATIO)); - Assert.assertThat(clusteredMissRatio,is(CLUSTERED_MISS_RATIO)); + } while (!Thread.currentThread().isInterrupted() && !Arrays.equals(ratios, new Double[]{.5d, .5d, .5d, .5d})); + assertThat(ratios, is(array(equalTo(.5d), equalTo(.5d), equalTo(.5d), equalTo(.5d)))); } - - } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 6fd0c0889a..b0d09a5299 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -208,15 +208,14 @@ public void test_F_notifs_on_remove_cache() throws Exception { @Test public void test_G_stats_collection() throws Exception { - ContextualReturn contextualReturn = sendManagementCallToCollectStats("Cache:HitCount"); - assertThat(contextualReturn.hasExecuted(), is(true)); + sendManagementCallToCollectStats("Cache:HitCount"); Cache cache1 = cacheManager.getCache("dedicated-cache-1", String.class, String.class); cache1.put("key1", "val"); cache1.put("key2", "val"); - cache1.get("key1"); - cache1.get("key2"); + cache1.get("key1"); // hit + cache1.get("key2"); // hit List allStats = new ArrayList<>(); long val = 0; @@ -229,15 +228,18 @@ public void test_G_stats_collection() throws Exception { List stats = waitForNextStats(); allStats.addAll(stats); + // only keep CM stats for the following checks + stats = stats.stream() + .filter(statistics -> "dedicated-cache-1".equals(statistics.getContext().get("cacheName"))) + .collect(Collectors.toList()); + for (ContextualStatistics stat : stats) { - if (stat.getContext().get("cacheName").equals("dedicated-cache-1")) { - Sample[] samples = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); - if(samples.length > 0) { - val = samples[samples.length - 1].getValue(); - } + Sample[] samples = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + if(samples.length > 0) { + val = samples[samples.length - 1].getValue(); } } - } while(val != 2); + } while(!Thread.currentThread().isInterrupted() && val != 2); // do some other operations cache1.get("key1"); @@ -247,20 +249,22 @@ public void test_G_stats_collection() throws Exception { List stats = waitForNextStats(); allStats.addAll(stats); + // only keep CM stats for the following checks + stats = stats.stream() + .filter(statistics -> "dedicated-cache-1".equals(statistics.getContext().get("cacheName"))) + .collect(Collectors.toList()); for (ContextualStatistics stat : stats) { - if (stat.getContext().get("cacheName").equals("dedicated-cache-1")) { - Sample[] samples = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); - if(samples.length > 0) { - val = samples[samples.length - 1].getValue(); - } + Sample[] samples = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); + if(samples.length > 0) { + val = samples[samples.length - 1].getValue(); } } - 
} while(val != 4); + } while(!Thread.currentThread().isInterrupted() && val != 4); // wait until we have some stats coming from the server entity - while (!allStats.stream().filter(statistics -> statistics.getContext().contains("consumerId")).findFirst().isPresent()) { + while (!Thread.currentThread().isInterrupted() && !allStats.stream().filter(statistics -> statistics.getContext().contains("consumerId")).findFirst().isPresent()) { allStats.addAll(waitForNextStats()); } List serverStats = allStats.stream().filter(statistics -> statistics.getContext().contains("consumerId")).collect(Collectors.toList()); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java index 4b88ec1a93..76aff78ef3 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java @@ -15,10 +15,6 @@ */ package org.ehcache.clustered.server.management; -import org.terracotta.context.extended.RegisteredCompoundStatistic; -import org.terracotta.context.extended.RegisteredCounterStatistic; -import org.terracotta.context.extended.RegisteredRatioStatistic; -import org.terracotta.context.extended.RegisteredSizeStatistic; import org.terracotta.context.extended.RegisteredStatistic; import org.terracotta.context.extended.StatisticsRegistry; import org.terracotta.management.model.capabilities.descriptors.Descriptor; @@ -37,13 +33,14 @@ import org.terracotta.management.service.registry.provider.AliasBinding; import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; import org.terracotta.offheapstore.util.FindbugsSuppressWarnings; -import org.terracotta.statistics.extended.CompoundOperation; +import org.terracotta.statistics.extended.SampleType; import org.terracotta.statistics.extended.SampledStatistic; import java.io.Closeable; -import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -55,11 +52,21 @@ @FindbugsSuppressWarnings("EQ_DOESNT_OVERRIDE_EQUALS") class AbstractExposedStatistics extends AliasBindingManagementProvider.ExposedAliasBinding implements Closeable { + private static final Map COMPOUND_SUFFIXES = new HashMap<>(); + + static { + COMPOUND_SUFFIXES.put("Count", SampleType.COUNTER); + COMPOUND_SUFFIXES.put("Rate", SampleType.RATE); + COMPOUND_SUFFIXES.put("LatencyMinimum", SampleType.LATENCY_MIN); + COMPOUND_SUFFIXES.put("LatencyMaximum", SampleType.LATENCY_MAX); + COMPOUND_SUFFIXES.put("LatencyAverage", SampleType.LATENCY_AVG); + } + protected final StatisticsRegistry statisticsRegistry; AbstractExposedStatistics(long consumerId, T binding, StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor, Object statisticContextObject) { super(binding, consumerId); - if(statisticContextObject == null) { + if (statisticContextObject == null) { this.statisticsRegistry = null; } else { @@ -77,12 +84,6 @@ class AbstractExposedStatistics extends AliasBindingMana } void init() { - if (statisticsRegistry != null) { - Map registrations = statisticsRegistry.getRegistrations(); - for (RegisteredStatistic registeredStatistic : registrations.values()) { - registeredStatistic.getSupport().setAlwaysOn(true); - } - 
} } @Override @@ -94,58 +95,33 @@ public void close() { @SuppressWarnings("unchecked") public Statistic queryStatistic(String statisticName, long since) { - if (statisticsRegistry != null) { - Map registrations = statisticsRegistry.getRegistrations(); - for (Entry entry : registrations.entrySet()) { - String name = entry.getKey(); - RegisteredStatistic registeredStatistic = entry.getValue(); - - if (registeredStatistic instanceof RegisteredCompoundStatistic) { - RegisteredCompoundStatistic registeredCompoundStatistic = (RegisteredCompoundStatistic) registeredStatistic; - CompoundOperation compoundOperation = registeredCompoundStatistic.getCompoundOperation(); - - if ((name + "Count").equals(statisticName)) { - SampledStatistic count = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).count(); - return new CounterHistory(buildHistory(count, since), NumberUnit.COUNT); - } else if ((name + "Rate").equals(statisticName)) { - SampledStatistic rate = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).rate(); - return new RateHistory(buildHistory(rate, since), TimeUnit.SECONDS); - - } else if ((name + "LatencyMinimum").equals(statisticName)) { - SampledStatistic minimum = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().minimum(); - return new DurationHistory(buildHistory(minimum, since), TimeUnit.NANOSECONDS); - - } else if ((name + "LatencyMaximum").equals(statisticName)) { - SampledStatistic maximum = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().maximum(); - return new DurationHistory(buildHistory(maximum, since), TimeUnit.NANOSECONDS); - - } else if ((name + "LatencyAverage").equals(statisticName)) { - SampledStatistic average = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().average(); - return new AverageHistory(buildHistory(average, since), TimeUnit.NANOSECONDS); - } - } else if (registeredStatistic instanceof RegisteredRatioStatistic) { - RegisteredRatioStatistic registeredRatioStatistic = (RegisteredRatioStatistic) registeredStatistic; - CompoundOperation compoundOperation = registeredRatioStatistic.getCompoundOperation(); - - if (name.equals(statisticName)) { - SampledStatistic ratio = (SampledStatistic) compoundOperation.ratioOf((Set) registeredRatioStatistic.getNumerator(), (Set) registeredRatioStatistic.getDenominator()); - return new RatioHistory(buildHistory(ratio, since), NumberUnit.RATIO); - } - } else if (registeredStatistic instanceof RegisteredSizeStatistic) { - RegisteredSizeStatistic registeredSizeStatistic = (RegisteredSizeStatistic) registeredStatistic; - if (name.equals(statisticName)) { - SampledStatistic count = (SampledStatistic) registeredSizeStatistic.getSampledStatistic(); - return new SizeHistory(buildHistory(count, since), MemoryUnit.B); - } - } else if (registeredStatistic instanceof RegisteredCounterStatistic) { - RegisteredCounterStatistic registeredCounterStatistic = (RegisteredCounterStatistic) registeredStatistic; - if (name.equals(statisticName)) { - SampledStatistic count = (SampledStatistic) registeredCounterStatistic.getSampledStatistic(); - return new CounterHistory(buildHistory(count, since), NumberUnit.COUNT); - } - } else { - throw new UnsupportedOperationException("Cannot handle registered statistic type : " + registeredStatistic); - } + // first search for a non-compound stat + SampledStatistic statistic = statisticsRegistry.findSampledStatistic(statisticName); + + // if not found, it 
can be a compound stat, so search for it + if (statistic == null) { + for (Iterator> it = COMPOUND_SUFFIXES.entrySet().iterator(); it.hasNext() && statistic == null; ) { + Entry entry = it.next(); + statistic = statisticsRegistry.findSampledCompoundStatistic(statisticName.substring(0, Math.max(0, statisticName.length() - entry.getKey().length())), entry.getValue()); + } + } + + if (statistic != null) { + List> samples = statistic + .history(since) + .stream() + .map(t -> new Sample<>(t.getTimestamp(), t.getSample())) + .collect(Collectors.toList()); + + switch (statistic.type()) { + case COUNTER: return new CounterHistory((List>) samples, NumberUnit.COUNT); + case RATE: return new RateHistory((List>) samples, TimeUnit.SECONDS); + case LATENCY_MIN: return new DurationHistory((List>) samples, TimeUnit.NANOSECONDS); + case LATENCY_MAX: return new DurationHistory((List>) samples, TimeUnit.NANOSECONDS); + case LATENCY_AVG: return new AverageHistory((List>) samples, TimeUnit.NANOSECONDS); + case RATIO: return new RatioHistory((List>) samples, NumberUnit.RATIO); + case SIZE: return new SizeHistory((List>) samples, MemoryUnit.B); + default: throw new UnsupportedOperationException(statistic.type().name()); } } @@ -155,35 +131,31 @@ public void close() { @Override public Collection getDescriptors() { Set capabilities = new HashSet<>(); - capabilities.addAll(queryStatisticsRegistry()); - return capabilities; - } - - private Set queryStatisticsRegistry() { - Set capabilities = new HashSet<>(); if (statisticsRegistry != null) { Map registrations = statisticsRegistry.getRegistrations(); - for (Entry entry : registrations.entrySet()) { String statisticName = entry.getKey(); RegisteredStatistic registeredStatistic = registrations.get(statisticName); - - if (registeredStatistic instanceof RegisteredCompoundStatistic) { - List statistics = new ArrayList<>(); - statistics.add(new StatisticDescriptor(entry.getKey() + "Count", StatisticType.COUNTER_HISTORY)); - statistics.add(new StatisticDescriptor(entry.getKey() + "Rate", StatisticType.RATE_HISTORY)); - statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyMinimum", StatisticType.DURATION_HISTORY)); - statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyMaximum", StatisticType.DURATION_HISTORY)); - statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyAverage", StatisticType.AVERAGE_HISTORY)); - - capabilities.addAll(statistics); - } else if (registeredStatistic instanceof RegisteredRatioStatistic) { - capabilities.add(new StatisticDescriptor(entry.getKey() + "Ratio", StatisticType.RATIO_HISTORY)); - } else if (registeredStatistic instanceof RegisteredCounterStatistic) { - capabilities.add(new StatisticDescriptor(statisticName, StatisticType.COUNTER_HISTORY)); - } else if (registeredStatistic instanceof RegisteredSizeStatistic) { - capabilities.add(new StatisticDescriptor(statisticName, StatisticType.SIZE_HISTORY)); + switch (registeredStatistic.getType()) { + case COUNTER: + capabilities.add(new StatisticDescriptor(statisticName, StatisticType.COUNTER_HISTORY)); + break; + case RATIO: + capabilities.add(new StatisticDescriptor(entry.getKey() + "Ratio", StatisticType.RATIO_HISTORY)); + break; + case SIZE: + capabilities.add(new StatisticDescriptor(statisticName, StatisticType.SIZE_HISTORY)); + break; + case COMPOUND: + capabilities.add(new StatisticDescriptor(entry.getKey() + "Count", StatisticType.COUNTER_HISTORY)); + capabilities.add(new StatisticDescriptor(entry.getKey() + "Rate", StatisticType.RATE_HISTORY)); + 
capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyMinimum", StatisticType.DURATION_HISTORY)); + capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyMaximum", StatisticType.DURATION_HISTORY)); + capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyAverage", StatisticType.AVERAGE_HISTORY)); + break; + default: + throw new UnsupportedOperationException(registeredStatistic.getType().name()); } } } @@ -191,12 +163,4 @@ private Set queryStatisticsRegistry() { return capabilities; } - private static List> buildHistory(SampledStatistic sampledStatistic, long since) { - return sampledStatistic.history() - .stream() - .filter(timestamped -> timestamped.getTimestamp() >= since) - .map(timestamped -> new Sample<>(timestamped.getTimestamp(), timestamped.getSample())) - .collect(Collectors.toList()); - } - } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java index f168a88d0a..4242a6e355 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java @@ -29,7 +29,6 @@ import java.util.HashMap; import java.util.Map; -@Named("ServerStoreSettings") @RequiredContext({@Named("consumerId")}) abstract class AbstractStatisticsManagementProvider extends AliasBindingManagementProvider { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java index 1f401c59d6..9929a80377 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -203,7 +203,6 @@ public void sharedPoolsConfigured() { LOGGER.trace("sharedPoolsConfigured()"); ehcacheStateService.getSharedResourcePools() .entrySet() - .stream() .forEach(e -> managementRegistry.register(new PoolBinding(e.getKey(), e.getValue(), PoolBinding.AllocationType.SHARED))); managementRegistry.refresh(); managementRegistry.pushServerEntityNotification(PoolBinding.ALL_SHARED, "EHCACHE_RESOURCE_POOLS_CONFIGURED"); diff --git a/core/src/main/java/org/ehcache/core/statistics/StatisticMapper.java b/core/src/main/java/org/ehcache/core/statistics/StatisticMapper.java deleted file mode 100644 index eab43f5525..0000000000 --- a/core/src/main/java/org/ehcache/core/statistics/StatisticMapper.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.ehcache.core.statistics; - -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.ValueStatistic; -import org.terracotta.statistics.observer.ChainedOperationObserver; - -import java.util.Collections; -import java.util.EnumMap; -import java.util.EnumSet; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -import static java.util.EnumSet.allOf; - -/** - * - */ -public class StatisticMapper, TARGET extends Enum> implements OperationStatistic { - - private final Class targetType; - private final Class sourceType; - private final OperationStatistic statistic; - private final Map> translation; - private final Map reverseTranslation; - private final ConcurrentMap, ChainedOperationObserver> derivedStats - = new ConcurrentHashMap, ChainedOperationObserver>(); - - public StatisticMapper(Map> translation, OperationStatistic statistic) { - Entry> first = translation.entrySet().iterator().next(); - - this.targetType = first.getKey().getDeclaringClass(); - this.sourceType = first.getValue().iterator().next().getDeclaringClass(); - this.statistic = statistic; - this.translation = translation; - Set unmappedTierOutcomes = allOf(targetType); - unmappedTierOutcomes.removeAll(translation.keySet()); - if (!unmappedTierOutcomes.isEmpty()) { - throw new IllegalArgumentException("Translation does not contain target outcomes " + unmappedTierOutcomes); - } - - this.reverseTranslation = reverse(translation); - Set unmappedStoreOutcomes = allOf(sourceType); - unmappedStoreOutcomes.removeAll(reverseTranslation.keySet()); - if (!unmappedStoreOutcomes.isEmpty()) { - throw new IllegalArgumentException("Translation does not contain source outcomes " + unmappedStoreOutcomes); - } - } - - private static , A extends Enum> Map reverse(Map> map) { - Map reverse = Collections.emptyMap(); - - for (Entry> e : map.entrySet()) { - for (B b : e.getValue()) { - if (reverse.isEmpty()) { - reverse = new EnumMap(b.getDeclaringClass()); - } - if (reverse.put(b, e.getKey()) != null) { - throw new IllegalArgumentException("Reverse statistic outcome mapping is ill-defined: " + map); - } - } - } - return reverse; - } - - @Override - public Class type() { - return targetType; - } - - @Override - public ValueStatistic statistic(TARGET result) { - return statistic.statistic(translation.get(result)); - } - - @Override - public ValueStatistic statistic(Set results) { - Set translated = EnumSet.noneOf(sourceType); - for (TARGET result : results) { - translated.addAll(translation.get(result)); - } - return statistic.statistic(translated); - } - - @Override - public long count(TARGET type) { - return statistic.sum(translation.get(type)); - } - - @Override - public long sum(Set types) { - Set translated = EnumSet.noneOf(sourceType); - for (TARGET type : types) { - translated.addAll(translation.get(type)); - } - return statistic.sum(translated); - } - - @Override - public long sum() { - return statistic.sum(); - } - - @Override - public void addDerivedStatistic(final ChainedOperationObserver derived) { - ChainedOperationObserver translator = new ChainedOperationObserver() { - @Override - public void begin(long time) { - derived.begin(time); - } - - @Override - public void end(long time, SOURCE result) { - derived.end(time, reverseTranslation.get(result)); - } - - @Override - public void end(long time, SOURCE result, long... 
parameters) { - derived.end(time, reverseTranslation.get(result), parameters); - } - }; - if (derivedStats.putIfAbsent(derived, translator) == null) { - statistic.addDerivedStatistic(translator); - } - } - - @Override - public void removeDerivedStatistic(ChainedOperationObserver derived) { - ChainedOperationObserver translator = derivedStats.remove(derived); - if (translator != null) { - statistic.removeDerivedStatistic(translator); - } - } - - @Override - public void begin() { - throw new UnsupportedOperationException(); - } - - @Override - public void end(TARGET result) { - throw new UnsupportedOperationException(); - } - - @Override - public void end(TARGET result, long... parameters) { - throw new UnsupportedOperationException(); - } -} diff --git a/core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java b/core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java new file mode 100644 index 0000000000..bae4fbe995 --- /dev/null +++ b/core/src/main/java/org/ehcache/core/statistics/TierOperationOutcomes.java @@ -0,0 +1,83 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.core.statistics; + +import java.util.EnumMap; +import java.util.Map; +import java.util.Set; + +import static java.util.Collections.unmodifiableMap; +import static java.util.EnumSet.of; + +public class TierOperationOutcomes { + + public static final Map> GET_TRANSLATION; + + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(StoreOperationOutcomes.GetOutcome.HIT)); + translation.put(GetOutcome.MISS, of(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); + GET_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> GET_AND_FAULT_TRANSLATION; + + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.HIT)); + translation.put(GetOutcome.MISS, of(AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.MISS, AuthoritativeTierOperationOutcomes.GetAndFaultOutcome.TIMEOUT)); + GET_AND_FAULT_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> GET_AND_REMOVE_TRANSLATION; + + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.HIT_REMOVED)); + translation.put(GetOutcome.MISS, of(LowerCachingTierOperationsOutcome.GetAndRemoveOutcome.MISS)); + GET_AND_REMOVE_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> GET_OR_COMPUTEIFABSENT_TRANSLATION; + + static { + Map> translation = new EnumMap>(GetOutcome.class); + translation.put(GetOutcome.HIT, of(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.HIT)); + translation.put(GetOutcome.MISS, of(CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULTED, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED, + 
CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS, CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome.MISS)); + GET_OR_COMPUTEIFABSENT_TRANSLATION = unmodifiableMap(translation); + } + + public static final Map> EVICTION_TRANSLATION; + + static { + Map> translation = new EnumMap>(EvictionOutcome.class); + translation.put(EvictionOutcome.SUCCESS, of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); + translation.put(EvictionOutcome.FAILURE, of(StoreOperationOutcomes.EvictionOutcome.FAILURE)); + EVICTION_TRANSLATION = unmodifiableMap(translation); + } + + public enum GetOutcome { + HIT, + MISS, + } + + public enum EvictionOutcome { + SUCCESS, + FAILURE + } + +} diff --git a/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java b/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java deleted file mode 100644 index 19d1d5c807..0000000000 --- a/core/src/main/java/org/ehcache/core/statistics/TierOperationStatistic.java +++ /dev/null @@ -1,209 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.core.statistics; - -import org.ehcache.core.statistics.AuthoritativeTierOperationOutcomes.GetAndFaultOutcome; -import org.ehcache.core.statistics.CachingTierOperationOutcomes.GetOrComputeIfAbsentOutcome; -import org.ehcache.core.statistics.LowerCachingTierOperationsOutcome.GetAndRemoveOutcome; -import org.terracotta.context.ContextManager; -import org.terracotta.context.TreeNode; -import org.terracotta.context.annotations.ContextAttribute; -import org.terracotta.context.query.Matcher; -import org.terracotta.context.query.Matchers; -import org.terracotta.context.query.Query; -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.ValueStatistic; -import org.terracotta.statistics.observer.ChainedOperationObserver; - -import java.util.Collections; -import java.util.EnumMap; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import static java.util.Collections.unmodifiableMap; -import static java.util.EnumSet.of; -import static org.terracotta.context.query.Matchers.allOf; -import static org.terracotta.context.query.Matchers.attributes; -import static org.terracotta.context.query.Matchers.context; -import static org.terracotta.context.query.Matchers.identifier; -import static org.terracotta.context.query.Matchers.subclassOf; -import static org.terracotta.context.query.QueryBuilder.queryBuilder; -import static org.terracotta.context.query.Matchers.hasAttribute; - -@ContextAttribute("this") -public class TierOperationStatistic, D extends Enum> implements OperationStatistic { - - @ContextAttribute("name") public final String name; - @ContextAttribute("tags") public final Set tags; - @ContextAttribute("properties") public final Map properties; - @ContextAttribute("type") public final Class tierOutcomeType; - - private final StatisticMapper mapper; - - public TierOperationStatistic(Object 
tier, Map> translation, String statisticName, int tierHeight, String targetName, String discriminator) { - - this.name = statisticName; - this.tags = Collections.singleton("tier"); - this.properties = new HashMap(); - this.properties.put("tierHeight", tierHeight); - this.properties.put("discriminator", discriminator); - - Entry> first = translation.entrySet().iterator().next(); - Class storeOutcomeType = first.getValue().iterator().next().getDeclaringClass(); - this.tierOutcomeType = first.getKey().getDeclaringClass(); - - this.mapper = new StatisticMapper(translation, findOperationStat(tier, storeOutcomeType, targetName)); - } - - @Override - public Class type() { - return tierOutcomeType; - } - - @Override - public ValueStatistic statistic(D result) { - return mapper.statistic(result); - } - - @Override - public ValueStatistic statistic(Set results) { - return mapper.statistic(results); - } - - @Override - public long count(D type) { - return mapper.count(type); - } - - @Override - public long sum(Set types) { - return mapper.sum(types); - } - - @Override - public long sum() { - return mapper.sum(); - } - - @Override - public void addDerivedStatistic(final ChainedOperationObserver derived) { - mapper.addDerivedStatistic(derived); - } - - @Override - public void removeDerivedStatistic(ChainedOperationObserver derived) { - mapper.removeDerivedStatistic(derived); - } - - @Override - public void begin() { - mapper.begin(); - } - - @Override - public void end(D result) { - mapper.end(result); - } - - @Override - public void end(D result, long... parameters) { - mapper.end(result, parameters); - } - - @SuppressWarnings("unchecked") - private static > OperationStatistic findOperationStat(Object rootNode, final Class statisticType, final String statName) { - Query q = queryBuilder().descendants() - .filter(context(identifier(subclassOf(OperationStatistic.class)))) - .filter(context(attributes(Matchers.>allOf( - hasAttribute("name", statName), - hasAttribute("this", new Matcher() { - @Override - protected boolean matchesSafely(OperationStatistic object) { - return object.type().equals(statisticType); - } - }) - )))).build(); - - - Set result = q.execute(Collections.singleton(ContextManager.nodeFor(rootNode))); - - if (result.size() != 1) { - throw new RuntimeException("a single stat was expected; found " + result.size()); - } - - TreeNode node = result.iterator().next(); - return (OperationStatistic) node.getContext().attributes().get("this"); - } - - public static class TierOperationOutcomes { - - public static final Map> GET_TRANSLATION; - static { - Map> translation = new EnumMap>(GetOutcome.class); - translation.put(GetOutcome.HIT, of(StoreOperationOutcomes.GetOutcome.HIT)); - translation.put(GetOutcome.MISS, of(StoreOperationOutcomes.GetOutcome.MISS, StoreOperationOutcomes.GetOutcome.TIMEOUT)); - GET_TRANSLATION = unmodifiableMap(translation); - } - - public static final Map> GET_AND_FAULT_TRANSLATION; - - static { - Map> translation = new EnumMap>(GetOutcome.class); - translation.put(GetOutcome.HIT, of(GetAndFaultOutcome.HIT)); - translation.put(GetOutcome.MISS, of(GetAndFaultOutcome.MISS, GetAndFaultOutcome.TIMEOUT)); - GET_AND_FAULT_TRANSLATION = unmodifiableMap(translation); - } - - public static final Map> GET_AND_REMOVE_TRANSLATION; - static { - Map> translation = new EnumMap>(GetOutcome.class); - translation.put(GetOutcome.HIT, of(GetAndRemoveOutcome.HIT_REMOVED)); - translation.put(GetOutcome.MISS, of(GetAndRemoveOutcome.MISS)); - GET_AND_REMOVE_TRANSLATION = 
unmodifiableMap(translation); - } - - public static final Map> GET_OR_COMPUTEIFABSENT_TRANSLATION; - static { - Map> translation = new EnumMap>(GetOutcome.class); - translation.put(GetOutcome.HIT, of(GetOrComputeIfAbsentOutcome.HIT)); - translation.put(GetOutcome.MISS, of(GetOrComputeIfAbsentOutcome.FAULTED, GetOrComputeIfAbsentOutcome.FAULT_FAILED, - GetOrComputeIfAbsentOutcome.FAULT_FAILED_MISS, GetOrComputeIfAbsentOutcome.MISS)); - GET_OR_COMPUTEIFABSENT_TRANSLATION = unmodifiableMap(translation); - } - - public static final Map> EVICTION_TRANSLATION; - static { - Map> translation = new EnumMap>(EvictionOutcome.class); - translation.put(EvictionOutcome.SUCCESS, of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); - translation.put(EvictionOutcome.FAILURE, of(StoreOperationOutcomes.EvictionOutcome.FAILURE)); - EVICTION_TRANSLATION = unmodifiableMap(translation); - }; - - public enum GetOutcome { - HIT, - MISS, - } - - public enum EvictionOutcome { - SUCCESS, - FAILURE - } - - } - -} diff --git a/core/src/test/java/org/ehcache/core/statistics/StatisticMapperTest.java b/core/src/test/java/org/ehcache/core/statistics/StatisticMapperTest.java deleted file mode 100644 index 485080cac5..0000000000 --- a/core/src/test/java/org/ehcache/core/statistics/StatisticMapperTest.java +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.ehcache.core.statistics; - -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.terracotta.statistics.OperationStatistic; -import org.terracotta.statistics.observer.ChainedOperationObserver; - -import java.util.Collections; -import java.util.EnumMap; -import java.util.Map; -import java.util.Set; - -import static java.util.EnumSet.of; -import static org.ehcache.core.statistics.StatisticMapperTest.Source.C; -import static org.ehcache.core.statistics.StatisticMapperTest.Source.D; -import static org.ehcache.core.statistics.StatisticMapperTest.Source.E; -import static org.ehcache.core.statistics.StatisticMapperTest.Target.A; -import static org.ehcache.core.statistics.StatisticMapperTest.Target.B; -import static org.hamcrest.core.IsEqual.equalTo; -import static org.hamcrest.core.StringContains.containsString; -import static org.junit.Assert.assertThat; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - -public class StatisticMapperTest { - - @Test - public void testInvalidSourceStatisticSet() { - try { - new StatisticMapper(Collections.>singletonMap(A, of(C, D)), null); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("target outcomes [B]")); - } - } - - @Test - public void testInvalidTargetStatisticSet() { - try { - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D)); - new StatisticMapper(translation, null); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("source outcomes [E]")); - } - } - - @Test - public void testIllDefinedTranslation() { - try { - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C, D)); - translation.put(B, of(D, E)); - new StatisticMapper(translation, null); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapping is ill-defined")); - } - } - - @Test - public void testTargetTypeExtraction() { - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D, E)); - StatisticMapper mapper = new StatisticMapper(translation, null); - - assertThat(mapper.type(), equalTo(Target.class)); - } - - @Test - public void testStatisticTranslation() { - OperationStatistic statistic = mock(OperationStatistic.class); - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D, E)); - StatisticMapper mapper = new StatisticMapper(translation, statistic); - - mapper.statistic(B); - verify(statistic).statistic(of(D, E)); - - mapper.statistic(A); - verify(statistic).statistic(of(C)); - } - - @Test - public void testStatisticSetTranslation() { - OperationStatistic statistic = mock(OperationStatistic.class); - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D, E)); - StatisticMapper mapper = new StatisticMapper(translation, statistic); - - mapper.statistic(of(A, B)); - verify(statistic).statistic(of(C, D, E)); - } - - @Test - public void testCountTranslation() { - OperationStatistic statistic = mock(OperationStatistic.class); - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D, E)); - StatisticMapper mapper = new StatisticMapper(translation, statistic); - - mapper.count(B); - verify(statistic).sum(of(D, E)); - - mapper.count(A); - verify(statistic).sum(of(C)); - } - - @Test - 
public void testSumTranslation() { - OperationStatistic statistic = mock(OperationStatistic.class); - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D, E)); - StatisticMapper mapper = new StatisticMapper(translation, statistic); - - mapper.sum(of(A, B)); - verify(statistic).sum(of(C, D, E)); - } - - @Test - public void testFullSum() { - OperationStatistic statistic = mock(OperationStatistic.class); - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D, E)); - StatisticMapper mapper = new StatisticMapper(translation, statistic); - - mapper.sum(); - - verify(statistic).sum(); - } - - @Test - public void testDerivedStatisticBeginDelegation() { - ArgumentCaptor wrapperCapture = ArgumentCaptor.forClass(ChainedOperationObserver.class); - - OperationStatistic statistic = mock(OperationStatistic.class); - doNothing().when(statistic).addDerivedStatistic(wrapperCapture.capture()); - - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D, E)); - StatisticMapper mapper = new StatisticMapper(translation, statistic); - - ChainedOperationObserver derived = mock(ChainedOperationObserver.class); - - mapper.addDerivedStatistic(derived); - - ChainedOperationObserver wrapper = wrapperCapture.getValue(); - - wrapper.begin(42L); - verify(derived).begin(42L); - } - - @Test - public void testDerivedStatisticEndDelegation() { - ArgumentCaptor wrapperCapture = ArgumentCaptor.forClass(ChainedOperationObserver.class); - - OperationStatistic statistic = mock(OperationStatistic.class); - doNothing().when(statistic).addDerivedStatistic(wrapperCapture.capture()); - - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D, E)); - StatisticMapper mapper = new StatisticMapper(translation, statistic); - - ChainedOperationObserver derived = mock(ChainedOperationObserver.class); - - mapper.addDerivedStatistic(derived); - - ChainedOperationObserver wrapper = wrapperCapture.getValue(); - - wrapper.end(43L, E); - verify(derived).end(43L, B); - - wrapper.end(44L, C); - verify(derived).end(44L, A); - - wrapper.end(45L, D); - verify(derived).end(45L, B); - } - - @Test - public void testDerivedStatisticEndWithParametersDelegation() { - ArgumentCaptor wrapperCapture = ArgumentCaptor.forClass(ChainedOperationObserver.class); - - OperationStatistic statistic = mock(OperationStatistic.class); - doNothing().when(statistic).addDerivedStatistic(wrapperCapture.capture()); - - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D, E)); - StatisticMapper mapper = new StatisticMapper(translation, statistic); - - ChainedOperationObserver derived = mock(ChainedOperationObserver.class); - - mapper.addDerivedStatistic(derived); - - ChainedOperationObserver wrapper = wrapperCapture.getValue(); - - wrapper.end(43L, E, 1L, 2L); - verify(derived).end(43L, B, 1L, 2L); - - wrapper.end(44L, C, 2L, 1L); - verify(derived).end(44L, A, 2L, 1L); - - wrapper.end(45L, D, 3L, 4L); - verify(derived).end(45L, B, 3L, 4L); - } - - @Test - public void testDerivedStatisticRemovalDelegation() { - OperationStatistic statistic = mock(OperationStatistic.class); - - Map> translation = new EnumMap>(Target.class); - translation.put(A, of(C)); - translation.put(B, of(D, E)); - StatisticMapper mapper = new StatisticMapper(translation, statistic); - - ChainedOperationObserver derived = mock(ChainedOperationObserver.class); - - 
mapper.addDerivedStatistic(derived); - mapper.removeDerivedStatistic(derived); - - verify(statistic).removeDerivedStatistic(any(ChainedOperationObserver.class)); - } - - enum Target { - A, B - } - - enum Source { - C, D, E - } -} diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index 50d184620f..4e8cbc7c26 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -52,8 +52,7 @@ import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; -import org.ehcache.core.statistics.TierOperationStatistic; -import org.ehcache.core.statistics.TierOperationStatistic.TierOperationOutcomes; +import org.ehcache.core.statistics.TierOperationOutcomes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.offheapstore.disk.paging.MappedPageSource; @@ -62,6 +61,7 @@ import org.terracotta.offheapstore.disk.storage.FileBackedStorageEngine; import org.terracotta.offheapstore.storage.portability.Portability; import org.terracotta.offheapstore.util.Factory; +import org.terracotta.statistics.MappedOperationStatistic; import org.terracotta.statistics.StatisticsManager; import java.io.File; @@ -304,7 +304,7 @@ private File getMetadataFile() { @ServiceDependencies({TimeSourceService.class, SerializationProvider.class, ExecutionService.class, DiskResourceService.class}) public static class Provider implements Store.Provider, AuthoritativeTier.Provider { - private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); private final Map, PersistenceSpaceIdentifier> createdStores = new ConcurrentWeakIdentityHashMap, PersistenceSpaceIdentifier>(); private final String defaultThreadPool; private volatile ServiceProvider serviceProvider; @@ -331,16 +331,16 @@ public int rankAuthority(ResourceType authorityResource, Collection OffHeapDiskStore createStore(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { OffHeapDiskStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList>(); + Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = - new TierOperationStatistic( + MappedOperationStatistic get = + new MappedOperationStatistic( store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.DISK.getTierHeight(), "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); - TierOperationStatistic evict = - new TierOperationStatistic( + MappedOperationStatistic evict = + new MappedOperationStatistic( store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -475,16 +475,16 @@ public void stop() { @Override public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { OffHeapDiskStore authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList>(); + Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = - new TierOperationStatistic( + MappedOperationStatistic get = + new MappedOperationStatistic( authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", ResourceType.Core.DISK.getTierHeight(), "getAndFault", STATISTICS_TAG); StatisticsManager.associate(get).withParent(authoritativeTier); tieredOps.add(get); - TierOperationStatistic evict = - new TierOperationStatistic( + MappedOperationStatistic evict = + new MappedOperationStatistic( authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.DISK.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(authoritativeTier); tieredOps.add(evict); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java index ee7b5bd90d..be3695d304 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/OnHeapStore.java @@ -66,11 +66,11 @@ import org.ehcache.core.statistics.HigherCachingTierOperationOutcomes; import org.ehcache.core.statistics.StoreOperationOutcomes; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; -import org.ehcache.core.statistics.TierOperationStatistic; -import org.ehcache.core.statistics.TierOperationStatistic.TierOperationOutcomes; +import org.ehcache.core.statistics.TierOperationOutcomes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.offheapstore.util.FindbugsSuppressWarnings; +import org.terracotta.statistics.MappedOperationStatistic; import org.terracotta.statistics.StatisticsManager; import org.terracotta.statistics.observer.OperationObserver; @@ -1665,7 +1665,7 @@ public static class Provider implements Store.Provider, CachingTier.Provider, Hi private volatile ServiceProvider serviceProvider; private final Map, List> createdStores = new ConcurrentWeakIdentityHashMap, List>(); - private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); @Override public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { @@ -1680,16 +1680,16 @@ public int rankCachingTier(Set> resourceTypes, Collection OnHeapStore createStore(final Configuration storeConfig, final ServiceConfiguration... 
serviceConfigs) { OnHeapStore store = createStoreInternal(storeConfig, new ScopedStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList>(); + Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = - new TierOperationStatistic( + MappedOperationStatistic get = + new MappedOperationStatistic( store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); - TierOperationStatistic evict = - new TierOperationStatistic( + MappedOperationStatistic evict = + new MappedOperationStatistic( store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -1777,16 +1777,16 @@ public void stop() { @Override public CachingTier createCachingTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { OnHeapStore cachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); - Collection> tieredOps = new ArrayList>(); + Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = - new TierOperationStatistic( + MappedOperationStatistic get = + new MappedOperationStatistic( cachingTier, TierOperationOutcomes.GET_OR_COMPUTEIFABSENT_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); StatisticsManager.associate(get).withParent(cachingTier); tieredOps.add(get); - TierOperationStatistic evict - = new TierOperationStatistic( + MappedOperationStatistic evict + = new MappedOperationStatistic( cachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(cachingTier); tieredOps.add(evict); @@ -1814,16 +1814,16 @@ public void initCachingTier(CachingTier resource) { @Override public HigherCachingTier createHigherCachingTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { OnHeapStore higherCachingTier = createStoreInternal(storeConfig, new ScopedStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList>(); + Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = - new TierOperationStatistic( + MappedOperationStatistic get = + new MappedOperationStatistic( higherCachingTier, TierOperationOutcomes.GET_OR_COMPUTEIFABSENT_TRANSLATION, "get", ResourceType.Core.HEAP.getTierHeight(), "getOrComputeIfAbsent", STATISTICS_TAG); StatisticsManager.associate(get).withParent(higherCachingTier); tieredOps.add(get); - TierOperationStatistic evict = - new TierOperationStatistic( + MappedOperationStatistic evict = + new MappedOperationStatistic( higherCachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.HEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(higherCachingTier); tieredOps.add(evict); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java index ed55b18381..d017fb47a2 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/OffHeapStore.java @@ -45,8 +45,7 @@ import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; -import org.ehcache.core.statistics.TierOperationStatistic; -import org.ehcache.core.statistics.TierOperationStatistic.TierOperationOutcomes; +import org.ehcache.core.statistics.TierOperationOutcomes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.offheapstore.paging.PageSource; @@ -56,6 +55,7 @@ import org.terracotta.offheapstore.storage.PointerSize; import org.terracotta.offheapstore.storage.portability.Portability; import org.terracotta.offheapstore.util.Factory; +import org.terracotta.statistics.MappedOperationStatistic; import org.terracotta.statistics.StatisticsManager; import java.util.ArrayList; @@ -135,7 +135,7 @@ public static class Provider implements Store.Provider, AuthoritativeTier.Provid private volatile ServiceProvider serviceProvider; private final Set> createdStores = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap, Boolean>()); - private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); + private final Map, Collection>> tierOperationStatistics = new ConcurrentWeakIdentityHashMap, Collection>>(); @Override public int rank(final Set> resourceTypes, final Collection> serviceConfigs) { @@ -150,16 +150,16 @@ public int rankAuthority(ResourceType authorityResource, Collection OffHeapStore createStore(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { OffHeapStore store = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList>(); + Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = - new TierOperationStatistic( + MappedOperationStatistic get = + new MappedOperationStatistic( store, TierOperationOutcomes.GET_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "get", STATISTICS_TAG); StatisticsManager.associate(get).withParent(store); tieredOps.add(get); - TierOperationStatistic evict = - new TierOperationStatistic( + MappedOperationStatistic evict = + new MappedOperationStatistic( store, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(store); tieredOps.add(evict); @@ -242,16 +242,16 @@ public void stop() { @Override public AuthoritativeTier createAuthoritativeTier(Configuration storeConfig, ServiceConfiguration... serviceConfigs) { OffHeapStore authoritativeTier = createStoreInternal(storeConfig, new ThreadLocalStoreEventDispatcher(storeConfig.getDispatcherConcurrency()), serviceConfigs); - Collection> tieredOps = new ArrayList>(); + Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get = - new TierOperationStatistic( + MappedOperationStatistic get = + new MappedOperationStatistic( authoritativeTier, TierOperationOutcomes.GET_AND_FAULT_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndFault", STATISTICS_TAG); StatisticsManager.associate(get).withParent(authoritativeTier); tieredOps.add(get); - TierOperationStatistic evict - = new TierOperationStatistic( + MappedOperationStatistic evict + = new MappedOperationStatistic( authoritativeTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(authoritativeTier); tieredOps.add(evict); @@ -273,16 +273,16 @@ public void initAuthoritativeTier(AuthoritativeTier resource) { @Override public LowerCachingTier createCachingTier(Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { OffHeapStore lowerCachingTier = createStoreInternal(storeConfig, NullStoreEventDispatcher.nullStoreEventDispatcher(), serviceConfigs); - Collection> tieredOps = new ArrayList>(); + Collection> tieredOps = new ArrayList>(); - TierOperationStatistic get - = new TierOperationStatistic( + MappedOperationStatistic get + = new MappedOperationStatistic( lowerCachingTier, TierOperationOutcomes.GET_AND_REMOVE_TRANSLATION, "get", ResourceType.Core.OFFHEAP.getTierHeight(), "getAndRemove", STATISTICS_TAG); StatisticsManager.associate(get).withParent(lowerCachingTier); tieredOps.add(get); - TierOperationStatistic evict = - new TierOperationStatistic( + MappedOperationStatistic evict = + new MappedOperationStatistic( lowerCachingTier, TierOperationOutcomes.EVICTION_TRANSLATION, "eviction", ResourceType.Core.OFFHEAP.getTierHeight(), "eviction", STATISTICS_TAG); StatisticsManager.associate(evict).withParent(lowerCachingTier); tieredOps.add(evict); diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java index 494a90c215..255cc69463 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -16,23 +16,21 @@ package org.ehcache.management.providers.statistics; import org.ehcache.core.statistics.CacheOperationOutcomes; -import org.ehcache.core.statistics.TierOperationStatistic; +import org.ehcache.core.statistics.TierOperationOutcomes; import org.ehcache.management.ManagementRegistryServiceConfiguration; import org.ehcache.management.config.StatisticsProviderConfiguration; import org.ehcache.management.providers.CacheBinding; import org.ehcache.management.providers.ExposedCacheBinding; import org.terracotta.context.extended.OperationStatisticDescriptor; -import org.terracotta.context.extended.RegisteredCompoundStatistic; -import org.terracotta.context.extended.RegisteredCounterStatistic; -import org.terracotta.context.extended.RegisteredRatioStatistic; -import org.terracotta.context.extended.RegisteredSizeStatistic; import org.terracotta.context.extended.RegisteredStatistic; import org.terracotta.context.extended.StatisticsRegistry; import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; import org.terracotta.management.model.stats.MemoryUnit; import org.terracotta.management.model.stats.NumberUnit; import org.terracotta.management.model.stats.Sample; import org.terracotta.management.model.stats.Statistic; +import org.terracotta.management.model.stats.StatisticType; import org.terracotta.management.model.stats.history.AverageHistory; import org.terracotta.management.model.stats.history.CounterHistory; import org.terracotta.management.model.stats.history.DurationHistory; @@ -40,21 +38,21 @@ import org.terracotta.management.model.stats.history.RatioHistory; import org.terracotta.management.model.stats.history.SizeHistory; import org.terracotta.statistics.archive.Timestamped; -import org.terracotta.statistics.extended.CompoundOperation; +import org.terracotta.statistics.extended.SampleType; import org.terracotta.statistics.extended.SampledStatistic; import java.util.ArrayList; import java.util.Collection; import java.util.EnumSet; +import java.util.HashMap; import 
java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; -import org.terracotta.management.model.stats.StatisticType; import static java.util.Collections.singleton; import static java.util.EnumSet.allOf; @@ -63,6 +61,16 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { + private static final Map COMPOUND_SUFFIXES = new HashMap(); + + static { + COMPOUND_SUFFIXES.put("Count", SampleType.COUNTER); + COMPOUND_SUFFIXES.put("Rate", SampleType.RATE); + COMPOUND_SUFFIXES.put("LatencyMinimum", SampleType.LATENCY_MIN); + COMPOUND_SUFFIXES.put("LatencyMaximum", SampleType.LATENCY_MAX); + COMPOUND_SUFFIXES.put("LatencyAverage", SampleType.LATENCY_AVG); + } + private final StatisticsRegistry statisticsRegistry; StandardEhcacheStatistics(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding, StatisticsProviderConfiguration statisticsProviderConfiguration, ScheduledExecutorService executor) { @@ -73,7 +81,7 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { EnumSet hit = of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER); EnumSet miss = of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER); - OperationStatisticDescriptor getCacheStatisticDescriptor = OperationStatisticDescriptor.descriptor("get", singleton("cache"), CacheOperationOutcomes.GetOutcome.class); + OperationStatisticDescriptor getCacheStatisticDescriptor = OperationStatisticDescriptor.descriptor("get", singleton("cache"), CacheOperationOutcomes.GetOutcome.class); statisticsRegistry.registerCompoundOperations("Cache:Hit", getCacheStatisticDescriptor, hit); statisticsRegistry.registerCompoundOperations("Cache:Miss", getCacheStatisticDescriptor, miss); @@ -81,137 +89,92 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { statisticsRegistry.registerRatios("Cache:HitRatio", getCacheStatisticDescriptor, hit, allOf(CacheOperationOutcomes.GetOutcome.class)); statisticsRegistry.registerRatios("Cache:MissRatio", getCacheStatisticDescriptor, miss, allOf(CacheOperationOutcomes.GetOutcome.class)); - Class tierOperationGetOucomeClass = TierOperationStatistic.TierOperationOutcomes.GetOutcome.class; - OperationStatisticDescriptor getTierStatisticDescriptor = OperationStatisticDescriptor.descriptor("get", singleton("tier"), tierOperationGetOucomeClass); + Class tierOperationGetOucomeClass = TierOperationOutcomes.GetOutcome.class; + OperationStatisticDescriptor getTierStatisticDescriptor = OperationStatisticDescriptor.descriptor("get", singleton("tier"), tierOperationGetOucomeClass); - statisticsRegistry.registerCompoundOperations("Hit", getTierStatisticDescriptor, of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT)); - statisticsRegistry.registerCompoundOperations("Miss", getTierStatisticDescriptor, of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS)); + statisticsRegistry.registerCompoundOperations("Hit", getTierStatisticDescriptor, of(TierOperationOutcomes.GetOutcome.HIT)); + statisticsRegistry.registerCompoundOperations("Miss", getTierStatisticDescriptor, of(TierOperationOutcomes.GetOutcome.MISS)); statisticsRegistry.registerCompoundOperations("Eviction", 
OperationStatisticDescriptor.descriptor("eviction", singleton("tier"), - TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class), - allOf(TierOperationStatistic.TierOperationOutcomes.EvictionOutcome.class)); - statisticsRegistry.registerRatios("HitRatio", getTierStatisticDescriptor, of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.HIT), allOf(tierOperationGetOucomeClass)); - statisticsRegistry.registerRatios("MissRatio", getTierStatisticDescriptor, of(TierOperationStatistic.TierOperationOutcomes.GetOutcome.MISS), allOf(tierOperationGetOucomeClass)); + TierOperationOutcomes.EvictionOutcome.class), + allOf(TierOperationOutcomes.EvictionOutcome.class)); + statisticsRegistry.registerRatios("HitRatio", getTierStatisticDescriptor, of(TierOperationOutcomes.GetOutcome.HIT), allOf(tierOperationGetOucomeClass)); + statisticsRegistry.registerRatios("MissRatio", getTierStatisticDescriptor, of(TierOperationOutcomes.GetOutcome.MISS), allOf(tierOperationGetOucomeClass)); statisticsRegistry.registerCounter("MappingCount", descriptor("mappings", singleton("tier"))); statisticsRegistry.registerCounter("MaxMappingCount", descriptor("maxMappings", singleton("tier"))); statisticsRegistry.registerSize("AllocatedByteSize", descriptor("allocatedMemory", singleton("tier"))); statisticsRegistry.registerSize("OccupiedByteSize", descriptor("occupiedMemory", singleton("tier"))); - - Map registrations = statisticsRegistry.getRegistrations(); - for (RegisteredStatistic registeredStatistic : registrations.values()) { - registeredStatistic.getSupport().setAlwaysOn(true); - } } @SuppressWarnings("unchecked") - public Statistic queryStatistic(String statisticName, long since) { - Map registrations = statisticsRegistry.getRegistrations(); - for (Map.Entry entry : registrations.entrySet()) { - String name = entry.getKey(); - RegisteredStatistic registeredStatistic = entry.getValue(); - - if (registeredStatistic instanceof RegisteredCompoundStatistic) { - RegisteredCompoundStatistic registeredCompoundStatistic = (RegisteredCompoundStatistic) registeredStatistic; - CompoundOperation compoundOperation = registeredCompoundStatistic.getCompoundOperation(); - - if ((name + "Count").equals(statisticName)) { - SampledStatistic count = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).count(); - return new CounterHistory(buildHistory(count, since), NumberUnit.COUNT); - } else if ((name + "Rate").equals(statisticName)) { - SampledStatistic rate = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).rate(); - return new RateHistory(buildHistory(rate, since), TimeUnit.SECONDS); - - } else if ((name + "LatencyMinimum").equals(statisticName)) { - SampledStatistic minimum = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().minimum(); - return new DurationHistory(buildHistory(minimum, since), TimeUnit.NANOSECONDS); - - } else if ((name + "LatencyMaximum").equals(statisticName)) { - SampledStatistic maximum = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().maximum(); - return new DurationHistory(buildHistory(maximum, since), TimeUnit.NANOSECONDS); - - } else if ((name + "LatencyAverage").equals(statisticName)) { - SampledStatistic average = compoundOperation.compound((Set) registeredCompoundStatistic.getCompound()).latency().average(); - return new AverageHistory(buildHistory(average, since), TimeUnit.NANOSECONDS); - } - } else if (registeredStatistic instanceof RegisteredRatioStatistic) { - 
RegisteredRatioStatistic registeredRatioStatistic = (RegisteredRatioStatistic) registeredStatistic; - CompoundOperation compoundOperation = registeredRatioStatistic.getCompoundOperation(); - - if (name.equals(statisticName)) { - SampledStatistic ratio = (SampledStatistic) compoundOperation.ratioOf((Set) registeredRatioStatistic.getNumerator(), (Set) registeredRatioStatistic.getDenominator()); - return new RatioHistory(buildHistory(ratio, since), NumberUnit.RATIO); - } - } else if (registeredStatistic instanceof RegisteredSizeStatistic) { - RegisteredSizeStatistic registeredSizeStatistic = (RegisteredSizeStatistic) registeredStatistic; - if (name.equals(statisticName)) { - SampledStatistic count = (SampledStatistic) registeredSizeStatistic.getSampledStatistic(); - return new SizeHistory(buildHistory(count, since), MemoryUnit.B); - } - } else if (registeredStatistic instanceof RegisteredCounterStatistic) { - RegisteredCounterStatistic registeredCounterStatistic = (RegisteredCounterStatistic) registeredStatistic; - if (name.equals(statisticName)) { - SampledStatistic count = (SampledStatistic) registeredCounterStatistic.getSampledStatistic(); - return new CounterHistory(buildHistory(count, since), NumberUnit.COUNT); - } - } else { - throw new UnsupportedOperationException("Cannot handle registered statistic type : " + registeredStatistic); + Statistic queryStatistic(String statisticName, long since) { + // first search for a non-compound stat + SampledStatistic statistic = statisticsRegistry.findSampledStatistic(statisticName); + + // if not found, it can be a compound stat, so search for it + if (statistic == null) { + for (Iterator> it = COMPOUND_SUFFIXES.entrySet().iterator(); it.hasNext() && statistic == null; ) { + Entry entry = it.next(); + statistic = statisticsRegistry.findSampledCompoundStatistic(statisticName.substring(0, Math.max(0, statisticName.length() - entry.getKey().length())), entry.getValue()); } } - throw new IllegalArgumentException("No registered statistic named '" + statisticName + "'"); - } - - private List> buildHistory(SampledStatistic sampledStatistic, long since) { - List> result = new ArrayList>(); + if (statistic != null) { + List> history = statistic.history(since); + List samples = new ArrayList>(history.size()); + for (Timestamped timestamped : history) { + Sample sample = new Sample(timestamped.getTimestamp(), timestamped.getSample()); + samples.add(sample); + } - List> history = sampledStatistic.history(); - for (Timestamped timestamped : history) { - if(timestamped.getTimestamp() >= since) { - result.add(new Sample(timestamped.getTimestamp(), timestamped.getSample())); + switch (statistic.type()) { + case COUNTER: return new CounterHistory((List>) samples, NumberUnit.COUNT); + case RATE: return new RateHistory((List>) samples, TimeUnit.SECONDS); + case LATENCY_MIN: return new DurationHistory((List>) samples, TimeUnit.NANOSECONDS); + case LATENCY_MAX: return new DurationHistory((List>) samples, TimeUnit.NANOSECONDS); + case LATENCY_AVG: return new AverageHistory((List>) samples, TimeUnit.NANOSECONDS); + case RATIO: return new RatioHistory((List>) samples, NumberUnit.RATIO); + case SIZE: return new SizeHistory((List>) samples, MemoryUnit.B); + default: throw new UnsupportedOperationException(statistic.type().name()); } } - return result; + throw new IllegalArgumentException("No registered statistic named '" + statisticName + "'"); } @Override public Collection getDescriptors() { Set capabilities = new HashSet(); - 
capabilities.addAll(queryStatisticsRegistry()); - return capabilities; - } - - private Set queryStatisticsRegistry() { - Set capabilities = new HashSet(); - Map registrations = statisticsRegistry.getRegistrations(); - - for(Entry entry : registrations.entrySet()) { - String statisticName = entry.getKey().toString(); + for (Entry entry : registrations.entrySet()) { + String statisticName = entry.getKey(); RegisteredStatistic registeredStatistic = registrations.get(statisticName); - - if(registeredStatistic instanceof RegisteredCompoundStatistic) { - List statistics = new ArrayList(); - statistics.add(new StatisticDescriptor(entry.getKey() + "Count", StatisticType.COUNTER_HISTORY)); - statistics.add(new StatisticDescriptor(entry.getKey() + "Rate", StatisticType.RATE_HISTORY)); - statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyMinimum", StatisticType.DURATION_HISTORY)); - statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyMaximum", StatisticType.DURATION_HISTORY)); - statistics.add(new StatisticDescriptor(entry.getKey() + "LatencyAverage", StatisticType.AVERAGE_HISTORY)); - - capabilities.addAll(statistics); - } else if(registeredStatistic instanceof RegisteredRatioStatistic) { - capabilities.add(new StatisticDescriptor(entry.getKey() + "Ratio", StatisticType.RATIO_HISTORY)); - } else if(registeredStatistic instanceof RegisteredCounterStatistic) { - capabilities.add(new StatisticDescriptor(statisticName, StatisticType.COUNTER_HISTORY)); - } else if(registeredStatistic instanceof RegisteredSizeStatistic) { - capabilities.add(new StatisticDescriptor(statisticName, StatisticType.SIZE_HISTORY)); + switch (registeredStatistic.getType()) { + case COUNTER: + capabilities.add(new StatisticDescriptor(statisticName, StatisticType.COUNTER_HISTORY)); + break; + case RATIO: + capabilities.add(new StatisticDescriptor(entry.getKey() + "Ratio", StatisticType.RATIO_HISTORY)); + break; + case SIZE: + capabilities.add(new StatisticDescriptor(statisticName, StatisticType.SIZE_HISTORY)); + break; + case COMPOUND: + capabilities.add(new StatisticDescriptor(entry.getKey() + "Count", StatisticType.COUNTER_HISTORY)); + capabilities.add(new StatisticDescriptor(entry.getKey() + "Rate", StatisticType.RATE_HISTORY)); + capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyMinimum", StatisticType.DURATION_HISTORY)); + capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyMaximum", StatisticType.DURATION_HISTORY)); + capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyAverage", StatisticType.AVERAGE_HISTORY)); + break; + default: + throw new UnsupportedOperationException(registeredStatistic.getType().name()); } } return capabilities; } - public void dispose() { + void dispose() { statisticsRegistry.clearRegistrations(); } diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java index 453af75f31..0dde5e56c2 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java @@ -20,6 +20,8 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.EntryUnit.ENTRIES; import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; import java.io.IOException; 
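The reworked queryStatistic(String, long) above first asks the registry for a plain sampled statistic and, only if that fails, strips one of the known suffixes (Count, Rate, LatencyMinimum, LatencyMaximum, LatencyAverage) to look up a compound statistic of the matching SampleType. A minimal, standalone sketch of that suffix-dispatch idea, using hypothetical names rather than the actual Ehcache/Terracotta classes:

    import java.util.AbstractMap;
    import java.util.HashMap;
    import java.util.Map;

    class SuffixDispatchSketch {

      enum SampleType { COUNTER, RATE, LATENCY_MIN, LATENCY_MAX, LATENCY_AVG }

      private static final Map<String, SampleType> SUFFIXES = new HashMap<String, SampleType>();
      static {
        SUFFIXES.put("Count", SampleType.COUNTER);
        SUFFIXES.put("Rate", SampleType.RATE);
        SUFFIXES.put("LatencyMinimum", SampleType.LATENCY_MIN);
        SUFFIXES.put("LatencyMaximum", SampleType.LATENCY_MAX);
        SUFFIXES.put("LatencyAverage", SampleType.LATENCY_AVG);
      }

      // "Cache:HitCount" resolves to base name "Cache:Hit" and type COUNTER; null means no suffix matched.
      static Map.Entry<String, SampleType> resolve(String statisticName) {
        for (Map.Entry<String, SampleType> entry : SUFFIXES.entrySet()) {
          if (statisticName.endsWith(entry.getKey())) {
            String base = statisticName.substring(0, statisticName.length() - entry.getKey().length());
            return new AbstractMap.SimpleEntry<String, SampleType>(base, entry.getValue());
          }
        }
        return null;
      }
    }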
import java.util.ArrayList; @@ -41,21 +43,19 @@ import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.ehcache.spi.service.Service; +import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.history.CounterHistory; -/** - * - * - */ @RunWith(Parameterized.class) public class EvictionTest { @@ -97,6 +97,9 @@ public static Collection data() { @Rule public final TemporaryFolder diskPath = new TemporaryFolder(); + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + public EvictionTest(Builder resources, int iterations, List expected, byte[] value, List stats) { this.resources = resources.build(); this.iterations = iterations; @@ -124,48 +127,61 @@ public void test() throws IOException, InterruptedException { CacheConfiguration cacheConfig = newCacheConfigurationBuilder(Long.class, byte[].class, resources).build(); cacheManager.init(); + Cache cache = cacheManager.createCache("myCache", cacheConfig); + Context context = StatsUtil.createContext(managementRegistry); + + // we need to trigger first the stat computation with a first query + ContextualStatistics contextualStatistics = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(stats) + .on(context) + .build() + .execute() + .getSingleResult(); + assertThat(contextualStatistics.size(), Matchers.is(stats.size())); + for(long i=0; i cache = cacheManager.getCache("myCache", Long.class, String.class); cache.put(1L, "1");//put in lowest tier @@ -121,8 +125,6 @@ public void test() throws InterruptedException, IOException { cache.get(1L);//HIT middle/highest tier. Depends on tier configuration. 
- Context context = StatsUtil.createContext(managementRegistry); - long tierHitCountSum = 0; for (int i = 0; i < statNames.size(); i++) { tierHitCountSum += StatsUtil.getExpectedValueFromCounterHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java index 38ff18fe15..aa77e7aed1 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java @@ -15,17 +15,6 @@ */ package org.ehcache.management.providers.statistics; -import static java.util.Arrays.asList; -import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; -import static org.ehcache.config.units.EntryUnit.ENTRIES; -import static org.ehcache.config.units.MemoryUnit.MB; -import static org.hamcrest.CoreMatchers.is; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.concurrent.TimeUnit; import org.ehcache.Cache; import org.ehcache.CacheManager; import org.ehcache.config.Builder; @@ -46,12 +35,28 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.Statistic; +import org.terracotta.management.model.stats.StatisticHistory; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.hamcrest.CoreMatchers.is; @RunWith(Parameterized.class) public class HitRatioTest { @Rule - public final Timeout globalTimeout = Timeout.seconds(10); + public final Timeout globalTimeout = Timeout.seconds(30); @Rule public final TemporaryFolder diskPath = new TemporaryFolder(); @@ -132,8 +137,8 @@ public void test() throws InterruptedException, IOException { try { DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); - registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); - ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES)); + final ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); @@ -143,18 +148,22 @@ public void test() throws InterruptedException, IOException { .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) .build(true); + final Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, 
"Cache:HitRatio", "OnHeap:HitRatio", "OffHeap:HitRatio", "Disk:HitRatio"); + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + //System.out.println("put() 1, 2, 3"); cache.put(1L, "1");//put in lowest tier cache.put(2L, "2");//put in lowest tier cache.put(3L, "3");//put in lowest tier for(Long key : getKeys) { - cache.get(key); + String v = cache.get(key); + //System.out.println("get(" + key + "): " + (v == null ? "miss" : "hit")); } - Context context = StatsUtil.createContext(managementRegistry); - double tierHitRatio = 0; for (int i = 0; i < statNames.size(); i++) { tierHitRatio = StatsUtil.getExpectedValueFromRatioHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java index 553b4751fe..2eb79c3ddd 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java @@ -103,6 +103,10 @@ public void test() throws InterruptedException, IOException { .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) .build(true); + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "Cache:MissCount", "OnHeap:MissCount", "OffHeap:MissCount", "Disk:MissCount"); + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); cache.put(1L, "1");//put in lowest tier @@ -112,8 +116,6 @@ public void test() throws InterruptedException, IOException { cache.get(4L);//MISS cache.get(5L);//MISS - Context context = StatsUtil.createContext(managementRegistry); - long tierMissCountSum = 0; for (int i = 0; i < statNames.size(); i++) { tierMissCountSum += StatsUtil.getExpectedValueFromCounterHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java index 9cecfe962b..20dd6adc15 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java @@ -39,6 +39,7 @@ import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -47,10 +48,6 @@ import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; -/** - * - * - */ @RunWith(Parameterized.class) public class MissRatioTest { @@ -125,7 +122,7 @@ public void test() throws InterruptedException, IOException { try { DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); - registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); + registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES)); ManagementRegistryService managementRegistry = new 
DefaultManagementRegistryService(registryConfiguration); CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); @@ -136,18 +133,22 @@ public void test() throws InterruptedException, IOException { .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) .build(true); + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "Cache:MissRatio", "OnHeap:MissRatio", "OffHeap:MissRatio", "Disk:MissRatio"); + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + //System.out.println("put() 1, 2, 3"); cache.put(1L, "1");//put in lowest tier cache.put(2L, "2");//put in lowest tier cache.put(3L, "3");//put in lowest tier for(Long key : getKeys) { - cache.get(key); + String v = cache.get(key); + //System.out.println("get(" + key + "): " + (v == null ? "miss" : "hit")); } - Context context = StatsUtil.createContext(managementRegistry); - double tierMissRatio = 0; for (int i = 0; i < statNames.size(); i++) { tierMissRatio = StatsUtil.getExpectedValueFromRatioHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java index d69149ae6e..784d9a71d3 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java @@ -31,21 +31,22 @@ import org.ehcache.management.registry.DefaultManagementRegistryService; import org.hamcrest.Matchers; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.Timeout; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.history.CounterHistory; import static org.junit.Assert.assertThat; -/** - * - * - */ public class StandardEhcacheStatisticsTest { private final EhcacheStatisticsProviderConfiguration EHCACHE_STATS_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + @Test public void statsClearCacheTest() throws InterruptedException { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, @@ -79,17 +80,19 @@ public void statsClearCacheTest() throws InterruptedException { Context context = StatsUtil.createContext(managementRegistry); - ContextualStatistics clearCounter = managementRegistry.withCapability("StatisticsCapability") + CounterHistory cache_Clear_Count; + do { + ContextualStatistics clearCounter = managementRegistry.withCapability("StatisticsCapability") .queryStatistics(Arrays.asList("Cache:ClearCount")) .on(context) .build() .execute() .getSingleResult(); - assertThat(clearCounter.size(), Matchers.is(1)); - CounterHistory cache_Clear_Count = clearCounter.getStatistic(CounterHistory.class, "Cache:ClearCount"); + assertThat(clearCounter.size(), Matchers.is(1)); + cache_Clear_Count = clearCounter.getStatistic(CounterHistory.class, "Cache:ClearCount"); + } while(!Thread.currentThread().isInterrupted() && 
!StatsUtil.isHistoryReady(cache_Clear_Count, 0L)); - while(!StatsUtil.isHistoryReady(cache_Clear_Count, 0L)) {} int mostRecentIndex = cache_Clear_Count.getValue().length - 1; assertThat(cache_Clear_Count.getValue()[mostRecentIndex].getValue(), Matchers.equalTo(2L)); } diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java index 7005e16578..78c6f975e5 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java @@ -15,54 +15,37 @@ */ package org.ehcache.management.providers.statistics; -import java.util.Arrays; import org.ehcache.management.ManagementRegistryService; import org.hamcrest.Matchers; -import org.junit.Assert; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.context.ContextContainer; import org.terracotta.management.model.stats.AbstractStatisticHistory; import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Statistic; +import org.terracotta.management.model.stats.StatisticHistory; import org.terracotta.management.model.stats.history.CounterHistory; import org.terracotta.management.model.stats.history.RatioHistory; import org.terracotta.management.registry.ResultSet; import org.terracotta.management.registry.StatisticQuery; -/** - * - * - */ -public class StatsUtil { - - public static boolean isHistoryReady(AbstractStatisticHistory counterHistory, Double defaultValue) { +import java.util.Arrays; +import java.util.Map; - if(counterHistory.getValue().length > 0) { - int mostRecentIndex = counterHistory.getValue().length - 1; - if(defaultValue.equals(Double.POSITIVE_INFINITY)) { - if((Double)counterHistory.getValue()[mostRecentIndex].getValue() < defaultValue ) { - return true; - } - } else { - if((Double)counterHistory.getValue()[mostRecentIndex].getValue() > defaultValue ) { - return true; - } - } +import static org.junit.Assert.assertThat; - } - return false; - } +public class StatsUtil { public static boolean isHistoryReady(AbstractStatisticHistory counterHistory) { - if(counterHistory.getValue().length > 0) { + if (counterHistory.getValue().length > 0) { return true; } return false; } public static boolean isHistoryReady(AbstractStatisticHistory history, Long defaultValue) { - if(history.getValue().length > 0) { + if (history.getValue().length > 0) { int mostRecentIndex = history.getValue().length - 1; - if((Long)history.getValue()[mostRecentIndex].getValue() > defaultValue) { + if ((Long) history.getValue()[mostRecentIndex].getValue() > defaultValue) { return true; } } @@ -73,8 +56,8 @@ public static Context createContext(ManagementRegistryService managementRegistry ContextContainer cacheManagerCtx = managementRegistry.getContextContainer(); ContextContainer firstCacheCtx = cacheManagerCtx.getSubContexts().iterator().next(); return Context.empty() - .with(cacheManagerCtx.getName(), cacheManagerCtx.getValue()) - .with(firstCacheCtx.getName(), firstCacheCtx.getValue()); + .with(cacheManagerCtx.getName(), cacheManagerCtx.getValue()) + .with(firstCacheCtx.getName(), firstCacheCtx.getValue()); } /* @@ -85,9 +68,9 @@ public static Context createContext(ManagementRegistryService managementRegistry public static long getExpectedValueFromCounterHistory(String statName, Context context, ManagementRegistryService managementRegistry, 
long expectedResult) { StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") - .queryStatistics(Arrays.asList(statName)) - .on(context) - .build(); + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); long value = 0; do { @@ -95,19 +78,18 @@ public static long getExpectedValueFromCounterHistory(String statName, Context c ContextualStatistics statisticsContext = counters.getResult(context); - Assert.assertThat(counters.size(), Matchers.is(1)); + assertThat(counters.size(), Matchers.is(1)); CounterHistory counterHistory = statisticsContext.getStatistic(CounterHistory.class, statName); if (counterHistory.getValue().length > 0) { int mostRecentIndex = counterHistory.getValue().length - 1; value = counterHistory.getValue()[mostRecentIndex].getValue(); - System.out.println("statName: " + statName + " value: " + value + " expectedResult: " + expectedResult); } - }while(value != expectedResult); + } while (!Thread.currentThread().isInterrupted() && value != expectedResult); - Assert.assertThat(value, Matchers.is(expectedResult)); + assertThat(value, Matchers.is(expectedResult)); return value; } @@ -120,9 +102,9 @@ public static long getExpectedValueFromCounterHistory(String statName, Context c public static double getExpectedValueFromRatioHistory(String statName, Context context, ManagementRegistryService managementRegistry, double expectedResult) { StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") - .queryStatistics(Arrays.asList(statName)) - .on(context) - .build(); + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); double value = 0; do { @@ -130,20 +112,49 @@ public static double getExpectedValueFromRatioHistory(String statName, Context c ContextualStatistics statisticsContext = counters.getResult(context); - Assert.assertThat(counters.size(), Matchers.is(1)); + assertThat(counters.size(), Matchers.is(1)); RatioHistory ratioHistory = statisticsContext.getStatistic(RatioHistory.class, statName); if (ratioHistory.getValue().length > 0) { int mostRecentIndex = ratioHistory.getValue().length - 1; value = ratioHistory.getValue()[mostRecentIndex].getValue(); - System.out.println("statName: " + statName + " value: " + value + " expectedResult: " + expectedResult); } + } while (!Thread.currentThread().isInterrupted() && value != expectedResult); - }while(value != expectedResult); - - Assert.assertThat(value, Matchers.is(expectedResult)); + assertThat(value, Matchers.is(expectedResult)); return value; } + + // When testing ratios, we need to wait for the first computation (we do not have any choice) to happen because ratio depends on 2 other sampled statistics. + // If you do not wait, then you'll always get some NaN because the hits will be done within the 1st second, and the hits won't be done in the right "window". + // A ratio is computed by dividing a rate with another rate. See CompoundOperationImpl.ratioOf(). + // And a rate is computed with values aggregated into a EventRateSimpleMovingAverage. + // The call to EventRateSimpleMovingAverage.rateUsingSeconds() will return 0 during the fist second (until first computation did happen). + // So the hits must be after the first second so that values get accumulated into the partitions of EventRateSimpleMovingAverage. + + // Also, we have to take in consideration that in clustered, there is a collector that is scheduled at 75% of the TTD to collect and send stats. + // So the delay can be greater than just the duration of the first sampling. 
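As the comment block above explains, a ratio is one rate divided by another, and both rates read as zero until the first sampling window has elapsed, so a query issued too early sees NaN. A two-line plain-Java illustration of why (not Ehcache code):

    class RatioNaNExample {
      public static void main(String[] args) {
        double hitsPerSecond = 0.0;   // a rate before its first computation window has passed
        double totalPerSecond = 0.0;  // likewise
        System.out.println(hitsPerSecond / totalPerSecond); // prints NaN, which is what early ratio queries observed
      }
    }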
+ public static void triggerStatComputation(ManagementRegistryService managementRegistry, Context context, String... statNames) { + boolean noSample; + do { + noSample = false; + Map> statistics = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statNames)) + .on(context) + .build() + .execute() + .getSingleResult() + .getStatistics(); + + for (Map.Entry> entry : statistics.entrySet()) { + if (((StatisticHistory) entry.getValue()).getValue().length == 0) { + noSample = true; + break; + } + } + } while (!Thread.currentThread().isInterrupted() && noSample); + } + } diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index 625def7b91..14523ef1db 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -34,6 +34,7 @@ import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.management.ManagementRegistryService; +import org.junit.rules.Timeout; import org.terracotta.management.registry.ResultSet; import org.terracotta.management.registry.StatisticQuery; import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; @@ -77,6 +78,9 @@ public class DefaultManagementRegistryServiceTest { @Rule public final TemporaryFolder diskPath = new TemporaryFolder(); + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + @Test public void testCanGetContext() { CacheManager cacheManager1 = null; @@ -242,7 +246,7 @@ public void testCanGetCapabilities() { cacheManager1.close(); } - @Test (timeout = 5000) + @Test public void testCanGetStats() { String queryStatisticName = "Cache:HitCount"; @@ -316,9 +320,9 @@ public void testCanGetStats() { } private static ResultSet getResultSet(Builder builder, Context context1, Context context2, Class type, String statisticsName) { - ResultSet counters; + ResultSet counters = null; - while(true) //wait till Counter history(s) is initialized and contains values. + while(!Thread.currentThread().isInterrupted()) //wait till Counter history(s) is initialized and contains values. 
{ counters = builder.build().execute(); @@ -351,7 +355,7 @@ private static ResultSet getResultSet(Builder builder, Con return counters; } - @Test (timeout=5000) + @Test public void testCanGetStatsSinceTime() throws InterruptedException { CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10)) @@ -396,7 +400,7 @@ public void testCanGetStatsSinceTime() throws InterruptedException { Thread.sleep(100); statistics = builder.build().execute().getResult(context); getCount = statistics.getStatistic(CounterHistory.class); - } while (getCount.getValue().length < 1); + } while (!Thread.currentThread().isInterrupted() && getCount.getValue().length < 1); // within 1 second of history there has been 3 gets int mostRecentIndex = getCount.getValue().length - 1; @@ -421,7 +425,7 @@ public void testCanGetStatsSinceTime() throws InterruptedException { Thread.sleep(100); statistics = builder.build().execute().getResult(context); getCount = statistics.getStatistic(CounterHistory.class); - } while (getCount.getValue().length < 2); + } while (!Thread.currentThread().isInterrupted() && getCount.getValue().length < 2); // ------ // WITH since: the history will have 1 value diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java index 3819f8858b..d32fc46720 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java @@ -25,7 +25,9 @@ import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.Timeout; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.terracotta.management.model.call.ContextualReturn; @@ -68,6 +70,9 @@ public class DefaultSharedManagementServiceTest { ManagementRegistryServiceConfiguration config1; ManagementRegistryServiceConfiguration config2; + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + @Before public void init() { EhcacheStatisticsProviderConfiguration config = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); @@ -151,7 +156,7 @@ public void testSharedCapabilities() { assertThat(new ArrayList(capabilities2).get(3).getName(), equalTo("SettingsCapability")); } - @Test (timeout=10000) + @Test public void testStats() { String statisticName = "Cache:MissCount"; @@ -194,10 +199,10 @@ public void testStats() { } private static ResultSet getResultSet(StatisticQuery.Builder builder, List contextList, Class type, String statisticsName) { - ResultSet counters; + ResultSet counters = null; //wait till Counter history is initialized and contains values > 0. 
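These test changes all follow one recipe: drop the hard @Test timeouts and the unbounded while(true) polling in favour of a class-level JUnit Timeout rule plus loops that also stop when the test thread is interrupted, so a timed-out test fails cleanly instead of spinning forever. A self-contained sketch of that recipe (hypothetical test, not part of the patch):

    import static org.junit.Assert.assertEquals;

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.Timeout;

    public class PollingPatternTest {

      @Rule
      public final Timeout globalTimeout = Timeout.seconds(10); // interrupts the test thread on timeout

      @Test
      public void pollsUntilValueAppears() {
        long value = 0;
        // Keep polling, but bail out if the Timeout rule interrupted us.
        while (!Thread.currentThread().isInterrupted() && value != 42L) {
          value = readCurrentValue(); // stand-in for querying the statistic history
        }
        assertEquals(42L, value);
      }

      private long readCurrentValue() {
        return 42L;
      }
    }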
- while(true) { + while(!Thread.currentThread().isInterrupted()) { counters = builder.build().execute(); if(counters.getResult(contextList.get(0)).getStatistic(type, statisticsName).getValue().length > 0 && From a97bd5523e3d79cfd6784afd92f157d6c9216d9a Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 10 Nov 2016 09:57:04 +0100 Subject: [PATCH 119/218] :green_heart: Skip test when cluster setup fails --- .../ehcache/clustered/TerminatedServerTest.java | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java index 8e25f4b825..e84392eabc 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java @@ -76,6 +76,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import static org.junit.Assume.assumeNoException; /** * Provides integration tests in which the server is terminated before the Ehcache operation completes. @@ -160,12 +161,21 @@ public static void restoreProperties() { } } + private static Cluster createCluster() { + try { + return new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + } catch (IllegalArgumentException e) { + assumeNoException(e); + return null; + } + } + @Rule public final TestName testName = new TestName(); // Included in 'ruleChain' below. - private final Cluster cluster = - new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + private final Cluster cluster = createCluster(); + // The TestRule.apply method is called on the inner-most Rule first with the result being passed to each // successively outer rule until the outer-most rule is reached. For ExternalResource rules, the before From feae8577bd8f90bc193aff7c29315ead78e577e1 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 10 Nov 2016 11:11:17 +0100 Subject: [PATCH 120/218] :green_heart: Wait for active to be up again Otherwise, in case it is slow to come up, we may get a timeout on the first get. 
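The createCluster() helper added to TerminatedServerTest just above uses JUnit's Assume to turn a cluster-setup problem into a skipped test rather than a failure. A generic, self-contained sketch of that idiom (the resource and test names are illustrative only):

    import static org.junit.Assume.assumeNoException;

    import org.junit.Test;

    public class SkipOnSetupFailureTest {

      static class ExpensiveResource {
        ExpensiveResource() {
          // e.g. start or connect to an external server; may throw if the environment is unsuitable
        }
      }

      private static ExpensiveResource createResourceOrSkip() {
        try {
          return new ExpensiveResource();
        } catch (RuntimeException e) {
          assumeNoException(e); // reports the test as skipped (failed assumption) instead of failed
          return null;          // not reached: assumeNoException always throws
        }
      }

      @Test
      public void usesResource() {
        ExpensiveResource resource = createResourceOrSkip();
        // ... exercise the resource; the test is skipped if it could not be created ...
      }
    }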
--- .../test/java/org/ehcache/clustered/sync/PassiveSyncTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java index d5a36fb44b..e19e989418 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java @@ -91,6 +91,7 @@ public void testSync() throws Exception { CLUSTER.getClusterControl().startOneServer(); CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().waitForActive(); for (long i = -5; i < 5; i++) { assertThat(cache.get(i), equalTo("value" + i)); From 3d3a28fc92fa9d536583adce0f4c4d3178496be2 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Wed, 19 Oct 2016 10:05:02 +0200 Subject: [PATCH 121/218] :snowflake: Remove no longer needed 'changing true' on dependencies --- clustered/clustered-dist/build.gradle | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/clustered/clustered-dist/build.gradle b/clustered/clustered-dist/build.gradle index 4f1fada655..8a175b9bb1 100644 --- a/clustered/clustered-dist/build.gradle +++ b/clustered/clustered-dist/build.gradle @@ -42,8 +42,8 @@ configurations { } dependencies { - compile "org.terracotta.internal:client-runtime:$terracottaCoreVersion" changing true - compile "org.terracotta.internal:client-logging:$terracottaCoreVersion" changing true + compile "org.terracotta.internal:client-runtime:$terracottaCoreVersion" + compile "org.terracotta.internal:client-logging:$terracottaCoreVersion" serverLibs(project(':clustered:server')) { exclude group: 'org.terracotta', module: 'entity-server-api' @@ -52,7 +52,7 @@ dependencies { exclude group: 'org.terracotta.internal', module: 'tc-config-parser' } - kit "org.terracotta.internal:terracotta-kit:$terracottaCoreVersion@zip" changing true + kit "org.terracotta.internal:terracotta-kit:$terracottaCoreVersion@zip" shadowCompile "org.slf4j:slf4j-api:$parent.slf4jVersion" pomOnlyCompile "org.ehcache:ehcache:$parent.baseVersion" From 949ffd19289775914eb5d3c6dd9de0e1566ad322 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Wed, 19 Oct 2016 10:56:20 +0200 Subject: [PATCH 122/218] :shirt: API module no longer accepting any warnings --- api/build.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/api/build.gradle b/api/build.gradle index dcd161b0fa..8119b013a6 100644 --- a/api/build.gradle +++ b/api/build.gradle @@ -20,3 +20,6 @@ checkstyle { configFile = file("$projectDir/config/checkstyle.xml") } +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} From 4b3de8ec31c7cedd32784447fd0b82a80cb712f7 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Wed, 19 Oct 2016 10:57:36 +0200 Subject: [PATCH 123/218] :shirt: Clean up warning in core * Add flag to fail build if warnings get introduced --- core/build.gradle | 6 +++++- .../core/internal/service/ServiceLocator.java | 17 ++++++++++++----- .../test/java/org/ehcache/core/CacheTest.java | 8 ++++---- .../ehcache/core/EhcacheBasicIteratorTest.java | 3 +++ .../ehcache/core/EhcacheBasicRemoveAllTest.java | 13 +++++++------ .../test/java/org/ehcache/core/EhcacheTest.java | 3 ++- .../EhcacheWithLoaderWriterBasicPutAllTest.java | 2 ++ ...EhcacheWithLoaderWriterBasicReplaceTest.java | 1 + 
...heWithLoaderWriterBasicReplaceValueTest.java | 1 + .../core/EhcacheWithLoaderWriterTest.java | 7 +++++-- .../core/config/ResourcePoolsImplTest.java | 9 ++++++++- 11 files changed, 50 insertions(+), 20 deletions(-) diff --git a/core/build.gradle b/core/build.gradle index 07ba61edf4..84ce9d2858 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -22,4 +22,8 @@ dependencies { exclude group:'org.slf4j', module:'slf4j-api' } testCompile project(':spi-tester') -} \ No newline at end of file +} + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git a/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java b/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java index d03cbe61f6..03085f4bdd 100644 --- a/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java +++ b/core/src/main/java/org/ehcache/core/internal/service/ServiceLocator.java @@ -250,7 +250,9 @@ public DependencySet with(ServiceCreationConfiguration co for (ServiceFactory factory : serviceFactories) { final Class factoryServiceType = factory.getServiceType(); if (serviceType.isAssignableFrom(factoryServiceType)) { - with(((ServiceFactory) factory).create(config)); + @SuppressWarnings("unchecked") + ServiceFactory serviceFactory = (ServiceFactory) factory; + with(serviceFactory.create(config)); success = true; } } @@ -383,7 +385,9 @@ private Collection> discoverServices(ServiceMap // Can have only one service registered under a concrete type continue; } - serviceFactories.add((ServiceFactory) factory); + @SuppressWarnings("unchecked") + ServiceFactory serviceFactory = (ServiceFactory) factory; + serviceFactories.add(serviceFactory); } } return serviceFactories; @@ -411,7 +415,9 @@ private static Set> identifyImmediateDependenciesOf(fin if (annotation != null) { for (final Class dependency : annotation.value()) { if (Service.class.isAssignableFrom(dependency)) { - dependencies.add((Class) dependency); + @SuppressWarnings("unchecked") + Class serviceDependency = (Class) dependency; + dependencies.add(serviceDependency); } else { throw new IllegalStateException("Service dependency declared by " + clazz.getName() + " is not a Service: " + dependency.getName()); @@ -505,11 +511,12 @@ public ServiceMap() { } public Set get(Class serviceType) { - Set s = services.get(serviceType); + @SuppressWarnings("unchecked") + Set s = (Set) services.get(serviceType); if (s == null) { return emptySet(); } else { - return (Set) unmodifiableSet(s); + return unmodifiableSet(s); } } diff --git a/core/src/test/java/org/ehcache/core/CacheTest.java b/core/src/test/java/org/ehcache/core/CacheTest.java index c96549585b..7a3b4c1aba 100644 --- a/core/src/test/java/org/ehcache/core/CacheTest.java +++ b/core/src/test/java/org/ehcache/core/CacheTest.java @@ -47,11 +47,11 @@ @SuppressWarnings({ "unchecked", "rawtypes" }) public abstract class CacheTest { - protected abstract InternalCache getCache(Store store); + protected abstract InternalCache getCache(Store store); @Test public void testTransistionsState() { - Store store = mock(Store.class); + Store store = mock(Store.class); InternalCache ehcache = getCache(store); assertThat(ehcache.getStatus(), CoreMatchers.is(Status.UNINITIALIZED)); @@ -63,10 +63,10 @@ public void testTransistionsState() { @Test public void testThrowsWhenNotAvailable() throws StoreAccessException { - Store store = mock(Store.class); + Store store = mock(Store.class); Store.Iterator mockIterator = mock(Store.Iterator.class); 
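Most of the warning clean-up above replaces raw types and method-wide suppressions with an @SuppressWarnings("unchecked") scoped to the single assignment that needs it, which is what makes enabling -Werror in these modules practical. A small standalone illustration of the narrow-scope pattern (hypothetical class, not from the patch):

    import java.util.Collections;
    import java.util.Set;

    public class NarrowSuppressionExample {

      private final Set<?> services = Collections.emptySet();

      public <T> Set<T> get(Class<T> serviceType) {
        // Only this one unchecked cast is suppressed, not the whole method.
        @SuppressWarnings("unchecked")
        Set<T> typed = (Set<T>) services;
        return Collections.unmodifiableSet(typed);
      }
    }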
when(store.iterator()).thenReturn(mockIterator); - InternalCache ehcache = getCache(store); + InternalCache ehcache = getCache(store); try { ehcache.get("foo"); diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java b/core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java index c04901a2e0..b0b9f509ec 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheBasicIteratorTest.java @@ -207,13 +207,16 @@ public void testIteratorNonEmptyNextAfterLast() throws Exception { */ @Test public void testIteratorStoreAccessException() throws Exception { + @SuppressWarnings("unchecked") Store.ValueHolder valueHolder = mock(Store.ValueHolder.class); doReturn("bar").when(valueHolder).value(); + @SuppressWarnings("unchecked") Cache.Entry> storeEntry = mock(Cache.Entry.class); doReturn(valueHolder).when(storeEntry).getValue(); doReturn("foo").when(storeEntry).getKey(); + @SuppressWarnings("unchecked") Store.Iterator>> storeIterator = mock(Store.Iterator.class); doReturn(true).when(storeIterator).hasNext(); doReturn(storeEntry).when(storeIterator).next(); diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java b/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java index ba4c1ce0e3..5af8761f2f 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheBasicRemoveAllTest.java @@ -259,14 +259,15 @@ public void testRemoveAllStoreSomeOverlapStoreAccessExceptionAfterNoWriter() thr } @Test + @SuppressWarnings("unchecked") public void removeAllStoreCallsMethodTwice() throws Exception { - this.store = mock(Store.class); - CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); + CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); final List removed = new ArrayList(); doAnswer(new Answer() { @Override public Object answer(InvocationOnMock invocation) throws Throwable { - Iterable i = (Iterable) invocation.getArguments()[0]; + @SuppressWarnings("unchecked") + Iterable i = (Iterable) invocation.getArguments()[0]; for (String key : i) { removed.add(key); } @@ -275,13 +276,13 @@ public Object answer(InvocationOnMock invocation) throws Throwable { }).when(cacheLoaderWriter).deleteAll(any(Iterable.class)); final EhcacheWithLoaderWriter ehcache = this.getEhcacheWithLoaderWriter(cacheLoaderWriter); - final ArgumentCaptor functionArgumentCaptor = ArgumentCaptor.forClass(Function.class); + final ArgumentCaptor>, Iterable>>> functionArgumentCaptor = (ArgumentCaptor) ArgumentCaptor.forClass(Function.class); when(store.bulkCompute(anySet(), functionArgumentCaptor.capture())).then(new Answer() { @Override public Object answer(InvocationOnMock invocation) throws Throwable { - Function function = functionArgumentCaptor.getValue(); - Iterable arg = new HashMap((Map) function.getClass().getDeclaredField("val$entriesToRemove").get(function)).entrySet(); + Function>, Iterable>> function = functionArgumentCaptor.getValue(); + Iterable> arg = new HashMap((Map) function.getClass().getDeclaredField("val$entriesToRemove").get(function)).entrySet(); function.apply(arg); function.apply(arg); return null; diff --git a/core/src/test/java/org/ehcache/core/EhcacheTest.java b/core/src/test/java/org/ehcache/core/EhcacheTest.java index ef4d995f57..7f59733cd2 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheTest.java @@ -31,9 +31,10 @@ public class 
EhcacheTest extends CacheTest { @Override - protected InternalCache getCache(Store store) { + protected InternalCache getCache(Store store) { final CacheConfiguration config = new BaseCacheConfiguration(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); + @SuppressWarnings("unchecked") CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); return new Ehcache(config, store, cacheEventDispatcher, LoggerFactory.getLogger(Ehcache.class + "-" + "EhcacheTest")); } diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java index 4d3efb77b9..c45d3321a2 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicPutAllTest.java @@ -1871,6 +1871,7 @@ public void testPutAllPartialIntersectionsImmediatelyExpiredCreatedEntries() thr final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); this.cacheLoaderWriter = spy(fakeLoaderWriter); + @SuppressWarnings("unchecked") final Expiry expiry = mock(Expiry.class); when(expiry.getExpiryForCreation(any(String.class), any(String.class))).thenReturn(Duration.ZERO); @@ -1901,6 +1902,7 @@ public void testPutAllPartialIntersectionsImmediatelyExpiredUpdatedEntries() thr final FakeCacheLoaderWriter fakeLoaderWriter = new FakeCacheLoaderWriter(originalWriterContent); this.cacheLoaderWriter = spy(fakeLoaderWriter); + @SuppressWarnings("unchecked") final Expiry expiry = mock(Expiry.class); when(expiry.getExpiryForUpdate(any(String.class), argThat(org.ehcache.core.util.Matchers.holding(instanceOf(String.class))), any(String.class))).thenReturn(Duration.ZERO); diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java index 157db437af..4d9b220e0d 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceTest.java @@ -520,6 +520,7 @@ public void testReplaceWithImmediatelyExpiredEntry() throws Exception { final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "old-value")); + @SuppressWarnings("unchecked") final Expiry expiry = mock(Expiry.class); when(expiry.getExpiryForUpdate(eq("key"), argThat(holding("old-value")), eq("value"))).thenReturn(Duration.ZERO); diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java index 29b8cba0f9..b6f4c4bf26 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicReplaceValueTest.java @@ -895,6 +895,7 @@ public void testReplaceWithImmediatelyExpiredEntry() throws Exception { final FakeCacheLoaderWriter fakeWriter = new FakeCacheLoaderWriter(Collections.singletonMap("key", "old-value")); + @SuppressWarnings("unchecked") final Expiry expiry = mock(Expiry.class); when(expiry.getExpiryForUpdate(eq("key"), argThat(holding("old-value")), eq("value"))).thenReturn(Duration.ZERO); diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java 
b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java index 10961bb601..74da460183 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterTest.java @@ -52,18 +52,21 @@ public class EhcacheWithLoaderWriterTest extends CacheTest { @Override - protected InternalCache getCache(Store store) { + protected InternalCache getCache(Store store) { final CacheConfiguration config = new BaseCacheConfiguration(Object.class, Object.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); + @SuppressWarnings("unchecked") CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); + @SuppressWarnings("unchecked") CacheLoaderWriter cacheLoaderWriter = mock(CacheLoaderWriter.class); - return new EhcacheWithLoaderWriter(config, store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterTest")); + return new EhcacheWithLoaderWriter(config, store, cacheLoaderWriter, cacheEventDispatcher, LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + "EhcacheWithLoaderWriterTest")); } @Test public void testIgnoresKeysReturnedFromCacheLoaderLoadAll() { LoadAllVerifyStore store = new LoadAllVerifyStore(); KeyFumblingCacheLoaderWriter loader = new KeyFumblingCacheLoaderWriter(); + @SuppressWarnings("unchecked") CacheEventDispatcher cacheEventDispatcher = mock(CacheEventDispatcher.class); CacheConfiguration config = new BaseCacheConfiguration(String.class, String.class, null, null, null, ResourcePoolsHelper.createHeapOnlyPools()); diff --git a/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java b/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java index ccf8806af1..0f02d65022 100644 --- a/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java +++ b/core/src/test/java/org/ehcache/core/config/ResourcePoolsImplTest.java @@ -25,10 +25,11 @@ import org.hamcrest.Matchers; import org.junit.Test; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.List; -import static java.util.Arrays.asList; import static org.ehcache.config.ResourceType.Core.HEAP; import static org.ehcache.config.ResourceType.Core.OFFHEAP; import static org.ehcache.config.units.EntryUnit.ENTRIES; @@ -298,4 +299,10 @@ public void testUpdateResourceUnitFailure() { assertThat(existing.getPoolForResource(ResourceType.Core.HEAP).getUnit(), Matchers.is(MemoryUnit.MB)); } + private Collection asList(T value1, T value2) { + @SuppressWarnings("unchecked") + List list = Arrays.asList(value1, value2); + return list; + } + } From be5a54d4f719eae29675ce25a5f97965f1befddb Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 20 Oct 2016 17:34:49 +0200 Subject: [PATCH 124/218] :shirt: Clean up warning in core-spi-test * Add flag to fail build if warnings get introduced --- core-spi-test/build.gradle | 4 ++++ .../internal/store/StoreComputeIfAbsentTest.java | 16 +++++++++------- .../ehcache/internal/store/StoreComputeTest.java | 8 +++++--- .../internal/store/StoreContainsKeyTest.java | 6 ++++-- .../store/StoreCreationEventListenerTest.java | 1 + .../store/StoreEvictionEventListenerTest.java | 1 + .../store/StoreExpiryEventListenerTest.java | 1 + .../org/ehcache/internal/store/StoreGetTest.java | 4 +++- .../internal/store/StorePutIfAbsentTest.java | 4 +++- .../org/ehcache/internal/store/StorePutTest.java | 4 +++- .../store/StoreRemovalEventListenerTest.java | 1 
+ .../internal/store/StoreRemoveKeyTest.java | 8 ++++---- .../internal/store/StoreReplaceKeyValueTest.java | 9 ++++++--- .../store/StoreReplaceKeyValueValueTest.java | 4 +++- .../store/StoreUpdateEventListenerTest.java | 1 + 15 files changed, 49 insertions(+), 23 deletions(-) diff --git a/core-spi-test/build.gradle b/core-spi-test/build.gradle index 83f89d5256..333879bf33 100644 --- a/core-spi-test/build.gradle +++ b/core-spi-test/build.gradle @@ -20,3 +20,7 @@ dependencies { exclude group:'org.hamcrest', module:'hamcrest-core' } } + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java index 1dab5140fe..503c78556c 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeIfAbsentTest.java @@ -50,12 +50,13 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = (Store) this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } - @SuppressWarnings({ "rawtypes", "unchecked" }) @SPITest public void testWrongReturnValueType() throws Exception { kvStore = factory.newStore(); @@ -75,10 +76,11 @@ public void testWrongReturnValueType() throws Exception { } try { - kvStore.computeIfAbsent(key, new Function() { + kvStore.computeIfAbsent(key, new Function() { @Override - public Object apply(Object key) { - return badValue; // returning wrong value type from function + @SuppressWarnings("unchecked") + public V apply(K key) { + return (V) badValue; // returning wrong value type from function } }); throw new AssertionError(); @@ -89,8 +91,8 @@ public Object apply(Object key) { } } - @SuppressWarnings({ "rawtypes", "unchecked" }) @SPITest + @SuppressWarnings("unchecked") public void testWrongKeyType() throws Exception { kvStore2 = factory.newStore(); @@ -107,7 +109,7 @@ public void testWrongKeyType() throws Exception { } try { - kvStore2.computeIfAbsent(badKey, new Function() { // wrong key type + kvStore2.computeIfAbsent(badKey, new Function() { // wrong key type @Override public Object apply(Object key) { throw new AssertionError(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java index 2094974bc0..a2db378b9c 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreComputeTest.java @@ -50,12 +50,14 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings("unchecked") @SPITest public void testWrongReturnValueType() throws Exception { kvStore = factory.newStore(); @@ -87,7 +89,7 @@ public Object apply(Object key, Object oldValue) { } } - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings("unchecked") @SPITest public void testWrongKeyType() throws Exception { kvStore2 = factory.newStore(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java 
b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java index 4905da3502..1b48bd924d 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreContainsKeyTest.java @@ -49,8 +49,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } @@ -89,7 +91,7 @@ public void nullKeyThrowsException() } @SPITest - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings("unchecked") public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore2 = factory.newStore(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java index 10ee9e0200..fedce4c85a 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreCreationEventListenerTest.java @@ -124,6 +124,7 @@ private void verifyListenerInteractions(StoreEventListener listener) { } private StoreEventListener addListener(Store kvStore) { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); kvStore.getStoreEventSource().addEventListener(listener); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java index fa9e06c959..8e5b31edb0 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreEvictionEventListenerTest.java @@ -137,6 +137,7 @@ private void verifyListenerInteractions(StoreEventListener listener) { } private StoreEventListener addListener(Store kvStore) { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); kvStore.getStoreEventSource().addEventListener(listener); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java index daf9959436..852899071d 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreExpiryEventListenerTest.java @@ -162,6 +162,7 @@ private void verifyListenerInteractions(StoreEventListener listener) { } private StoreEventListener addListener(Store kvStore) { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); kvStore.getStoreEventSource().addEventListener(listener); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java index f02274d6cc..9388f7d13d 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreGetTest.java @@ -58,8 +58,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + 
this.kvStore2 = null; } } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java index b3cd97747e..a0eade861f 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutIfAbsentTest.java @@ -56,8 +56,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java index 18a7bbfa7f..2f792bb417 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StorePutTest.java @@ -56,8 +56,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java index e7892c2554..0057ff6ff5 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemovalEventListenerTest.java @@ -110,6 +110,7 @@ private void verifyListenerInteractions(StoreEventListener listener) { } private StoreEventListener addListener(Store kvStore) { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); kvStore.getStoreEventSource().addEventListener(listener); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java index a5d83b2843..30c32bf08f 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreRemoveKeyTest.java @@ -49,8 +49,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } @@ -94,10 +96,8 @@ public void nullKeyThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore = factory.newStore(); - K key = null; - try { - kvStore.remove(key); + kvStore.remove(null); throw new AssertionError("Expected NullPointerException because the key is null"); } catch (NullPointerException e) { // expected diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java index 35c3936171..a45f90d353 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueTest.java @@ -51,8 +51,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } @@ -138,10 
+140,9 @@ public void nullValueThrowsException() kvStore = factory.newStore(); K key = factory.createKey(1); - V value = null; try { - kvStore.replace(key, value); + kvStore.replace(key, null); throw new AssertionError("Expected NullPointerException because the value is null"); } catch (NullPointerException e) { // expected @@ -151,6 +152,7 @@ public void nullValueThrowsException() } @SPITest + @SuppressWarnings("unchecked") public void wrongKeyTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore2 = factory.newStore(); @@ -172,6 +174,7 @@ public void wrongKeyTypeThrowsException() } @SPITest + @SuppressWarnings("unchecked") public void wrongValueTypeThrowsException() throws IllegalAccessException, InstantiationException, LegalSPITesterException { kvStore2 = factory.newStore(); diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java index ef4a6a06e7..2c2a4a1c9a 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreReplaceKeyValueValueTest.java @@ -53,8 +53,10 @@ public void tearDown() { kvStore = null; } if (kvStore2 != null) { + @SuppressWarnings("unchecked") + Store kvStore2 = this.kvStore2; factory.close(kvStore2); - kvStore2 = null; + this.kvStore2 = null; } } diff --git a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java index 0128c344b2..c810cc24fa 100644 --- a/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java +++ b/core-spi-test/src/main/java/org/ehcache/internal/store/StoreUpdateEventListenerTest.java @@ -126,6 +126,7 @@ private void verifyListenerInteractions(StoreEventListener listener) { } private StoreEventListener addListener(Store kvStore) { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); kvStore.getStoreEventSource().addEventListener(listener); From f14ea6f2c84ee7e9bff2231b9112b36d7ef5d34a Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 21 Oct 2016 10:26:21 +0200 Subject: [PATCH 125/218] :shirt: Clean up warning impl --- build.gradle | 1 + .../DefaultCopyProviderConfiguration.java | 4 +- .../concurrent/ConcurrentHashMap.java | 1 + .../internal/store/disk/OffHeapDiskStore.java | 2 +- .../holders/SerializedOnHeapValueHolder.java | 3 +- .../portability/SerializerPortability.java | 4 +- .../persistence/FileBasedStateRepository.java | 5 +- .../serialization/CompactJavaSerializer.java | 4 +- .../CacheConfigurationBuilderTest.java | 9 +- .../builders/CacheManagerBuilderTest.java | 12 +- .../ehcache/core/spi/ServiceProviderTest.java | 5 +- ...ltCacheEventListenerConfigurationTest.java | 5 +- .../serializer/SerializerCountingTest.java | 1 + .../classes/ClassInstanceProviderTest.java | 19 +-- .../internal/copy/SerializingCopierTest.java | 1 + .../CacheEventDispatcherFactoryImplTest.java | 26 ++-- .../FudgingInvocationScopedEventSinkTest.java | 36 +++--- .../events/InvocationScopedEventSinkTest.java | 16 ++- .../ScopedStoreEventDispatcherTest.java | 9 +- .../PartitionedOrderedExecutorTest.java | 2 +- .../AbstractWriteBehindTestBase.java | 63 +++++----- .../persistence/TestDiskResourceService.java | 3 +- .../sizeof/DefaultSizeOfEngineTest.java | 9 +- 
.../spi/copy/DefaultCopyProviderTest.java | 14 ++- .../DefaultCacheLoaderWriterProviderTest.java | 19 +-- .../DefaultSerializationProviderTest.java | 115 ++++++++++++------ ...istentConcurrentOffHeapClockCacheTest.java | 2 + .../disk/OffHeapDiskStoreProviderTest.java | 27 ++-- .../store/disk/OffHeapDiskStoreSPITest.java | 4 +- .../store/disk/OffHeapDiskStoreTest.java | 6 + .../EhcachePersistentSegmentTest.java | 4 +- .../store/heap/BaseOnHeapStoreTest.java | 75 ++++++++++-- .../ByteSizedOnHeapStoreByRefSPITest.java | 2 + .../heap/CountSizedOnHeapStoreByRefTest.java | 1 + .../CountSizedOnHeapStoreByValueTest.java | 2 + .../heap/OnHeapStoreBulkMethodsTest.java | 4 +- .../store/heap/OnHeapStoreByRefSPITest.java | 2 + .../OnHeapStoreCachingTierByRefSPITest.java | 2 +- .../store/heap/OnHeapStoreEvictionTest.java | 2 + .../store/heap/OnHeapStoreKeyCopierTest.java | 4 +- .../store/heap/OnHeapStoreProviderTest.java | 4 +- .../heap/OnHeapStoreValueCopierTest.java | 4 +- .../heap/bytesized/ByteAccountingTest.java | 11 +- .../ByteSizedOnHeapStoreByRefTest.java | 1 + .../ByteSizedOnHeapStoreByValueTest.java | 2 + .../bytesized/OnHeapStoreBulkMethodsTest.java | 1 + .../OnHeapStoreCachingTierByRefSPITest.java | 1 + .../offheap/AbstractOffHeapStoreTest.java | 5 +- ...hcacheConcurrentOffHeapClockCacheTest.java | 2 + .../store/offheap/OffHeapStoreTest.java | 4 +- .../offheap/factories/EhcacheSegmentTest.java | 5 +- .../tiering/CompoundCachingTierTest.java | 10 ++ .../TieredStoreFlushWhileShutdownTest.java | 16 +-- .../store/tiering/TieredStoreSPITest.java | 5 +- .../store/tiering/TieredStoreTest.java | 11 ++ .../tiering/TieredStoreWith3TiersSPITest.java | 5 +- .../DefaultDiskResourceServiceTest.java | 1 + .../FileBasedStateRepositoryTest.java | 4 +- .../impl/serialization/AddedFieldTest.java | 2 + .../serialization/AddedSuperClassTest.java | 2 + .../serialization/ArrayPackageScopeTest.java | 1 + .../serialization/BasicSerializationTest.java | 4 + .../CompactJavaSerializerClassLoaderTest.java | 2 + ...mpactJavaSerializerClassUnloadingTest.java | 2 + .../ehcache/impl/serialization/EnumTest.java | 3 + .../serialization/FieldTypeChangeTest.java | 2 + .../impl/serialization/GetFieldTest.java | 1 + .../impl/serialization/PutFieldTest.java | 2 + .../serialization/ReadObjectNoDataTest.java | 1 + .../SerializeAfterEvolutionTest.java | 1 + 70 files changed, 458 insertions(+), 182 deletions(-) diff --git a/build.gradle b/build.gradle index b1fe22eeda..3daa02581a 100644 --- a/build.gradle +++ b/build.gradle @@ -112,6 +112,7 @@ subprojects { dependencies { compileOnly "com.google.code.findbugs:annotations:$parent.findbugsVersion" + testCompileOnly "com.google.code.findbugs:annotations:$parent.findbugsVersion" testCompile 'junit:junit:4.12', 'org.assertj:assertj-core:1.7.1', 'org.hamcrest:hamcrest-library:1.3' testCompile('org.mockito:mockito-core:1.9.5') { exclude group:'org.hamcrest', module:'hamcrest-core' diff --git a/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java b/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java index 9c0b8fedfa..4c575fe640 100644 --- a/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java +++ b/impl/src/main/java/org/ehcache/impl/config/copy/DefaultCopyProviderConfiguration.java @@ -94,7 +94,9 @@ public DefaultCopyProviderConfiguration addCopierFor(Class clazz, Class(copierClass)); + @SuppressWarnings("unchecked") + ClassInstanceConfiguration> configuration = (ClassInstanceConfiguration) 
new DefaultCopierConfiguration(copierClass); + getDefaults().put(clazz, configuration); return this; } } diff --git a/impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java b/impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java index e23ba543af..1cece698b0 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java +++ b/impl/src/main/java/org/ehcache/impl/internal/concurrent/ConcurrentHashMap.java @@ -252,6 +252,7 @@ * @param the type of keys maintained by this map * @param the type of mapped values */ +@SuppressWarnings("unchecked") public class ConcurrentHashMap extends AbstractMap implements ConcurrentMap, Serializable { private static final long serialVersionUID = 7249069246763182397L; diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java index 4e8cbc7c26..7ea597c050 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStore.java @@ -395,7 +395,7 @@ public void releaseStore(Store resource) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } try { - OffHeapDiskStore offHeapDiskStore = (OffHeapDiskStore)resource; + OffHeapDiskStore offHeapDiskStore = (OffHeapDiskStore)resource; close(offHeapDiskStore); StatisticsManager.nodeFor(offHeapDiskStore).clean(); tierOperationStatistics.remove(offHeapDiskStore); diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java b/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java index 91ba363c59..2a47a0bce8 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/heap/holders/SerializedOnHeapValueHolder.java @@ -87,7 +87,8 @@ public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; - SerializedOnHeapValueHolder that = (SerializedOnHeapValueHolder)other; + @SuppressWarnings("unchecked") + SerializedOnHeapValueHolder that = (SerializedOnHeapValueHolder)other; if (!super.equals(that)) return false; try { diff --git a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java index a3a0e49f1c..b41a2088e2 100644 --- a/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java +++ b/impl/src/main/java/org/ehcache/impl/internal/store/offheap/portability/SerializerPortability.java @@ -51,7 +51,9 @@ public T decode(ByteBuffer byteBuffer) { @Override public boolean equals(Object o, ByteBuffer byteBuffer) { try { - return serializer.equals((T)o, byteBuffer); + @SuppressWarnings("unchecked") + T otherValue = (T) o; + return serializer.equals(otherValue, byteBuffer); } catch (ClassNotFoundException e) { throw new SerializerException(e); } diff --git a/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java b/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java index ede4e96b55..e404724209 100644 --- a/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java +++ 
b/impl/src/main/java/org/ehcache/impl/persistence/FileBasedStateRepository.java @@ -125,7 +125,10 @@ public StateHolder getPer return holder; } } - return (StateHolder) result.holder; + + @SuppressWarnings("unchecked") + StateHolder holder = (StateHolder) result.holder; + return holder; } @Override diff --git a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java b/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java index 47dfca1731..475a32aeba 100644 --- a/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java +++ b/impl/src/main/java/org/ehcache/impl/serialization/CompactJavaSerializer.java @@ -109,7 +109,9 @@ public T read(ByteBuffer binary) throws ClassNotFoundException, SerializerExcept try { ObjectInputStream oin = getObjectInputStream(new ByteBufferInputStream(binary)); try { - return (T) oin.readObject(); + @SuppressWarnings("unchecked") + T value = (T) oin.readObject(); + return value; } finally { oin.close(); } diff --git a/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java b/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java index 8a91b6799f..d379cee755 100644 --- a/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/CacheConfigurationBuilderTest.java @@ -36,10 +36,8 @@ import org.ehcache.spi.service.ServiceConfiguration; import org.hamcrest.Matcher; import org.hamcrest.Matchers; -import org.hamcrest.collection.IsIterableContainingInAnyOrder; import org.hamcrest.core.IsSame; import org.junit.Test; -import org.mockito.Mockito; import java.nio.ByteBuffer; import java.util.Map; @@ -65,7 +63,10 @@ public boolean adviseAgainstEviction(Object key, Object value) { .withEvictionAdvisor(evictionAdvisor) .build(); - assertThat(evictionAdvisor, (Matcher)sameInstance(cacheConfiguration.getEvictionAdvisor())); + @SuppressWarnings("unchecked") + Matcher> evictionAdvisorMatcher = (Matcher) sameInstance(cacheConfiguration + .getEvictionAdvisor()); + assertThat(evictionAdvisor, evictionAdvisorMatcher); } @Test @@ -285,7 +286,9 @@ public void testCopyingOfExistingConfiguration() { Class keyClass = Integer.class; Class valueClass = String.class; ClassLoader loader = mock(ClassLoader.class); + @SuppressWarnings("unchecked") EvictionAdvisor eviction = mock(EvictionAdvisor.class); + @SuppressWarnings("unchecked") Expiry expiry = mock(Expiry.class); ServiceConfiguration service = mock(ServiceConfiguration.class); diff --git a/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java b/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java index 6c6fdc572d..e346337242 100644 --- a/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java +++ b/impl/src/test/java/org/ehcache/config/builders/CacheManagerBuilderTest.java @@ -23,6 +23,7 @@ import org.ehcache.impl.copy.SerializingCopier; import org.ehcache.impl.serialization.CompactJavaSerializer; import org.ehcache.impl.serialization.JavaSerializer; +import org.ehcache.spi.serialization.Serializer; import org.junit.Test; import java.util.concurrent.atomic.AtomicInteger; @@ -56,6 +57,7 @@ public CacheManagerBuilder builder(final CacheManagerBui @Test public void testCanOverrideCopierInConfig() { + @SuppressWarnings("unchecked") CacheManagerBuilder managerBuilder = newCacheManagerBuilder() .withCopier(Long.class, (Class) IdentityCopier.class); 
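The recurring fix in the hunks above (CompactJavaSerializer.read(), FileBasedStateRepository, and the various test tearDown() methods) is to hoist an unchecked cast into a local variable, because @SuppressWarnings("unchecked") cannot annotate an expression and annotating the whole method would also hide unrelated warnings. A minimal standalone sketch of the same idea, using hypothetical names rather than Ehcache classes:

// Standalone sketch (hypothetical names, not Ehcache code): the unchecked cast is hoisted
// into a local so the suppression covers one declaration instead of the whole method.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

public class ScopedUncheckedCast {

  // Deserializes and casts to the caller's expected type; the cast is inherently unchecked.
  static <T> T readObject(byte[] binary) throws Exception {
    ObjectInputStream oin = new ObjectInputStream(new ByteArrayInputStream(binary));
    try {
      @SuppressWarnings("unchecked")
      T value = (T) oin.readObject();   // suppression scoped to this declaration only
      return value;
    } finally {
      oin.close();
    }
  }

  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    ObjectOutputStream oout = new ObjectOutputStream(bytes);
    oout.writeObject("hello");
    oout.close();
    String read = readObject(bytes.toByteArray());
    System.out.println(read);
  }
}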
assertNotNull(managerBuilder.withCopier(Long.class, SerializingCopier.asCopierClass())); @@ -63,9 +65,13 @@ public void testCanOverrideCopierInConfig() { @Test public void testCanOverrideSerializerConfig() { - CacheManagerBuilder managerBuilder = newCacheManagerBuilder() - .withSerializer(String.class, (Class) JavaSerializer.class); - assertNotNull(managerBuilder.withSerializer(String.class, (Class) CompactJavaSerializer.class)); + @SuppressWarnings("unchecked") + Class> serializer1 = (Class) JavaSerializer.class; + CacheManagerBuilder managerBuilder = newCacheManagerBuilder() + .withSerializer(String.class, serializer1); + @SuppressWarnings("unchecked") + Class> serializer2 = (Class) CompactJavaSerializer.class; + assertNotNull(managerBuilder.withSerializer(String.class, serializer2)); } @Test(expected = IllegalArgumentException.class) diff --git a/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java b/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java index 5ddc9a77d2..339dce04f6 100644 --- a/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java +++ b/impl/src/test/java/org/ehcache/core/spi/ServiceProviderTest.java @@ -28,7 +28,6 @@ import org.junit.Test; import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; -import static org.hamcrest.core.IsCollectionContaining.hasItem; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; @@ -58,7 +57,7 @@ public void testSupportsMultipleAuthoritativeTierProviders() throws Exception { IsCollectionContaining.hasItem(IsSame.sameInstance(cachingTierProvider))); assertThat(serviceLocator.getServicesOfType(AuthoritativeTier.Provider.class), IsCollectionContaining.hasItem(IsSame.sameInstance(authoritativeTierProvider))); - assertThat(serviceLocator.getServicesOfType((Class) diskStoreProvider.getClass()), - IsCollectionContaining.hasItem(IsSame.sameInstance(diskStoreProvider))); + assertThat(serviceLocator.getServicesOfType(OffHeapDiskStore.Provider.class), + IsCollectionContaining.hasItem(IsSame.sameInstance(diskStoreProvider))); } } diff --git a/impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java b/impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java index d34f18a4e0..c2eb674696 100644 --- a/impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java +++ b/impl/src/test/java/org/ehcache/impl/config/event/DefaultCacheEventListenerConfigurationTest.java @@ -39,9 +39,10 @@ public void testFailsToConstructWithEmptyEventSetAndInstance() { @Test(expected = IllegalArgumentException.class) public void testFailsToConstructWithEmptyEventSetAndClass() { Set fireOn = emptySet(); - new DefaultCacheEventListenerConfiguration(fireOn, (Class)TestCacheEventListener.class); + Class eventListenerClass = TestCacheEventListener.class; + new DefaultCacheEventListenerConfiguration(fireOn, eventListenerClass); } - abstract static class TestCacheEventListener implements CacheEventListener { + abstract static class TestCacheEventListener implements CacheEventListener { } } diff --git a/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java b/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java index 023778f1ec..0f1cc1b92c 100644 --- a/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java +++ b/impl/src/test/java/org/ehcache/impl/config/serializer/SerializerCountingTest.java @@ -58,6 
+58,7 @@ public class SerializerCountingTest { public TemporaryFolder folder = new TemporaryFolder(); @Before + @SuppressWarnings("unchecked") public void setUp() { cacheManager = newCacheManagerBuilder() .using(new DefaultSerializationProviderConfiguration().addSerializerFor(Serializable.class, (Class) CountingSerializer.class) diff --git a/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java index 707b159ea8..69ca762d45 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/classes/ClassInstanceProviderTest.java @@ -39,9 +39,12 @@ */ public class ClassInstanceProviderTest { + @SuppressWarnings("unchecked") + private Class> configClass = (Class)ClassInstanceConfiguration.class; + @Test public void testNewInstanceUsingAliasAndNoArgs() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); classInstanceProvider.preconfigured.put("test stuff", new ClassInstanceConfiguration(TestService.class)); TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); @@ -51,7 +54,7 @@ public void testNewInstanceUsingAliasAndNoArgs() throws Exception { @Test public void testNewInstanceUsingAliasAndArg() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); classInstanceProvider.preconfigured.put("test stuff", new ClassInstanceConfiguration(TestService.class, "test string")); TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); @@ -61,7 +64,7 @@ public void testNewInstanceUsingAliasAndArg() throws Exception { @Test public void testNewInstanceUsingServiceConfig() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); TestServiceConfiguration config = new TestServiceConfiguration(); TestService obj = classInstanceProvider.newInstance("test stuff", config); @@ -74,7 +77,7 @@ public void testNewInstanceUsingServiceConfigFactory() throws Exception { TestServiceProviderConfiguration factoryConfig = new TestServiceProviderConfiguration(); factoryConfig.getDefaults().put("test stuff", new ClassInstanceConfiguration(TestService.class)); - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(factoryConfig, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(factoryConfig, configClass); classInstanceProvider.start(null); TestService obj = classInstanceProvider.newInstance("test stuff", (ServiceConfiguration) null); @@ -121,7 +124,7 @@ public void testReleaseCloseableInstanceThrows() throws Exception { @Test public void testNewInstanceWithActualInstanceInServiceConfig() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); TestService service = 
new TestService(); TestServiceConfiguration config = new TestServiceConfiguration(service); @@ -133,7 +136,7 @@ public void testNewInstanceWithActualInstanceInServiceConfig() throws Exception @Test public void testSameInstanceRetrievedMultipleTimesUpdatesTheProvidedCount() throws Exception { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); TestService service = new TestService(); TestServiceConfiguration config = new TestServiceConfiguration(service); @@ -148,7 +151,9 @@ public void testSameInstanceRetrievedMultipleTimesUpdatesTheProvidedCount() thro @Test public void testInstancesNotCreatedByProviderDoesNotClose() throws IOException { - ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, (Class)ClassInstanceConfiguration.class); + @SuppressWarnings("unchecked") + Class> configClass = (Class) ClassInstanceConfiguration.class; + ClassInstanceProvider classInstanceProvider = new ClassInstanceProvider(null, configClass); TestCloaseableService service = mock(TestCloaseableService.class); TestCloaseableServiceConfig config = new TestCloaseableServiceConfig(service); diff --git a/impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java b/impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java index 361736d26c..f55a8446f5 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/copy/SerializingCopierTest.java @@ -33,6 +33,7 @@ public class SerializingCopierTest { @Test public void testCopy() throws Exception { + @SuppressWarnings("unchecked") Serializer serializer = mock(Serializer.class); String in = new String("foo"); ByteBuffer buff = mock(ByteBuffer.class); diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java b/impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java index 12526e6e01..1148d5e6b7 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/events/CacheEventDispatcherFactoryImplTest.java @@ -31,7 +31,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; -import static org.mockito.Matchers.anyObject; +import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -45,28 +45,34 @@ public class CacheEventDispatcherFactoryImplTest { @Test public void testConfigurationOfThreadPoolAlias() { + @SuppressWarnings("unchecked") ServiceProvider serviceProvider = mock(ServiceProvider.class); when(serviceProvider.getService(ExecutionService.class)).thenReturn(mock(ExecutionService.class)); CacheEventDispatcherFactoryImpl factory = new CacheEventDispatcherFactoryImpl(); factory.start(serviceProvider); DefaultCacheEventDispatcherConfiguration config = spy(new DefaultCacheEventDispatcherConfiguration("aName")); - factory.createCacheEventDispatcher(mock(Store.class), config); + @SuppressWarnings("unchecked") + Store store = mock(Store.class); + factory.createCacheEventDispatcher(store, config); verify(config).getThreadPoolAlias(); } @Test + @SuppressWarnings("unchecked") public void 
testCreateCacheEventDispatcherReturnsDisabledDispatcherWhenNoThreadPool() throws Exception { ServiceProvider serviceProvider = mock(ServiceProvider.class); ExecutionService executionService = mock(ExecutionService.class); when(serviceProvider.getService(ExecutionService.class)).thenReturn(executionService); - when(executionService.getOrderedExecutor(eq("myAlias"), (BlockingQueue) anyObject())).thenThrow(IllegalArgumentException.class); - when(executionService.getUnorderedExecutor(eq("myAlias"), (BlockingQueue) anyObject())).thenThrow(IllegalArgumentException.class); + when(executionService.getOrderedExecutor(eq("myAlias"), any(BlockingQueue.class))).thenThrow(IllegalArgumentException.class); + when(executionService.getUnorderedExecutor(eq("myAlias"), any(BlockingQueue.class))).thenThrow(IllegalArgumentException.class); CacheEventDispatcherFactoryImpl cacheEventDispatcherFactory = new CacheEventDispatcherFactoryImpl(); cacheEventDispatcherFactory.start(serviceProvider); + @SuppressWarnings("unchecked") + Store store = mock(Store.class); try { - cacheEventDispatcherFactory.createCacheEventDispatcher(mock(Store.class), new DefaultCacheEventDispatcherConfiguration("myAlias")); + cacheEventDispatcherFactory.createCacheEventDispatcher(store, new DefaultCacheEventDispatcherConfiguration("myAlias")); fail("expected IllegalArgumentException"); } catch (IllegalArgumentException iae) { // expected @@ -74,17 +80,19 @@ public void testCreateCacheEventDispatcherReturnsDisabledDispatcherWhenNoThreadP } @Test + @SuppressWarnings("unchecked") public void testCreateCacheEventReturnsDisabledDispatcherWhenThreadPoolFound() throws Exception { ServiceProvider serviceProvider = mock(ServiceProvider.class); ExecutionService executionService = mock(ExecutionService.class); when(serviceProvider.getService(ExecutionService.class)).thenReturn(executionService); - when(executionService.getOrderedExecutor(eq("myAlias"), (BlockingQueue) anyObject())).thenReturn(mock(ExecutorService.class)); - when(executionService.getUnorderedExecutor(eq("myAlias"), (BlockingQueue) anyObject())).thenReturn(mock(ExecutorService.class)); + when(executionService.getOrderedExecutor(eq("myAlias"), any(BlockingQueue.class))).thenReturn(mock(ExecutorService.class)); + when(executionService.getUnorderedExecutor(eq("myAlias"), any(BlockingQueue.class))).thenReturn(mock(ExecutorService.class)); CacheEventDispatcherFactoryImpl cacheEventDispatcherFactory = new CacheEventDispatcherFactoryImpl(); cacheEventDispatcherFactory.start(serviceProvider); - CacheEventDispatcher dispatcher = cacheEventDispatcherFactory.createCacheEventDispatcher(mock(Store.class), new DefaultCacheEventDispatcherConfiguration("myAlias")); + Store store = mock(Store.class); + CacheEventDispatcher dispatcher = cacheEventDispatcherFactory.createCacheEventDispatcher(store, new DefaultCacheEventDispatcherConfiguration("myAlias")); assertThat(dispatcher, instanceOf(CacheEventDispatcherImpl.class)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java b/impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java index 8a4e7dd845..49b6f36812 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/events/FudgingInvocationScopedEventSinkTest.java @@ -16,9 +16,11 @@ package org.ehcache.impl.internal.events; +import org.ehcache.core.spi.store.events.StoreEvent; 
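The CacheEventDispatcherFactoryImplTest hunks above swap the casted (BlockingQueue) anyObject() matcher for any(BlockingQueue.class) and give the Store mock an explicitly typed, locally suppressed declaration. A minimal sketch of that stubbing style, assuming mockito-core 1.9.5 as pinned in build.gradle; ExecutorProvider and the other names are hypothetical stand-ins, not Ehcache types:

// Sketch (hypothetical collaborator standing in for ExecutionService): any(Type.class)
// replaces the old "(Type) anyObject()" matcher, so no cast is needed at the call site.
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;

public class MatcherTypingExample {

  public interface ExecutorProvider {
    Executor getOrderedExecutor(String alias, BlockingQueue<Runnable> queue);
  }

  @SuppressWarnings("unchecked")   // any(BlockingQueue.class) yields a raw BlockingQueue, as in the diffs
  public static void main(String[] args) {
    ExecutorProvider provider = mock(ExecutorProvider.class);
    Executor stub = mock(Executor.class);

    // eq(...) and any(...) match the two arguments without an inline cast.
    when(provider.getOrderedExecutor(eq("myAlias"), any(BlockingQueue.class))).thenReturn(stub);

    Executor result = provider.getOrderedExecutor("myAlias", new LinkedBlockingQueue<Runnable>());
    System.out.println("got stubbed executor: " + (result == stub));
  }
}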
import org.ehcache.event.EventType; import org.ehcache.core.spi.store.events.StoreEventFilter; import org.ehcache.core.spi.store.events.StoreEventListener; +import org.hamcrest.Matcher; import org.junit.Before; import org.junit.Test; import org.mockito.InOrder; @@ -40,10 +42,13 @@ */ public class FudgingInvocationScopedEventSinkTest { - private StoreEventListener listener; + private StoreEventListener listener; private FudgingInvocationScopedEventSink eventSink; + private Matcher> createdMatcher = eventType(EventType.CREATED); + private Matcher> evictedMatcher = eventType(EventType.EVICTED); @Before + @SuppressWarnings("unchecked") public void setUp() { HashSet> storeEventListeners = new HashSet>(); listener = mock(StoreEventListener.class); @@ -60,8 +65,8 @@ public void testEvictedDifferentKeyNoImpact() { eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.EVICTED))); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); verifyNoMoreInteractions(listener); } @@ -72,8 +77,8 @@ public void testEvictedSameKeyAfterUpdateReplacesWithEvictCreate() { eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.EVICTED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); verifyNoMoreInteractions(listener); } @@ -85,8 +90,8 @@ public void testEvictedSameKeyAfterCreateFudgesExpiryToo() { eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.EVICTED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); verifyNoMoreInteractions(listener); } @@ -99,8 +104,8 @@ public void testEvictedSameKeyAfterUpdateReplacesWithEvictCreateEvenWithMultiple eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener, times(3)).onEvent(argThat(eventType(EventType.EVICTED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); + inOrder.verify(listener, times(3)).onEvent(argThat(evictedMatcher)); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); verifyNoMoreInteractions(listener); } @@ -114,8 +119,8 @@ public void testEvictedSameKeyAfterCreateFudgesExpiryTooEvenWithMultipleEvictsIn eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener, times(3)).onEvent(argThat(eventType(EventType.EVICTED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); + inOrder.verify(listener, times(3)).onEvent(argThat(evictedMatcher)); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); verifyNoMoreInteractions(listener); } @@ -127,9 +132,10 @@ public void testEvictedKeyDoesNotFudgeOlderEvents() { eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.UPDATED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.EVICTED))); + Matcher> updatedMatcher = eventType(EventType.UPDATED); + inOrder.verify(listener).onEvent(argThat(updatedMatcher)); + 
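The event-sink tests above pre-declare their Hamcrest matchers with full generic types so that argThat(...) needs no raw cast, then drive the assertions through Mockito's InOrder. A small self-contained sketch of that verification pattern, assuming mockito-core 1.9.5 and hamcrest-library 1.3 from build.gradle; a List mock stands in for the StoreEventListener:

// Sketch: matchers are declared once with their type, then reused in ordered verification.
import static org.hamcrest.Matchers.startsWith;
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verifyNoMoreInteractions;

import java.util.List;

import org.hamcrest.Matcher;
import org.mockito.InOrder;

public class InOrderMatcherExample {

  @SuppressWarnings("unchecked")   // mock(List.class) is raw; suppression stays on this method
  public static void main(String[] args) {
    List<String> sink = mock(List.class);          // stand-in for the event listener mock

    sink.add("created:k1");
    sink.add("evicted:k1");

    Matcher<String> created = startsWith("created"); // typed once, no inline cast at argThat(...)
    Matcher<String> evicted = startsWith("evicted");

    InOrder inOrder = inOrder(sink);
    inOrder.verify(sink).add(argThat(created));
    inOrder.verify(sink).add(argThat(evicted));
    verifyNoMoreInteractions(sink);

    System.out.println("ordered verification passed");
  }
}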
inOrder.verify(listener).onEvent(argThat(createdMatcher)); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); verifyNoMoreInteractions(listener); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java b/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java index 3e081b5cd7..77c8a16c1d 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/events/InvocationScopedEventSinkTest.java @@ -16,9 +16,11 @@ package org.ehcache.impl.internal.events; +import org.ehcache.core.spi.store.events.StoreEvent; import org.ehcache.core.spi.store.events.StoreEventFilter; import org.ehcache.core.spi.store.events.StoreEventListener; import org.ehcache.event.EventType; +import org.hamcrest.Matcher; import org.junit.Before; import org.junit.Test; import org.mockito.InOrder; @@ -39,10 +41,11 @@ */ public class InvocationScopedEventSinkTest { - private StoreEventListener listener; + private StoreEventListener listener; private InvocationScopedEventSink eventSink; @Before + @SuppressWarnings("unchecked") public void setUp() { HashSet> storeEventListeners = new HashSet>(); listener = mock(StoreEventListener.class); @@ -63,10 +66,13 @@ public void testReset() { eventSink.close(); InOrder inOrder = inOrder(listener); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.CREATED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.UPDATED))); - inOrder.verify(listener).onEvent(argThat(eventType(EventType.EVICTED))); + Matcher> createdMatcher = eventType(EventType.CREATED); + inOrder.verify(listener).onEvent(argThat(createdMatcher)); + Matcher> updatedMatcher = eventType(EventType.UPDATED); + inOrder.verify(listener).onEvent(argThat(updatedMatcher)); + Matcher> evictedMatcher = eventType(EventType.EVICTED); + inOrder.verify(listener).onEvent(argThat(evictedMatcher)); verifyNoMoreInteractions(listener); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java b/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java index 7f68a8c764..8a18af5c67 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/events/ScopedStoreEventDispatcherTest.java @@ -25,6 +25,7 @@ import org.ehcache.core.spi.store.events.StoreEventListener; import org.hamcrest.Matcher; import org.junit.Test; +import org.mockito.Matchers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -63,8 +64,10 @@ public void testRegistersOrderingChange() { } @Test + @SuppressWarnings("unchecked") public void testListenerNotifiedUnordered() { ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher(1); + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); dispatcher.addEventListener(listener); @@ -76,8 +79,10 @@ public void testListenerNotifiedUnordered() { } @Test + @SuppressWarnings("unchecked") public void testListenerNotifiedOrdered() { ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher(1); + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); dispatcher.addEventListener(listener); dispatcher.setEventOrdering(true); @@ -92,9 +97,11 @@ public void testListenerNotifiedOrdered() { @Test public void 
testEventFiltering() { ScopedStoreEventDispatcher dispatcher = new ScopedStoreEventDispatcher(1); + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); dispatcher.addEventListener(listener); + @SuppressWarnings("unchecked") StoreEventFilter filter = mock(StoreEventFilter.class); when(filter.acceptEvent(eq(EventType.CREATED), anyString(), anyString(), anyString())).thenReturn(true); when(filter.acceptEvent(eq(EventType.REMOVED), anyString(), anyString(), anyString())).thenReturn(false); @@ -188,4 +195,4 @@ public Long apply(Long key, Long value) { assertThat(resultMap, is(map)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java b/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java index 1430705e90..cc51d8ac86 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/executor/PartitionedOrderedExecutorTest.java @@ -297,7 +297,7 @@ public void testJobsAreExecutedInOrder() throws InterruptedException, ExecutionE List> tasks = new ArrayList>(); for (int i = 0; i < 100; i++) { final int index = i; - tasks.add(executor.submit(new Callable() { + tasks.add(executor.submit(new Callable() { @Override public Object call() throws Exception { diff --git a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java b/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java index 6e8f49d821..831a613c79 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java +++ b/impl/src/test/java/org/ehcache/impl/internal/loaderwriter/writebehind/AbstractWriteBehindTestBase.java @@ -71,8 +71,7 @@ public abstract class AbstractWriteBehindTestBase { @Test public void testWriteOrdering() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -104,8 +103,7 @@ public void testWriteOrdering() throws Exception { @Test public void testWrites() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -133,8 +131,7 @@ public void testWrites() throws Exception { @Test public void testBulkWrites() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), 
(CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -181,8 +178,7 @@ public void testBulkWrites() throws Exception { @Test public void testThatAllGetsReturnLatestData() throws BulkCacheWritingException, Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); @@ -230,8 +226,7 @@ public void testThatAllGetsReturnLatestData() throws BulkCacheWritingException, @Test public void testAllGetsReturnLatestDataWithKeyCollision() { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -260,10 +255,10 @@ public void testAllGetsReturnLatestDataWithKeyCollision() { @Test public void testBatchedDeletedKeyReturnsNull() throws Exception { + @SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); when(loaderWriter.load("key")).thenReturn("value"); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -285,6 +280,7 @@ public void testBatchedDeletedKeyReturnsNull() throws Exception { public void testUnBatchedDeletedKeyReturnsNull() throws Exception { final Semaphore semaphore = new Semaphore(0); + @SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); when(loaderWriter.load("key")).thenReturn("value"); doAnswer(new Answer() { @@ -294,8 +290,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { return null; } }).when(loaderWriter).delete("key"); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -316,10 +311,10 @@ public Object answer(InvocationOnMock invocation) throws Throwable { @Test public void testBatchedOverwrittenKeyReturnsNewValue() throws Exception { + 
@SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); when(loaderWriter.load("key")).thenReturn("value"); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -341,6 +336,7 @@ public void testBatchedOverwrittenKeyReturnsNewValue() throws Exception { public void testUnBatchedOverwrittenKeyReturnsNewValue() throws Exception { final Semaphore semaphore = new Semaphore(0); + @SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); when(loaderWriter.load("key")).thenReturn("value"); doAnswer(new Answer() { @@ -350,8 +346,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { return null; } }).when(loaderWriter).delete("key"); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -373,8 +368,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { @Test public void testCoaslecedWritesAreNotSeen() throws InterruptedException { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -401,8 +395,7 @@ public void testCoaslecedWritesAreNotSeen() throws InterruptedException { @Test public void testUnBatchedWriteBehindStopWaitsForEmptyQueue() { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -420,8 +413,7 @@ public void testUnBatchedWriteBehindStopWaitsForEmptyQueue() { @Test public void testBatchedWriteBehindStopWaitsForEmptyQueue() { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager 
cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -439,6 +431,7 @@ public void testBatchedWriteBehindStopWaitsForEmptyQueue() { @Test public void testUnBatchedWriteBehindBlocksWhenFull() throws Exception { final Semaphore gate = new Semaphore(0); + @SuppressWarnings("unchecked") CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); doAnswer(new Answer() { @@ -449,8 +442,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { } }).when(loaderWriter).write(anyString(), anyString()); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -489,6 +481,7 @@ public void run() { } @Test + @SuppressWarnings("unchecked") public void testBatchedWriteBehindBlocksWhenFull() throws Exception { final Semaphore gate = new Semaphore(0); CacheLoaderWriter loaderWriter = mock(CacheLoaderWriter.class); @@ -501,8 +494,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { } }).when(loaderWriter).writeAll(any(Iterable.class)); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -543,8 +535,7 @@ public void run() { @Test public void testFilledBatchedIsWritten() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -572,8 +563,7 @@ public void testFilledBatchedIsWritten() throws Exception { @Test public void testAgedBatchedIsWritten() throws Exception { WriteBehindTestLoaderWriter loaderWriter = new WriteBehindTestLoaderWriter(); - CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); - when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn((CacheLoaderWriter)loaderWriter); + CacheLoaderWriterProvider cacheLoaderWriterProvider = getMockedCacheLoaderWriterProvider(loaderWriter); CacheManager cacheManager = managerBuilder().using(cacheLoaderWriterProvider).build(true); try { @@ -604,6 +594,7 @@ class TestWriteBehindProvider extends WriteBehindProviderFactory.Provider { private WriteBehind writeBehind = null; @Override + @SuppressWarnings("unchecked") public WriteBehind createWriteBehindLoaderWriter(final CacheLoaderWriter cacheLoaderWriter, final WriteBehindConfiguration configuration) { this.writeBehind = super.createWriteBehindLoaderWriter(cacheLoaderWriter, configuration); 
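AbstractWriteBehindTestBase above replaces the provider stubbing that every test repeated with a single getMockedCacheLoaderWriterProvider(...) helper, so one @SuppressWarnings("unchecked") covers all of them. A sketch of the same refactoring shape with hypothetical names (TaskProvider is not an Ehcache type), again assuming Mockito 1.x:

// Sketch: one helper, one suppression; the cast needed to stub the generic return type
// lives in a single place instead of being repeated in every test method.
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.concurrent.Callable;

public class MockProviderHelperExample {

  // Hypothetical provider interface standing in for CacheLoaderWriterProvider.
  public interface TaskProvider {
    Callable<Object> createTask(String alias);
  }

  @SuppressWarnings("unchecked")
  static TaskProvider providerReturning(Callable<?> task) {
    TaskProvider provider = mock(TaskProvider.class);
    when(provider.createTask(anyString())).thenReturn((Callable<Object>) task);
    return provider;
  }

  public static void main(String[] args) throws Exception {
    Callable<String> task = new Callable<String>() {
      @Override
      public String call() {
        return "loaded";
      }
    };
    TaskProvider provider = providerReturning(task);
    System.out.println(provider.createTask("any-alias").call());
  }
}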
return writeBehind; @@ -632,4 +623,12 @@ public WriteBehind getWriteBehind() { cacheManager.close(); } } + + @SuppressWarnings("unchecked") + protected CacheLoaderWriterProvider getMockedCacheLoaderWriterProvider(CacheLoaderWriter loaderWriter) { + CacheLoaderWriterProvider cacheLoaderWriterProvider = mock(CacheLoaderWriterProvider.class); + when(cacheLoaderWriterProvider.createCacheLoaderWriter(anyString(), (CacheConfiguration)anyObject())).thenReturn(loaderWriter); + return cacheLoaderWriterProvider; + } + } diff --git a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java b/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java index 63e9d18b5f..2d4be23fb0 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java +++ b/impl/src/test/java/org/ehcache/impl/internal/persistence/TestDiskResourceService.java @@ -62,7 +62,8 @@ protected void before() throws Throwable { fileService = new DefaultLocalPersistenceService(new CacheManagerPersistenceConfiguration(folder.newFolder())); fileService.start(null); diskResourceService = new DefaultDiskResourceService(); - ServiceProvider sp = mock(ServiceProvider.class); + @SuppressWarnings("unchecked") + ServiceProvider sp = mock(ServiceProvider.class); Mockito.when(sp.getService(LocalPersistenceService.class)).thenReturn(fileService); diskResourceService.start(sp); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java b/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java index 6aa58a31d7..b9aae5f351 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/sizeof/DefaultSizeOfEngineTest.java @@ -37,7 +37,10 @@ public class DefaultSizeOfEngineTest { public void testMaxObjectGraphSizeExceededException() { SizeOfEngine sizeOfEngine = new DefaultSizeOfEngine(3, Long.MAX_VALUE); try { - sizeOfEngine.sizeof(new MaxDepthGreaterThanThree(), new CopiedOnHeapValueHolder(new MaxDepthGreaterThanThree(), 0l, true, new IdentityCopier())); + @SuppressWarnings("unchecked") + IdentityCopier valueCopier = new IdentityCopier(); + sizeOfEngine.sizeof(new MaxDepthGreaterThanThree(), + new CopiedOnHeapValueHolder(new MaxDepthGreaterThanThree(), 0L, true, valueCopier)); fail(); } catch (Exception limitExceededException) { assertThat(limitExceededException, instanceOf(LimitExceededException.class)); @@ -49,7 +52,9 @@ public void testMaxObjectSizeExceededException() { SizeOfEngine sizeOfEngine = new DefaultSizeOfEngine(Long.MAX_VALUE, 1000); try { String overSized = new String(new byte[1000]); - sizeOfEngine.sizeof(overSized, new CopiedOnHeapValueHolder("test", 0l, true, new IdentityCopier())); + @SuppressWarnings("unchecked") + IdentityCopier valueCopier = new IdentityCopier(); + sizeOfEngine.sizeof(overSized, new CopiedOnHeapValueHolder("test", 0L, true, valueCopier)); fail(); } catch (Exception limitExceededException) { assertThat(limitExceededException, instanceOf(LimitExceededException.class)); diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java index 8bd018301a..2e6e39aecc 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/copy/DefaultCopyProviderTest.java @@ -42,6 +42,7 @@ public class 
DefaultCopyProviderTest { public void testCreateKeyCopierWithCustomCopierConfig() { DefaultCopyProvider provider = new DefaultCopyProvider(null); + @SuppressWarnings("unchecked") DefaultCopierConfiguration config = new DefaultCopierConfiguration( (Class)TestCopier.class, DefaultCopierConfiguration.Type.KEY); @@ -61,13 +62,16 @@ public void testCreateKeyCopierWithSerializer() { DefaultCopierConfiguration config = new DefaultCopierConfiguration( SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.KEY); - assertThat(copyProvider.createKeyCopier(Long.class, mock(Serializer.class), config), instanceOf(SerializingCopier.class)); + @SuppressWarnings("unchecked") + Serializer serializer = mock(Serializer.class); + assertThat(copyProvider.createKeyCopier(Long.class, serializer, config), instanceOf(SerializingCopier.class)); } @Test public void testCreateValueCopierWithCustomCopierConfig() { DefaultCopyProvider provider = new DefaultCopyProvider(null); + @SuppressWarnings("unchecked") DefaultCopierConfiguration config = new DefaultCopierConfiguration( (Class)TestCopier.class, DefaultCopierConfiguration.Type.VALUE); @@ -87,7 +91,9 @@ public void testCreateValueCopierWithSerializer() { DefaultCopierConfiguration config = new DefaultCopierConfiguration( SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE); - assertThat(copyProvider.createValueCopier(Long.class, mock(Serializer.class), config), instanceOf(SerializingCopier.class)); + @SuppressWarnings("unchecked") + Serializer serializer = mock(Serializer.class); + assertThat(copyProvider.createValueCopier(Long.class, serializer, config), instanceOf(SerializingCopier.class)); } @Test @@ -96,7 +102,9 @@ public void testUserProvidedCloseableCopierInstanceDoesNotCloseOnRelease() throw TestCloseableCopier testCloseableCopier = new TestCloseableCopier(); DefaultCopierConfiguration config = new DefaultCopierConfiguration(testCloseableCopier, DefaultCopierConfiguration.Type.KEY); - assertThat(copyProvider.createKeyCopier(Long.class, mock(Serializer.class), config), sameInstance((Copier)testCloseableCopier)); + @SuppressWarnings("unchecked") + Serializer serializer = mock(Serializer.class); + assertThat(copyProvider.createKeyCopier(Long.class, serializer, config), sameInstance((Copier)testCloseableCopier)); copyProvider.releaseCopier(testCloseableCopier); diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java index 43f477e500..386b9dd704 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/loaderwriter/DefaultCacheLoaderWriterProviderTest.java @@ -24,6 +24,7 @@ import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterConfiguration; import org.ehcache.impl.config.loaderwriter.DefaultCacheLoaderWriterProviderConfiguration; +import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.service.ServiceConfiguration; @@ -108,13 +109,17 @@ public void testCreationConfigurationPreservedAfterStopStart() { configuration.addLoaderFor("cache", MyLoader.class); DefaultCacheLoaderWriterProvider loaderWriterProvider = new DefaultCacheLoaderWriterProvider(configuration); - 
loaderWriterProvider.start(mock(ServiceProvider.class)); - assertThat(loaderWriterProvider.createCacheLoaderWriter("cache", mock(CacheConfiguration.class)), CoreMatchers.instanceOf(MyLoader.class)); + @SuppressWarnings("unchecked") + ServiceProvider serviceProvider = mock(ServiceProvider.class); + loaderWriterProvider.start(serviceProvider); + @SuppressWarnings("unchecked") + CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); + assertThat(loaderWriterProvider.createCacheLoaderWriter("cache", cacheConfiguration), CoreMatchers.instanceOf(MyLoader.class)); loaderWriterProvider.stop(); - loaderWriterProvider.start(mock(ServiceProvider.class)); + loaderWriterProvider.start(serviceProvider); - assertThat(loaderWriterProvider.createCacheLoaderWriter("cache", mock(CacheConfiguration.class)), CoreMatchers.instanceOf(MyLoader.class)); + assertThat(loaderWriterProvider.createCacheLoaderWriter("cache", cacheConfiguration), CoreMatchers.instanceOf(MyLoader.class)); } public static class MyLoader implements CacheLoaderWriter { @@ -140,7 +145,7 @@ public Map loadAll(final Iterable keys) throws Exception { @Override public void write(final Object key, final Object value) throws Exception { - this.lastWritten = value; + lastWritten = value; } @Override @@ -177,8 +182,8 @@ public Object load(final Object key) throws Exception { @Override public void write(final Object key, final Object value) throws Exception { - this.lastWritten = value; + lastWritten = value; } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java index 246fe91cb0..f168ff2ef8 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/spi/serialization/DefaultSerializationProviderTest.java @@ -18,7 +18,6 @@ import org.ehcache.CachePersistenceException; import org.ehcache.core.spi.service.DiskResourceService; import org.ehcache.core.spi.service.FileBasedPersistenceContext; -import org.ehcache.core.spi.service.LocalPersistenceService; import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializerConfiguration; import org.ehcache.impl.serialization.ByteArraySerializer; @@ -35,6 +34,7 @@ import org.ehcache.spi.serialization.SerializerException; import org.ehcache.spi.serialization.StatefulSerializer; import org.ehcache.spi.serialization.UnsupportedTypeException; +import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceProvider; import org.hamcrest.Matchers; import org.junit.Rule; @@ -61,7 +61,6 @@ import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** @@ -96,7 +95,8 @@ public void testCreateSerializerWithConfig() throws Exception { DefaultSerializationProvider dsp = new DefaultSerializationProvider(dspfConfig); dsp.start(providerContaining()); - DefaultSerializerConfiguration dspConfig = new DefaultSerializerConfiguration((Class) TestSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + DefaultSerializerConfiguration dspConfig = new DefaultSerializerConfiguration(getSerializerClass(), 
DefaultSerializerConfiguration.Type.VALUE); assertThat(dsp.createValueSerializer(String.class, ClassLoader.getSystemClassLoader(), dspConfig), instanceOf(TestSerializer.class)); assertThat(dsp.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), dspConfig), instanceOf(TestSerializer.class)); @@ -105,7 +105,8 @@ public void testCreateSerializerWithConfig() throws Exception { @Test public void testCreateSerializerWithFactoryConfig() throws Exception { DefaultSerializationProviderConfiguration dspfConfig = new DefaultSerializationProviderConfiguration(); - dspfConfig.addSerializerFor(Long.class, (Class) TestSerializer.class); + Class> serializerClass = getSerializerClass(); + dspfConfig.addSerializerFor(Long.class, serializerClass); DefaultSerializationProvider dsp = new DefaultSerializationProvider(dspfConfig); dsp.start(providerContaining()); @@ -113,10 +114,16 @@ public void testCreateSerializerWithFactoryConfig() throws Exception { assertThat(dsp.createValueSerializer(HashMap.class, ClassLoader.getSystemClassLoader()), instanceOf(CompactJavaSerializer.class)); } + @SuppressWarnings("unchecked") + private Class> getSerializerClass() { + return (Class) TestSerializer.class; + } + @Test public void testCreateTransientSerializers() throws Exception { DefaultSerializationProviderConfiguration dspfConfig = new DefaultSerializationProviderConfiguration(); - dspfConfig.addSerializerFor(String.class, (Class) TestSerializer.class); + Class> serializerClass = getSerializerClass(); + dspfConfig.addSerializerFor(String.class, serializerClass); DefaultSerializationProvider dsp = new DefaultSerializationProvider(dspfConfig); dsp.start(providerContaining()); @@ -129,7 +136,8 @@ public void testCreateTransientSerializers() throws Exception { @Test public void tesCreateTransientSerializersWithOverriddenSerializableType() throws Exception { DefaultSerializationProviderConfiguration dspfConfig = new DefaultSerializationProviderConfiguration(); - dspfConfig.addSerializerFor(Serializable.class, (Class) TestSerializer.class); + Class> serializerClass = getSerializerClass(); + dspfConfig.addSerializerFor(Serializable.class, serializerClass); DefaultSerializationProvider dsp = new DefaultSerializationProvider(dspfConfig); dsp.start(providerContaining()); @@ -142,12 +150,15 @@ public void tesCreateTransientSerializersWithOverriddenSerializableType() throws @Test public void testRemembersCreationConfigurationAfterStopStart() throws UnsupportedTypeException { DefaultSerializationProviderConfiguration configuration = new DefaultSerializationProviderConfiguration(); - configuration.addSerializerFor(String.class, (Class) TestSerializer.class); + Class> serializerClass = getSerializerClass(); + configuration.addSerializerFor(String.class, serializerClass); DefaultSerializationProvider serializationProvider = new DefaultSerializationProvider(configuration); - serializationProvider.start(mock(ServiceProvider.class)); + @SuppressWarnings("unchecked") + ServiceProvider serviceProvider = mock(ServiceProvider.class); + serializationProvider.start(serviceProvider); assertThat(serializationProvider.createKeySerializer(String.class, getSystemClassLoader()), instanceOf(TestSerializer.class)); serializationProvider.stop(); - serializationProvider.start(mock(ServiceProvider.class)); + serializationProvider.start(serviceProvider); assertThat(serializationProvider.createKeySerializer(String.class, getSystemClassLoader()), instanceOf(TestSerializer.class)); } @@ -163,7 +174,9 @@ public void 
testReleaseSerializerWithProvidedCloseableSerializerDoesNotClose() t @Test public void testReleaseSerializerWithInstantiatedCloseableSerializerDoesClose() throws Exception { - DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(CloseableSerializer.class, DefaultSerializerConfiguration.Type.KEY); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) CloseableSerializer.class; + DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.KEY); DefaultSerializationProvider provider = new DefaultSerializationProvider(null); Serializer serializer = provider.createKeySerializer(String.class, getSystemClassLoader(), config); @@ -192,8 +205,9 @@ public void testReleaseSameInstanceMultipleTimesThrows() throws Exception { @Test public void testCreateKeySerializerWithActualInstanceInServiceConfig() throws Exception { DefaultSerializationProvider provider = new DefaultSerializationProvider(null); - TestSerializer serializer = mock(TestSerializer.class); - DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(serializer, DefaultSerializerConfiguration.Type.KEY); + @SuppressWarnings("unchecked") + TestSerializer serializer = mock(TestSerializer.class); + DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(serializer, DefaultSerializerConfiguration.Type.KEY); Serializer created = provider.createKeySerializer(TestSerializer.class, getSystemClassLoader(), config); assertSame(serializer, created); } @@ -201,8 +215,9 @@ public void testCreateKeySerializerWithActualInstanceInServiceConfig() throws Ex @Test public void testSameInstanceRetrievedMultipleTimesUpdatesTheProvidedCount() throws Exception { DefaultSerializationProvider provider = new DefaultSerializationProvider(null); - TestSerializer serializer = mock(TestSerializer.class); - DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(serializer, DefaultSerializerConfiguration.Type.KEY); + @SuppressWarnings("unchecked") + TestSerializer serializer = mock(TestSerializer.class); + DefaultSerializerConfiguration config = new DefaultSerializerConfiguration(serializer, DefaultSerializerConfiguration.Type.KEY); Serializer created = provider.createKeySerializer(TestSerializer.class, getSystemClassLoader(), config); assertSame(serializer, created); @@ -300,7 +315,9 @@ public void testCreateTransientSerializerWithoutConstructor() throws Exception { DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) BaseSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) BaseSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); } @@ -311,7 +328,9 @@ public void testCreatePersistentSerializerWithoutConstructor() throws Exception DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) BaseSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) 
BaseSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); } @@ -322,7 +341,9 @@ public void testCreateTransientStatefulSerializerWithoutConstructor() throws Exc DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) StatefulBaseSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulBaseSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); } @@ -333,7 +354,9 @@ public void testCreatePersistentStatefulSerializerWithoutConstructor() throws Ex DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) StatefulBaseSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulBaseSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); } @@ -343,7 +366,9 @@ public void testCreateTransientMinimalSerializer() throws Exception { provider.start(providerContaining()); MinimalSerializer.baseConstructorInvoked = false; - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(MinimalSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) MinimalSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); Serializer valueSerializer = provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); assertThat(valueSerializer, instanceOf(MinimalSerializer.class)); @@ -356,7 +381,9 @@ public void testCreatePersistentMinimalSerializer() throws Exception { provider.start(providerContaining()); MinimalSerializer.baseConstructorInvoked = false; - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(MinimalSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) MinimalSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); Serializer valueSerializer = provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); assertThat(valueSerializer, instanceOf(MinimalSerializer.class)); @@ -369,7 +396,9 @@ public void testTransientMinimalStatefulSerializer() throws Exception { provider.start(providerContaining()); MinimalStatefulSerializer.baseConstructorInvoked = false; - 
DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(MinimalStatefulSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) MinimalStatefulSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); Serializer valueSerializer = provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); assertThat(valueSerializer, instanceOf(MinimalStatefulSerializer.class)); @@ -382,7 +411,9 @@ public void testPersistentMinimalStatefulSerializer() throws Exception { provider.start(providerContaining()); MinimalStatefulSerializer.baseConstructorInvoked = false; - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(MinimalStatefulSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) MinimalStatefulSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); Serializer valueSerializer = provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); assertThat(valueSerializer, instanceOf(MinimalStatefulSerializer.class)); @@ -396,7 +427,9 @@ public void testTransientLegacySerializer() throws Exception { DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(LegacySerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) LegacySerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); } @@ -405,7 +438,9 @@ public void testPersistentLegacySerializer() throws Exception { DefaultSerializationProvider provider = getStartedProvider(); LegacySerializer.legacyConstructorInvoked = false; - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(LegacySerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) LegacySerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); expectedException.expect(RuntimeException.class); expectedException.expectMessage("does not have a constructor that takes in a ClassLoader."); Serializer valueSerializer = @@ -421,7 +456,9 @@ public void testTransientLegacyComboSerializer() throws Exception { LegacyComboSerializer.baseConstructorInvoked = false; LegacyComboSerializer.legacyConstructorInvoked = false; - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(LegacyComboSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) LegacyComboSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); Serializer valueSerializer = 
provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); assertThat(valueSerializer, instanceOf(LegacyComboSerializer.class)); @@ -435,7 +472,9 @@ public void testPersistentLegacyComboSerializer() throws Exception { LegacyComboSerializer.baseConstructorInvoked = false; LegacyComboSerializer.legacyConstructorInvoked = false; - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(LegacyComboSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) LegacyComboSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); Serializer valueSerializer = provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); assertThat(valueSerializer, instanceOf(LegacyComboSerializer.class)); @@ -450,7 +489,9 @@ public void testCreateTransientStatefulLegacySerializer() throws Exception { DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) StatefulLegacySerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulLegacySerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); } @@ -461,7 +502,9 @@ public void testCreatePersistentStatefulLegacySerializer() throws Exception { DefaultSerializationProvider provider = new DefaultSerializationProvider(null); provider.start(providerContaining()); - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration((Class) StatefulLegacySerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulLegacySerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); } @@ -472,7 +515,9 @@ public void testTransientStatefulLegacyComboSerializer() throws Exception { StatefulLegacyComboSerializer.baseConstructorInvoked = false; StatefulLegacyComboSerializer.legacyConstructorInvoked = false; - DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(StatefulLegacyComboSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulLegacyComboSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); Serializer valueSerializer = provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration); assertThat(valueSerializer, instanceOf(StatefulLegacyComboSerializer.class)); @@ -486,7 +531,9 @@ public void testPersistentStatefulLegacyComboSerializer() throws Exception { StatefulLegacyComboSerializer.baseConstructorInvoked = false; StatefulLegacyComboSerializer.legacyConstructorInvoked = false; 
- DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(StatefulLegacyComboSerializer.class, DefaultSerializerConfiguration.Type.VALUE); + @SuppressWarnings("unchecked") + Class> serializerClass = (Class) StatefulLegacyComboSerializer.class; + DefaultSerializerConfiguration configuration = new DefaultSerializerConfiguration(serializerClass, DefaultSerializerConfiguration.Type.VALUE); Serializer valueSerializer = provider.createValueSerializer(Object.class, ClassLoader.getSystemClassLoader(), configuration, getPersistenceSpaceIdentifierMock()); assertThat(valueSerializer, instanceOf(StatefulLegacyComboSerializer.class)); @@ -503,9 +550,9 @@ private PersistableResourceService.PersistenceSpaceIdentifier getPersistenceSpac private DefaultSerializationProvider getStartedProvider() throws CachePersistenceException { DefaultSerializationProvider defaultProvider = new DefaultSerializationProvider(null); - ServiceProvider serviceProvider = mock(ServiceProvider.class); + @SuppressWarnings("unchecked") + ServiceProvider serviceProvider = mock(ServiceProvider.class); DiskResourceService diskResourceService = mock(DiskResourceService.class); - StateRepository stateRepository = mock(StateRepository.class); when(diskResourceService.createPersistenceContextWithin(any(PersistableResourceService.PersistenceSpaceIdentifier.class), anyString())) .thenReturn(new FileBasedPersistenceContext() { @Override @@ -540,7 +587,7 @@ public boolean equals(T object, ByteBuffer binary) { } } - public static class CloseableSerializer implements Serializer, Closeable { + public static class CloseableSerializer implements Serializer, Closeable { boolean closed = false; @@ -563,7 +610,7 @@ public ByteBuffer serialize(Object object) throws SerializerException { } @Override - public Object read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { + public T read(ByteBuffer binary) throws ClassNotFoundException, SerializerException { return null; } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java index 4b665e45e7..9fd728fb5f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/EhcachePersistentConcurrentOffHeapClockCacheTest.java @@ -50,11 +50,13 @@ public class EhcachePersistentConcurrentOffHeapClockCacheTest extends AbstractEh public final TemporaryFolder folder = new TemporaryFolder(); @Override + @SuppressWarnings("unchecked") protected EhcachePersistentConcurrentOffHeapClockCache createTestSegment() throws IOException { return createTestSegment(noAdvice(), mock(EvictionListener.class)); } @Override + @SuppressWarnings("unchecked") protected EhcacheOffHeapBackingMap createTestSegment(EvictionAdvisor evictionPredicate) throws IOException { return createTestSegment(evictionPredicate, mock(EvictionListener.class)); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java index 813ba422d1..6aba43f141 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreProviderTest.java @@ -71,16 +71,17 @@ public void 
testStatisticsAssociations() throws Exception { OffHeapDiskStore store = provider.createStore(getStoreConfig(), mock(PersistableResourceService.PersistenceSpaceIdentifier.class)); - Query storeQuery = queryBuilder() - .children() - .filter(context(attributes(Matchers.>allOf( - hasAttribute("tags", new Matcher>() { - @Override - protected boolean matchesSafely(Set object) { - return object.containsAll(singleton("Disk")); - } - }))))) - .build(); + @SuppressWarnings("unchecked") + Query storeQuery = queryBuilder() + .children() + .filter(context(attributes(Matchers.>allOf( + hasAttribute("tags", new Matcher>() { + @Override + protected boolean matchesSafely(Set object) { + return object.containsAll(singleton("Disk")); + } + }))))) + .build(); Set nodes = singleton(ContextManager.nodeFor(store)); @@ -124,8 +125,9 @@ public ClassLoader getClassLoader() { public ResourcePools getResourcePools() { return new ResourcePools() { @Override - public ResourcePool getPoolForResource(ResourceType resourceType) { - return new SizedResourcePool() { + @SuppressWarnings("unchecked") + public

<P extends ResourcePool> P getPoolForResource(ResourceType<P>
resourceType) { + return (P) new SizedResourcePool() { @Override public ResourceType getType() { return ResourceType.Core.DISK; @@ -154,6 +156,7 @@ public void validateUpdate(ResourcePool newPool) { } @Override + @SuppressWarnings("unchecked") public Set> getResourceTypeSet() { return (Set) singleton(ResourceType.Core.OFFHEAP); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java index 8792d687b1..2d9736fc82 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreSPITest.java @@ -191,7 +191,7 @@ public String createValue(long seed) { public void close(final Store store) { String spaceName = createdStores.get(store); try { - OffHeapDiskStore.Provider.close((OffHeapDiskStore)store); + OffHeapDiskStore.Provider.close((OffHeapDiskStore)store); } catch (IOException ex) { throw new RuntimeException(ex); } @@ -210,7 +210,7 @@ public void close(final Store store) { public void tearDown() throws CachePersistenceException, IOException { try { for (Map.Entry, String> entry : createdStores.entrySet()) { - OffHeapDiskStore.Provider.close((OffHeapDiskStore) entry.getKey()); + OffHeapDiskStore.Provider.close((OffHeapDiskStore) entry.getKey()); diskResourceService.destroy(entry.getValue()); } } finally { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java index 6fc0e9e9c3..20d6d08f77 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/OffHeapDiskStoreTest.java @@ -128,6 +128,7 @@ public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); { + @SuppressWarnings("unchecked") Store.Configuration storeConfig1 = mock(Store.Configuration.class); when(storeConfig1.getKeyType()).thenReturn(Long.class); when(storeConfig1.getValueType()).thenReturn(String.class); @@ -143,6 +144,7 @@ public void testRecoveryFailureWhenValueTypeChangesToIncompatibleClass() throws } { + @SuppressWarnings("unchecked") Store.Configuration storeConfig2 = mock(Store.Configuration.class); when(storeConfig2.getKeyType()).thenReturn(Long.class); when(storeConfig2.getValueType()).thenReturn(Serializable.class); @@ -176,6 +178,7 @@ public void testRecoveryWithArrayType() throws Exception { PersistenceSpaceIdentifier space = diskResourceService.getPersistenceSpaceIdentifier("cache", cacheConfiguration); { + @SuppressWarnings("unchecked") Store.Configuration storeConfig1 = mock(Store.Configuration.class); when(storeConfig1.getKeyType()).thenReturn(Long.class); when(storeConfig1.getValueType()).thenReturn(Object[].class); @@ -191,6 +194,7 @@ public void testRecoveryWithArrayType() throws Exception { } { + @SuppressWarnings("unchecked") Store.Configuration storeConfig2 = mock(Store.Configuration.class); when(storeConfig2.getKeyType()).thenReturn(Long.class); when(storeConfig2.getValueType()).thenReturn(Object[].class); @@ -276,6 +280,7 @@ public void testStoreInitFailsWithoutLocalPersistenceService() throws Exception } @Test + @SuppressWarnings("unchecked") public void testAuthoritativeRank() throws Exception { OffHeapDiskStore.Provider 
provider = new OffHeapDiskStore.Provider(); assertThat(provider.rankAuthority(ResourceType.Core.DISK, EMPTY_LIST), is(1)); @@ -411,6 +416,7 @@ protected boolean matchesSafely(Set object) { } })))).filter(context(attributes(hasAttribute("name", "invalidateAll")))).ensureUnique().build(); + @SuppressWarnings("unchecked") OperationStatistic invalidateAll = (OperationStatistic) invalidateAllQuery.execute(singleton(nodeFor(cache))).iterator().next().getContext().attributes().get("this"); assertThat(invalidateAll.sum(), is(0L)); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java index 0e9054a6dd..8b27c44388 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/disk/factories/EhcachePersistentSegmentTest.java @@ -16,7 +16,6 @@ package org.ehcache.impl.internal.store.disk.factories; -import org.ehcache.config.Eviction; import org.ehcache.config.EvictionAdvisor; import org.ehcache.impl.internal.store.disk.factories.EhcachePersistentSegmentFactory.EhcachePersistentSegment; import org.ehcache.impl.internal.store.offheap.SwitchableEvictionAdvisor; @@ -52,10 +51,12 @@ public class EhcachePersistentSegmentTest { @Rule public final TemporaryFolder folder = new TemporaryFolder(); + @SuppressWarnings("unchecked") private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment() throws IOException { return createTestSegment(noAdvice(), mock(EvictionListener.class)); } + @SuppressWarnings("unchecked") private EhcachePersistentSegmentFactory.EhcachePersistentSegment createTestSegment(EvictionAdvisor evictionPredicate) throws IOException { return createTestSegment(evictionPredicate, mock(EvictionListener.class)); } @@ -145,6 +146,7 @@ public void testAdviceAgainstEvictionPreventsEviction() throws IOException { @Test public void testEvictionFiresEvent() throws IOException { + @SuppressWarnings("unchecked") EvictionListener evictionListener = mock(EvictionListener.class); EhcachePersistentSegment segment = createTestSegment(evictionListener); try { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java index fc7464e22d..1b5afe003e 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/BaseOnHeapStoreTest.java @@ -119,6 +119,7 @@ private String buildThreadDump() { }; @Before + @SuppressWarnings("unchecked") public void setUp() { eventDispatcher = mock(StoreEventDispatcher.class); eventSink = mock(StoreEventSink.class); @@ -128,19 +129,21 @@ public void setUp() { @Test public void testEvictEmptyStoreDoesNothing() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); assertThat(store.evict(eventSink), is(false)); - verify(eventSink, never()).evicted(anyString(), any(ValueSupplier.class)); + verify(eventSink, never()).evicted(anyString(), anyValueSupplier()); } @Test public void testEvictWithNoEvictionAdvisorDoesEvict() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); for (int i = 0; i < 100; i++) { store.put(Integer.toString(i), Integer.toString(i)); } assertThat(store.evict(eventSink), is(true)); 
assertThat(storeSize(store), is(99)); - verify(eventSink, times(1)).evicted(anyString(), any(ValueSupplier.class)); + verify(eventSink, times(1)).evicted(anyString(), anyValueSupplier()); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); } @@ -152,12 +155,13 @@ public boolean adviseAgainstEviction(String key, String value) { return true; } }); + StoreEventSink eventSink = getStoreEventSink(); for (int i = 0; i < 100; i++) { store.put(Integer.toString(i), Integer.toString(i)); } assertThat(store.evict(eventSink), is(true)); assertThat(storeSize(store), is(99)); - verify(eventSink, times(1)).evicted(anyString(), any(ValueSupplier.class)); + verify(eventSink, times(1)).evicted(anyString(), anyValueSupplier()); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); } @@ -169,12 +173,13 @@ public boolean adviseAgainstEviction(String key, String value) { throw new UnsupportedOperationException("Broken advisor!"); } }); + StoreEventSink eventSink = getStoreEventSink(); for (int i = 0; i < 100; i++) { store.put(Integer.toString(i), Integer.toString(i)); } assertThat(store.evict(eventSink), is(true)); assertThat(storeSize(store), is(99)); - verify(eventSink, times(1)).evicted(anyString(), any(ValueSupplier.class)); + verify(eventSink, times(1)).evicted(anyString(), anyValueSupplier()); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)); } @@ -196,6 +201,8 @@ public void testGetNoPut() throws Exception { @Test public void testGetExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); store.put("key", "value"); @@ -214,10 +221,11 @@ public void testGetNoExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(2, TimeUnit.MILLISECONDS))); + StoreEventSink eventSink = getStoreEventSink(); store.put("key", "value"); timeSource.advanceTime(1); assertThat(store.get("key").value(), equalTo("value")); - verify(eventSink, never()).expired(anyString(), any(ValueSupplier.class)); + verify(eventSink, never()).expired(anyString(), anyValueSupplier()); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.GetOutcome.HIT)); } @@ -251,6 +259,8 @@ public void testNotContainsKey() throws Exception { @Test public void testContainsKeyExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); @@ -266,6 +276,9 @@ public void testContainsKeyExpired() throws Exception { @Test public void testPut() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); + store.put("key", "value"); verify(eventSink).created(eq("key"), eq("value")); verifyListenerReleaseEventsInOrder(eventDispatcher); @@ -276,6 +289,9 @@ public void testPut() throws Exception { @Test public void testPutOverwrite() throws Exception { OnHeapStore store = newStore(); + 
StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); + store.put("key", "value"); store.put("key", "value2"); @@ -306,6 +322,8 @@ public void testInvalidate() throws Exception { @Test public void testPutIfAbsentNoValue() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); ValueHolder prev = store.putIfAbsent("key", "value"); @@ -347,13 +365,15 @@ public void testPutIfAbsentExpired() throws Exception { ValueHolder prev = store.putIfAbsent("key", "value2"); assertThat(prev, nullValue()); assertThat(store.get("key").value(), equalTo("value2")); - checkExpiryEvent(eventSink, "key", "value"); + checkExpiryEvent(getStoreEventSink(), "key", "value"); StatisticsTestUtils.validateStats(store, EnumSet.of(StoreOperationOutcomes.ExpirationOutcome.SUCCESS)); } @Test public void testRemove() throws StoreAccessException { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); store.remove("key"); @@ -365,6 +385,8 @@ public void testRemove() throws StoreAccessException { @Test public void testRemoveTwoArgMatch() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); @@ -389,8 +411,10 @@ public void testRemoveTwoArgNoMatch() throws Exception { @Test public void testRemoveTwoArgExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); + store.put("key", "value"); assertThat(store.get("key").value(), equalTo("value")); timeSource.advanceTime(1); @@ -403,6 +427,8 @@ public void testRemoveTwoArgExpired() throws Exception { @Test public void testReplaceTwoArgPresent() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); @@ -427,6 +453,7 @@ public void testReplaceTwoArgAbsent() throws Exception { @Test public void testReplaceTwoArgExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); @@ -442,6 +469,8 @@ public void testReplaceTwoArgExpired() throws Exception { @Test public void testReplaceThreeArgMatch() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); @@ -471,6 +500,7 @@ public void testReplaceThreeArgNoMatch() throws Exception { @Test public void testReplaceThreeArgExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); @@ -548,6 +578,8 @@ public void testIteratorDoesNotUpdateAccessTime() throws Exception { public void testComputeReplaceTrue() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = 
newStore(timeSource, Expirations.noExpiration()); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); ValueHolder installedHolder = store.get("key"); @@ -607,6 +639,8 @@ public Boolean apply() { @Test public void testCompute() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); ValueHolder newValue = store.compute("key", new BiFunction() { @Override @@ -627,6 +661,8 @@ public String apply(String mappedKey, String mappedValue) { @Test public void testComputeNull() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); ValueHolder newValue = store.compute("key", new BiFunction() { @Override @@ -677,6 +713,8 @@ public String apply(String mappedKey, String mappedValue) { @Test public void testComputeExistingValue() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); store.put("key", "value"); @@ -699,6 +737,7 @@ public String apply(String mappedKey, String mappedValue) { @Test public void testComputeExpired() throws Exception { TestTimeSource timeSource = new TestTimeSource(); + StoreEventSink eventSink = getStoreEventSink(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); store.put("key", "value"); @@ -828,6 +867,8 @@ public Boolean apply() { @Test public void testComputeIfAbsent() throws Exception { OnHeapStore store = newStore(); + StoreEventSink eventSink = getStoreEventSink(); + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); ValueHolder newValue = store.computeIfAbsent("key", new Function() { @Override @@ -1012,7 +1053,8 @@ public void testGetOrComputeIfAbsentExpiresOnHit() throws Exception { TestTimeSource timeSource = new TestTimeSource(); OnHeapStore store = newStore(timeSource, Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.MILLISECONDS))); - CachingTier.InvalidationListener invalidationListener = mock(CachingTier.InvalidationListener.class); + @SuppressWarnings("unchecked") + CachingTier.InvalidationListener invalidationListener = mock(CachingTier.InvalidationListener.class); store.setInvalidationListener(invalidationListener); store.put("key", "value"); @@ -1185,6 +1227,7 @@ public ValueHolder apply(String key) { } @Test + @SuppressWarnings("unchecked") public void testConcurrentFaultingAndInvalidate() throws Exception { final OnHeapStore store = newStore(); CachingTier.InvalidationListener invalidationListener = mock(CachingTier.InvalidationListener.class); @@ -1382,7 +1425,14 @@ public void onInvalidation(String key, ValueHolder valueHolder) { store.iterator(); } + @SuppressWarnings("unchecked") + private ValueSupplier anyValueSupplier() { + return any(ValueSupplier.class); + } + private void verifyListenerReleaseEventsInOrder(StoreEventDispatcher listener) { + StoreEventSink eventSink = getStoreEventSink(); + InOrder inOrder = inOrder(listener); inOrder.verify(listener).eventSink(); inOrder.verify(listener).releaseEventSink(eventSink); @@ -1403,6 +1453,7 @@ private StoreEventListener addListener(OnHeapStore store) { eventDispatcher = mock(StoreEventDispatcher.class); eventSink = mock(StoreEventSink.class); 
when(eventDispatcher.eventSink()).thenReturn(eventSink); + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); return listener; } @@ -1452,6 +1503,16 @@ private void advanceTime(long delta) { } } + @SuppressWarnings("unchecked") + protected StoreEventSink getStoreEventSink() { + return eventSink; + } + + @SuppressWarnings("unchecked") + protected StoreEventDispatcher getStoreEventDispatcher() { + return eventDispatcher; + } + protected OnHeapStore newStore() { return newStore(SystemTimeSource.INSTANCE, Expirations.noExpiration(), Eviction.noAdvice()); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java index 1a1118d54b..fd5866176c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/ByteSizedOnHeapStoreByRefSPITest.java @@ -75,6 +75,7 @@ public Store newStoreWithEvictionAdvisor(EvictionAdvisor newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { ResourcePools resourcePools = buildResourcePools(capacity); Store.Configuration config = new StoreConfigurationImpl(getKeyType(), getValueType(), @@ -84,6 +85,7 @@ private Store newStore(Long capacity, EvictionAdvisor newValueHolder(final String value) { return new CopiedOnHeapValueHolder(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, DEFAULT_COPIER); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java index 53423c9cca..d58618eb4e 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByRefTest.java @@ -44,6 +44,7 @@ protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { } @Override + @SuppressWarnings("unchecked") protected OnHeapStore newStore(final TimeSource timeSource, final Expiry expiry, final EvictionAdvisor evictionAdvisor, final int capacity) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java index 8452067a5e..ff726e142a 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/CountSizedOnHeapStoreByValueTest.java @@ -21,6 +21,7 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.units.EntryUnit; +import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.expiry.Expiry; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.core.spi.time.TimeSource; @@ -47,6 +48,7 @@ protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { protected OnHeapStore newStore(final TimeSource timeSource, final Expiry expiry, final EvictionAdvisor evictionAdvisor, final Copier keyCopier, final Copier valueCopier, final int capacity) { + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); return new OnHeapStore(new Store.Configuration() { @SuppressWarnings("unchecked") diff --git 
a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java index 1688e58f51..464eb3fb9c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreBulkMethodsTest.java @@ -62,14 +62,15 @@ protected Store.Configuration mockStoreConfig() { return config; } + @SuppressWarnings("unchecked") protected OnHeapStore newStore() { Store.Configuration configuration = mockStoreConfig(); return new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); } - @SuppressWarnings("unchecked") @Test + @SuppressWarnings("unchecked") public void testBulkComputeFunctionGetsValuesOfEntries() throws Exception { @SuppressWarnings("rawtypes") Store.Configuration config = mock(Store.Configuration.class); @@ -164,6 +165,7 @@ public void testBulkComputeHappyPath() throws Exception { public void testBulkComputeStoreRemovesValueWhenFunctionReturnsNullMappings() throws Exception { Store.Configuration configuration = mockStoreConfig(); + @SuppressWarnings("unchecked") OnHeapStore store = new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); store.put(1, "one"); store.put(2, "two"); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java index 41d4f0d10c..398d8dd1f8 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreByRefSPITest.java @@ -81,6 +81,7 @@ public Store newStoreWithEvictionAdvisor(EvictionAdvisor newStore(Long capacity, EvictionAdvisor evictionAdvisor, Expiry expiry, TimeSource timeSource) { ResourcePools resourcePools = buildResourcePools(capacity); Store.Configuration config = new StoreConfigurationImpl(getKeyType(), getValueType(), @@ -89,6 +90,7 @@ private Store newStore(Long capacity, EvictionAdvisor newValueHolder(final String value) { return new CopiedOnHeapValueHolder(value, SystemTimeSource.INSTANCE.getTimeMillis(), false, DEFAULT_COPIER); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java index 7251a473dc..6e997c2d79 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreCachingTierByRefSPITest.java @@ -27,7 +27,6 @@ import org.ehcache.core.spi.time.SystemTimeSource; import org.ehcache.internal.tier.CachingTierFactory; import org.ehcache.internal.tier.CachingTierSPITest; -import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.tiering.CachingTier; @@ -54,6 +53,7 @@ protected CachingTierFactory getCachingTierFactory() { } @Before + @SuppressWarnings("unchecked") public void setUp() { cachingTierFactory = new CachingTierFactory() { diff --git 
a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java index dd45857b1c..96f6b6ffbe 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreEvictionTest.java @@ -206,10 +206,12 @@ public static class OnHeapStoreForTests extends OnHeapStore { private static final Copier DEFAULT_COPIER = new IdentityCopier(); + @SuppressWarnings("unchecked") public OnHeapStoreForTests(final Configuration config, final TimeSource timeSource) { super(config, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); } + @SuppressWarnings("unchecked") public OnHeapStoreForTests(final Configuration config, final TimeSource timeSource, final SizeOfEngine engine) { super(config, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, engine, NullStoreEventDispatcher.nullStoreEventDispatcher()); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java index bc53dba961..83b0d189c2 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreKeyCopierTest.java @@ -93,6 +93,8 @@ public void setUp() { when(configuration.getKeyType()).thenReturn(Key.class); when(configuration.getValueType()).thenReturn(String.class); when(configuration.getExpiry()).thenReturn(Expirations.noExpiration()); + @SuppressWarnings("unchecked") + Store.Configuration config = configuration; Copier keyCopier = new Copier() { @Override @@ -112,7 +114,7 @@ public Key copyForWrite(Key obj) { } }; - store = new OnHeapStore(configuration, SystemTimeSource.INSTANCE, keyCopier, new IdentityCopier(), new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); + store = new OnHeapStore(config, SystemTimeSource.INSTANCE, keyCopier, new IdentityCopier(), new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); } @Test diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java index d8bc9be01c..33274f4598 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreProviderTest.java @@ -30,7 +30,6 @@ import static java.util.Collections.EMPTY_LIST; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.*; /** * Basic tests for {@link org.ehcache.impl.internal.store.heap.OnHeapStore.Provider}. 
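// Illustrative sketch with hypothetical names: rather than annotating a whole test method,
// an untyped reference can be adopted once into a properly typed local so that only this
// single declaration carries @SuppressWarnings("unchecked") and every later use stays checked.
import java.util.Arrays;
import java.util.List;

class NarrowSuppressionSketch {

  static List<String> adopt(Object untyped) {
    @SuppressWarnings("unchecked")
    List<String> typed = (List<String>) untyped; // the only unchecked cast, suppressed locally
    return typed;                                // callers see a fully typed list from here on
  }

  public static void main(String[] args) {
    System.out.println(adopt(Arrays.asList("a", "b"))); // [a, b]
  }
}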
@@ -73,6 +72,7 @@ public int getTierHeight() { } @Test + @SuppressWarnings("unchecked") public void testRankCachingTier() throws Exception { OnHeapStore.Provider provider = new OnHeapStore.Provider(); @@ -86,4 +86,4 @@ private void assertRank(final Store.Provider provider, final int expectedRank, f Collections.>emptyList()), is(expectedRank)); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java index 24579869bc..b236d40ce1 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/OnHeapStoreValueCopierTest.java @@ -92,6 +92,8 @@ public void setUp() { when(configuration.getKeyType()).thenReturn(Long.class); when(configuration.getValueType()).thenReturn(Value.class); when(configuration.getExpiry()).thenReturn(Expirations.noExpiration()); + @SuppressWarnings("unchecked") + Store.Configuration config = configuration; Copier valueCopier = new Copier() { @Override @@ -111,7 +113,7 @@ public Value copyForWrite(Value obj) { } }; - store = new OnHeapStore(configuration, SystemTimeSource.INSTANCE, new IdentityCopier(), valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); + store = new OnHeapStore(config, SystemTimeSource.INSTANCE, new IdentityCopier(), valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); } @Test diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java index 89c511c1f6..cd86261d1f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteAccountingTest.java @@ -579,7 +579,7 @@ public String apply(String a, String b) { } }); - assertThat(store.getCurrentUsageInBytes(), is(0l)); + assertThat(store.getCurrentUsageInBytes(), is(0L)); } @Test @@ -780,7 +780,8 @@ public void testExpiry() throws StoreAccessException { @Test public void testEviction() throws StoreAccessException { OnHeapStoreForTests store = newStore(1); - StoreEventListener listener = mock(StoreEventListener.class); + @SuppressWarnings("unchecked") + StoreEventListener listener = mock(StoreEventListener.class); store.getStoreEventSource().addEventListener(listener); store.put(KEY, VALUE); @@ -794,7 +795,7 @@ public void testEviction() throws StoreAccessException { long requiredSize = getSize(key1, value1); store.put(key1, value1); - Matcher> matcher = eventType(EventType.EVICTED); + Matcher> matcher = eventType(EventType.EVICTED); verify(listener, times(1)).onEvent(argThat(matcher)); if (store.get(key1) != null) { assertThat(store.getCurrentUsageInBytes(), is(requiredSize)); @@ -805,7 +806,8 @@ public void testEviction() throws StoreAccessException { } static long getSize(String key, String value) { - CopiedOnHeapValueHolder valueHolder = new CopiedOnHeapValueHolder(value, 0l, 0l, true, DEFAULT_COPIER); + @SuppressWarnings("unchecked") + CopiedOnHeapValueHolder valueHolder = new CopiedOnHeapValueHolder(value, 0L, 0L, true, DEFAULT_COPIER); long size = 0L; try { size = SIZE_OF_ENGINE.sizeof(key, valueHolder); @@ -819,6 +821,7 @@ static class OnHeapStoreForTests extends OnHeapStore { private static final Copier 
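// Illustrative sketch: a lowercase long suffix is easily misread as the digit one, which is
// presumably why the literals in the assertions above were switched to the uppercase form;
// both spellings denote the same value.
class LongLiteralSketch {
  public static void main(String[] args) {
    long misleading = 10l;                   // reads like "101" in many fonts
    long clear = 10L;                        // unambiguous
    System.out.println(misleading == clear); // true
  }
}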
DEFAULT_COPIER = new IdentityCopier(); + @SuppressWarnings("unchecked") OnHeapStoreForTests(final Configuration config, final TimeSource timeSource, final SizeOfEngine engine, StoreEventDispatcher eventDispatcher) { super(config, timeSource, DEFAULT_COPIER, DEFAULT_COPIER, engine, eventDispatcher); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java index 9bc20543aa..1c102303ad 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByRefTest.java @@ -48,6 +48,7 @@ protected void updateStoreCapacity(OnHeapStore store, int newCapacity) { } @Override + @SuppressWarnings("unchecked") protected OnHeapStore newStore(final TimeSource timeSource, final Expiry expiry, final EvictionAdvisor evictionAdvisor, final int capacity) { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java index 6e24c971c2..0605201b3f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/ByteSizedOnHeapStoreByValueTest.java @@ -21,6 +21,7 @@ import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.units.MemoryUnit; +import org.ehcache.core.events.StoreEventDispatcher; import org.ehcache.expiry.Expiry; import org.ehcache.impl.internal.sizeof.DefaultSizeOfEngine; import org.ehcache.impl.internal.store.heap.OnHeapStore; @@ -52,6 +53,7 @@ protected OnHeapStore newStore(final TimeSource timeSource, final Expiry expiry, final EvictionAdvisor evictionAdvisor, final Copier keyCopier, final Copier valueCopier, final int capacity) { + StoreEventDispatcher eventDispatcher = getStoreEventDispatcher(); return new OnHeapStore(new Store.Configuration() { @SuppressWarnings("unchecked") diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java index 8b6e9589f3..a72213bb1d 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreBulkMethodsTest.java @@ -54,6 +54,7 @@ protected Store.Configuration mockStoreConfig() { return config; } + @SuppressWarnings("unchecked") protected OnHeapStore newStore() { Store.Configuration configuration = mockStoreConfig(); return new OnHeapStore(configuration, SystemTimeSource.INSTANCE, DEFAULT_COPIER, DEFAULT_COPIER, diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java index c20790d29f..425b4b4f85 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/heap/bytesized/OnHeapStoreCachingTierByRefSPITest.java @@ -52,6 +52,7 @@ protected CachingTierFactory getCachingTierFactory() { } 
@Before + @SuppressWarnings("unchecked") public void setUp() { cachingTierFactory = new CachingTierFactory() { diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java index b8abc3b4f4..141c930c1c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/AbstractOffHeapStoreTest.java @@ -52,7 +52,6 @@ import java.util.List; import java.util.Random; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static org.ehcache.core.internal.util.ValueSuppliers.supplierOf; @@ -322,7 +321,7 @@ public void testFlushUpdatesHits() throws StoreAccessException { ((AbstractValueHolder)valueHolder).accessed(timeSource.getTimeMillis(), new Duration(1L, TimeUnit.MILLISECONDS)); assertThat(store.flush(key, new DelegatingValueHolder(valueHolder)), is(true)); } - assertThat(store.getAndFault(key).hits(), is(5l)); + assertThat(store.getAndFault(key).hits(), is(5L)); } @Test @@ -702,6 +701,7 @@ public void testIteratorOnEmptyStore() throws Exception { private void performEvictionTest(TestTimeSource timeSource, Expiry expiry, EvictionAdvisor evictionAdvisor) throws StoreAccessException {AbstractOffHeapStore offHeapStore = createAndInitStore(timeSource, expiry, evictionAdvisor); try { + @SuppressWarnings("unchecked") StoreEventListener listener = mock(StoreEventListener.class); offHeapStore.getStoreEventSource().addEventListener(listener); @@ -734,6 +734,7 @@ public void describeTo(Description description) { }; } + @SuppressWarnings("unchecked") private OperationStatistic getExpirationStatistic(Store store) { StatisticsManager statisticsManager = new StatisticsManager(); statisticsManager.root(store); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java index 695142afb3..bce531f1ba 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/EhcacheConcurrentOffHeapClockCacheTest.java @@ -42,11 +42,13 @@ public class EhcacheConcurrentOffHeapClockCacheTest extends AbstractEhcacheOffHeapBackingMapTest { @Override + @SuppressWarnings("unchecked") protected EhcacheConcurrentOffHeapClockCache createTestSegment() { return createTestSegment(Eviction.noAdvice(), mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); } @Override + @SuppressWarnings("unchecked") protected EhcacheConcurrentOffHeapClockCache createTestSegment(EvictionAdvisor evictionPredicate) { return createTestSegment(evictionPredicate, mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java index e16a7ee2f6..7dad593401 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/OffHeapStoreTest.java @@ -17,7 +17,6 @@ package org.ehcache.impl.internal.store.offheap; import org.ehcache.config.EvictionAdvisor; -import 
org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.config.units.MemoryUnit; @@ -81,6 +80,7 @@ protected OffHeapStore createAndInitStore(TimeSource timeSource, } @Test + @SuppressWarnings("unchecked") public void testRankAuthority() throws Exception { OffHeapStore.Provider provider = new OffHeapStore.Provider(); @@ -115,4 +115,4 @@ private void assertRank(final Store.Provider provider, final int expectedRank, f protected void destroyStore(AbstractOffHeapStore store) { OffHeapStore.Provider.close((OffHeapStore) store); } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java index fee5301370..7e922b169f 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/offheap/factories/EhcacheSegmentTest.java @@ -42,10 +42,12 @@ public class EhcacheSegmentTest { + @SuppressWarnings("unchecked") private EhcacheSegmentFactory.EhcacheSegment createTestSegment() { return createTestSegment(Eviction.noAdvice(), mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); } + @SuppressWarnings("unchecked") private EhcacheSegmentFactory.EhcacheSegment createTestSegment(EvictionAdvisor evictionPredicate) { return createTestSegment(evictionPredicate, mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class)); } @@ -135,6 +137,7 @@ public void testAdviceAgainstEvictionPreventsEviction() { @Test public void testEvictionFiresEvent() { + @SuppressWarnings("unchecked") EhcacheSegmentFactory.EhcacheSegment.EvictionListener evictionListener = mock(EhcacheSegmentFactory.EhcacheSegment.EvictionListener.class); EhcacheSegmentFactory.EhcacheSegment segment = createTestSegment(evictionListener); try { @@ -145,4 +148,4 @@ public void testEvictionFiresEvent() { segment.destroy(); } } -} \ No newline at end of file +} diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java index df5d547323..f57bda05e4 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/CompoundCachingTierTest.java @@ -52,6 +52,7 @@ public class CompoundCachingTierTest { @Test + @SuppressWarnings("unchecked") public void testGetOrComputeIfAbsentComputesWhenBothTiersEmpty() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -83,6 +84,7 @@ public Store.ValueHolder apply(String s) { } @Test + @SuppressWarnings("unchecked") public void testGetOrComputeIfAbsentDoesNotComputesWhenHigherTierContainsValue() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -107,6 +109,7 @@ public Store.ValueHolder apply(String s) { } @Test + @SuppressWarnings("unchecked") public void testGetOrComputeIfAbsentDoesNotComputesWhenLowerTierContainsValue() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -138,6 +141,7 @@ public Store.ValueHolder apply(String s) { } @Test + 
@SuppressWarnings("unchecked") public void testGetOrComputeIfAbsentComputesWhenLowerTierExpires() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); final LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -186,6 +190,7 @@ public Store.ValueHolder apply(String s) { } @Test + @SuppressWarnings("unchecked") public void testInvalidateNoArg() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -198,6 +203,7 @@ public void testInvalidateNoArg() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testInvalidateWhenNoValueDoesNotFireListener() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -218,6 +224,7 @@ public void onInvalidation(String key, Store.ValueHolder valueHolder) { } @Test + @SuppressWarnings("unchecked") public void testInvalidateWhenValueInLowerTierFiresListener() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -275,6 +282,7 @@ public void onInvalidation(String key, Store.ValueHolder valueHolder) { } @Test + @SuppressWarnings("unchecked") public void testInvalidateWhenValueInHigherTierFiresListener() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -333,6 +341,7 @@ public void onInvalidation(String key, Store.ValueHolder valueHolder) { } @Test + @SuppressWarnings("unchecked") public void testInvalidateAllCoversBothTiers() throws Exception { HigherCachingTier higherTier = mock(HigherCachingTier.class); LowerCachingTier lowerTier = mock(LowerCachingTier.class); @@ -346,6 +355,7 @@ public void testInvalidateAllCoversBothTiers() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testRankCachingTier() throws Exception { CompoundCachingTier.Provider provider = new CompoundCachingTier.Provider(); HashSet> resourceTypes = new HashSet>(); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java index fefb425603..576cc4d290 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreFlushWhileShutdownTest.java @@ -61,17 +61,17 @@ public void testTieredStoreReleaseFlushesEntries() throws Exception { Store.Configuration configuration = new Store.Configuration() { @Override - public Class getKeyType() { + public Class getKeyType() { return Number.class; } @Override - public Class getValueType() { - return Serializable.class; + public Class getValueType() { + return String.class; } @Override - public EvictionAdvisor getEvictionAdvisor() { + public EvictionAdvisor getEvictionAdvisor() { return null; } @@ -81,7 +81,7 @@ public ClassLoader getClassLoader() { } @Override - public Expiry getExpiry() { + public Expiry getExpiry() { return Expirations.noExpiration(); } @@ -117,7 +117,7 @@ public int getDispatcherConcurrency() { DiskResourceService diskResourceService = serviceLocator.getService(DiskResourceService.class); PersistenceSpaceIdentifier persistenceSpace = diskResourceService.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); - 
Store tieredStore = tieredStoreProvider.createStore(configuration, new ServiceConfiguration[] {persistenceSpace}); + Store tieredStore = tieredStoreProvider.createStore(configuration, persistenceSpace); tieredStoreProvider.initStore(tieredStore); for (int i = 0; i < 100; i++) { tieredStore.put(i, "hello"); @@ -140,11 +140,11 @@ public int getDispatcherConcurrency() { DiskResourceService diskResourceService1 = serviceLocator1.getService(DiskResourceService.class); PersistenceSpaceIdentifier persistenceSpace1 = diskResourceService1.getPersistenceSpaceIdentifier("testTieredStoreReleaseFlushesEntries", cacheConfiguration); - tieredStore = tieredStoreProvider.createStore(configuration, new ServiceConfiguration[] {persistenceSpace1}); + tieredStore = tieredStoreProvider.createStore(configuration, persistenceSpace1); tieredStoreProvider.initStore(tieredStore); for(int i = 0; i < 20; i++) { - assertThat(tieredStore.get(i).hits(), is(21l)); + assertThat(tieredStore.get(i).hits(), is(21L)); } } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java index 4245372142..750c880d8c 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreSPITest.java @@ -127,7 +127,8 @@ private Store newStore(Long capacity, EvictionAdvisor config = new StoreConfigurationImpl(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, buildResourcePools(capacity), 0, keySerializer, valueSerializer); - final Copier defaultCopier = new IdentityCopier(); + @SuppressWarnings("unchecked") + final Copier defaultCopier = new IdentityCopier(); OnHeapStore onHeapStore = new OnHeapStore(config, timeSource, defaultCopier, defaultCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); try { CacheConfiguration cacheConfiguration = mock(CacheConfiguration.class); @@ -330,6 +331,7 @@ private ResourcePools buildResourcePools(Comparable capacityConstraint) { public static class FakeCachingTierProvider implements CachingTier.Provider { @Override + @SuppressWarnings("unchecked") public CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { return mock(CachingTier.class); } @@ -362,6 +364,7 @@ public void stop() { public static class FakeAuthoritativeTierProvider implements AuthoritativeTier.Provider { @Override + @SuppressWarnings("unchecked") public AuthoritativeTier createAuthoritativeTier(Store.Configuration storeConfig, ServiceConfiguration... 
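// Illustrative sketch with hypothetical names: when a method declares a varargs parameter,
// a single argument can be passed directly and the compiler builds the array, so the explicit
// array creation at the call sites above can simply be dropped.
class VarargsSketch {

  interface Config<T> { }

  static int createStore(String alias, Config<?>... configs) {
    return configs.length;
  }

  public static void main(String[] args) {
    Config<String> single = new Config<String>() { };
    System.out.println(createStore("cache", single));                     // 1, compiler-built array
    System.out.println(createStore("cache", new Config<?>[] { single })); // 1, explicit array
  }
}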
serviceConfigs) { return mock(AuthoritativeTier.class); } diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java index 192a08eff2..74e1d9198c 100644 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreTest.java @@ -91,6 +91,7 @@ public void setUp() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testGetHitsCachingTier() throws Exception { when(numberCachingTier.getOrComputeIfAbsent(eq(1), any(Function.class))).thenReturn(newValueHolder("one")); @@ -102,6 +103,7 @@ public void testGetHitsCachingTier() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testGetHitsAuthoritativeTier() throws Exception { Store.ValueHolder valueHolder = newValueHolder("one"); when(numberAuthoritativeTier.getAndFault(eq(1))).thenReturn(valueHolder); @@ -123,6 +125,7 @@ public Store.ValueHolder answer(InvocationOnMock invocation) throw } @Test + @SuppressWarnings("unchecked") public void testGetMisses() throws Exception { when(numberAuthoritativeTier.getAndFault(eq(1))).thenReturn(null); when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).then(new Answer>() { @@ -267,6 +270,7 @@ public void testClear() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testCompute2Args() throws Exception { when(numberAuthoritativeTier.compute(any(Number.class), any(BiFunction.class))).then(new Answer>() { @Override @@ -291,6 +295,7 @@ public CharSequence apply(Number number, CharSequence charSequence) { } @Test + @SuppressWarnings("unchecked") public void testCompute3Args() throws Exception { when(numberAuthoritativeTier.compute(any(Number.class), any(BiFunction.class), any(NullaryFunction.class))).then(new Answer>() { @Override @@ -320,6 +325,7 @@ public Boolean apply() { } @Test + @SuppressWarnings("unchecked") public void testComputeIfAbsent_computes() throws Exception { when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).thenAnswer(new Answer>() { @Override @@ -352,6 +358,7 @@ public CharSequence apply(Number number) { } @Test + @SuppressWarnings("unchecked") public void testComputeIfAbsent_doesNotCompute() throws Exception { final Store.ValueHolder valueHolder = newValueHolder("one"); when(numberCachingTier.getOrComputeIfAbsent(any(Number.class), any(Function.class))).thenAnswer(new Answer>() { @@ -375,6 +382,7 @@ public CharSequence apply(Number number) { } @Test + @SuppressWarnings("unchecked") public void testBulkCompute2Args() throws Exception { when(numberAuthoritativeTier.bulkCompute(any(Set.class), any(Function.class))).thenAnswer(new Answer>>() { @Override @@ -419,6 +427,7 @@ public Map> answer(InvocationOnMock invo } @Test + @SuppressWarnings("unchecked") public void testBulkCompute3Args() throws Exception { when( numberAuthoritativeTier.bulkCompute(any(Set.class), any(Function.class), any(NullaryFunction.class))).thenAnswer(new Answer>>() { @@ -469,6 +478,7 @@ public Boolean apply() { } @Test + @SuppressWarnings("unchecked") public void testBulkComputeIfAbsent() throws Exception { when(numberAuthoritativeTier.bulkComputeIfAbsent(any(Set.class), any(Function.class))).thenAnswer(new Answer>>() { @Override @@ -548,6 +558,7 @@ public void run() { } @Test + @SuppressWarnings("unchecked") public void testReleaseStoreFlushes() throws Exception { TieredStore.Provider 
tieredStoreProvider = new TieredStore.Provider(); diff --git a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java index d9da7f8441..d2c82fae66 100755 --- a/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java +++ b/impl/src/test/java/org/ehcache/impl/internal/store/tiering/TieredStoreWith3TiersSPITest.java @@ -127,7 +127,8 @@ private Store newStore(Long capacity, EvictionAdvisor valueSerializer = new JavaSerializer(getClass().getClassLoader()); Store.Configuration config = new StoreConfigurationImpl(getKeyType(), getValueType(), evictionAdvisor, getClass().getClassLoader(), expiry, buildResourcePools(capacity), 0, keySerializer, valueSerializer); - final Copier defaultCopier = new IdentityCopier(); + @SuppressWarnings("unchecked") + final Copier defaultCopier = new IdentityCopier(); StoreEventDispatcher noOpEventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); final OnHeapStore onHeapStore = new OnHeapStore(config, timeSource, defaultCopier, defaultCopier, new NoopSizeOfEngine(), noOpEventDispatcher); @@ -355,6 +356,7 @@ private ResourcePools buildResourcePools(Long capacityConstraint) { public static class FakeCachingTierProvider implements CachingTier.Provider { @Override + @SuppressWarnings("unchecked") public CachingTier createCachingTier(Store.Configuration storeConfig, ServiceConfiguration... serviceConfigs) { return mock(CachingTier.class); } @@ -387,6 +389,7 @@ public void stop() { public static class FakeAuthoritativeTierProvider implements AuthoritativeTier.Provider { @Override + @SuppressWarnings("unchecked") public AuthoritativeTier createAuthoritativeTier(Store.Configuration storeConfig, ServiceConfiguration... 
serviceConfigs) { return mock(AuthoritativeTier.class); } diff --git a/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java b/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java index aca37fc4b0..41486008cb 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java +++ b/impl/src/test/java/org/ehcache/impl/persistence/DefaultDiskResourceServiceTest.java @@ -45,6 +45,7 @@ public static abstract class AbstractDefaultDiskResourceServiceTest { public ExpectedException expectedException = ExpectedException.none(); protected DefaultDiskResourceService service = new DefaultDiskResourceService(); + @SuppressWarnings("unchecked") protected ServiceProvider serviceProvider = mock(ServiceProvider.class); @Before diff --git a/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java b/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java index 63460e530c..c2484280a2 100644 --- a/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java +++ b/impl/src/test/java/org/ehcache/impl/persistence/FileBasedStateRepositoryTest.java @@ -61,7 +61,9 @@ public void testHolderSave() throws Exception { assertThat(name, is(holderName)); FileBasedStateRepository.Tuple loadedTuple = (FileBasedStateRepository.Tuple) ois.readObject(); assertThat(loadedTuple.index, is(0)); - assertThat((StateHolder)loadedTuple.holder, is(myHolder)); + @SuppressWarnings("unchecked") + StateHolder stateHolder = (StateHolder) loadedTuple.holder; + assertThat(stateHolder, is(myHolder)); } finally { ois.close(); } diff --git a/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java b/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java index 390371a274..a81c79ba38 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/AddedFieldTest.java @@ -41,6 +41,7 @@ public class AddedFieldTest { @Test public void addingSerializableField() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer serializer = new CompactJavaSerializer(null); serializer.init(new TransientStateRepository()); @@ -59,6 +60,7 @@ public void addingSerializableField() throws Exception { @Test public void addingExternalizableField() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer serializer = new CompactJavaSerializer(null); serializer.init(new TransientStateRepository()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java b/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java index 634798b206..57266bc738 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/AddedSuperClassTest.java @@ -35,6 +35,7 @@ public class AddedSuperClassTest { @Test public void testAddedSuperClass() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer serializer = new CompactJavaSerializer(null); serializer.init(new TransientStateRepository()); @@ -52,6 +53,7 @@ public void testAddedSuperClass() throws Exception { @Test public void testAddedSuperClassNotHidden() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer serializer = new CompactJavaSerializer(null); serializer.init(new TransientStateRepository()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java 
b/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java index 6b3703e058..631a42ad1c 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/ArrayPackageScopeTest.java @@ -37,6 +37,7 @@ public class ArrayPackageScopeTest { @Test public void testArrayPackageScope() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer serializer = new CompactJavaSerializer(null); serializer.init(new TransientStateRepository()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java b/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java index f08d6921b3..ace8b0c6d4 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/BasicSerializationTest.java @@ -40,6 +40,7 @@ public class BasicSerializationTest { @Test public void testSimpleObject() throws ClassNotFoundException { + @SuppressWarnings("unchecked") StatefulSerializer test = new CompactJavaSerializer(null); test.init(new TransientStateRepository()); @@ -52,6 +53,7 @@ public void testSimpleObject() throws ClassNotFoundException { @Test public void testComplexObject() throws ClassNotFoundException { + @SuppressWarnings("unchecked") StatefulSerializer test = new CompactJavaSerializer(null); test.init(new TransientStateRepository()); @@ -74,6 +76,7 @@ public void testComplexObject() throws ClassNotFoundException { @Test public void testPrimitiveClasses() throws ClassNotFoundException { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); @@ -89,6 +92,7 @@ public void testProxyInstance() throws ClassNotFoundException { int foo = rand.nextInt(); float bar = rand.nextFloat(); + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java index 240d3617a5..df72be4c0b 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassLoaderTest.java @@ -36,6 +36,7 @@ private static ClassLoader newLoader() { @Test public void testThreadContextLoader() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer serializer = new CompactJavaSerializer(null); serializer.init(new TransientStateRepository()); @@ -53,6 +54,7 @@ public void testThreadContextLoader() throws Exception { @Test public void testExplicitLoader() throws Exception { ClassLoader loader = newLoader(); + @SuppressWarnings("unchecked") StatefulSerializer serializer = new CompactJavaSerializer(loader); serializer.init(new TransientStateRepository()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java index 793c554e2a..2555a497ec 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/CompactJavaSerializerClassUnloadingTest.java @@ -52,6 +52,7 @@ public void createSpecialObject() throws Exception { @Test 
public void testClassUnloadingAfterSerialization() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer serializer = new CompactJavaSerializer(null); serializer.init(new TransientStateRepository()); @@ -73,6 +74,7 @@ public void testClassUnloadingAfterSerialization() throws Exception { public void testClassUnloadingAfterSerializationAndDeserialization() throws Exception { Thread.currentThread().setContextClassLoader(specialObject.getClass().getClassLoader()); try { + @SuppressWarnings("unchecked") StatefulSerializer serializer = new CompactJavaSerializer(null); serializer.init(new TransientStateRepository()); specialObject = serializer.read(serializer.serialize(specialObject)); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java b/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java index 63bdd6df69..c2562dfe11 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/EnumTest.java @@ -36,6 +36,7 @@ public class EnumTest { @Test public void basicInstanceSerialization() throws ClassNotFoundException { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); @@ -46,6 +47,7 @@ public void basicInstanceSerialization() throws ClassNotFoundException { @Test public void classSerialization() throws ClassNotFoundException { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); @@ -57,6 +59,7 @@ public void classSerialization() throws ClassNotFoundException { @Test public void shiftingInstanceSerialization() throws ClassNotFoundException { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java b/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java index 5746c1048d..f609534878 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/FieldTypeChangeTest.java @@ -35,6 +35,7 @@ public class FieldTypeChangeTest { @Test public void fieldTypeChangeWithOkayObject() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); @@ -51,6 +52,7 @@ public void fieldTypeChangeWithOkayObject() throws Exception { @Test public void fieldTypeChangeWithIncompatibleObject() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java b/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java index 3fe3a4688a..13ee6e60f3 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/GetFieldTest.java @@ -37,6 +37,7 @@ public class GetFieldTest { @Test public void testGetField() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java b/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java index 61c1429d59..93ed704f93 100644 --- 
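// Illustrative sketch, hypothetical class: calling a generic class's constructor in raw form
// yields a raw instance, so the first assignment below is an unchecked conversion even though
// the variable is parameterized; the local annotation acknowledges that single spot. Where the
// surrounding code allows it, the diamond form avoids the warning altogether.
class RawConstructorSketch<T> {

  public static void main(String[] args) {
    @SuppressWarnings("unchecked")
    RawConstructorSketch<String> viaRaw = new RawConstructorSketch();       // raw constructor call
    RawConstructorSketch<String> viaDiamond = new RawConstructorSketch<>(); // inferred, no warning
    System.out.println(viaRaw.getClass() == viaDiamond.getClass());         // true after erasure
  }
}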
a/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/PutFieldTest.java @@ -39,6 +39,7 @@ public class PutFieldTest { @Test public void testWithAllPrimitivesAndString() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); @@ -66,6 +67,7 @@ public void testWithAllPrimitivesAndString() throws Exception { @Test public void testWithTwoStrings() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java b/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java index 8495387800..1742c8bb5d 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/ReadObjectNoDataTest.java @@ -37,6 +37,7 @@ public class ReadObjectNoDataTest { @Test public void test() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); ClassLoader loaderW = createClassNameRewritingLoader(C_W.class, B_W.class); diff --git a/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java b/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java index b3ae43e8aa..6bddd70c76 100644 --- a/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java +++ b/impl/src/test/java/org/ehcache/impl/serialization/SerializeAfterEvolutionTest.java @@ -33,6 +33,7 @@ public class SerializeAfterEvolutionTest { @Test public void test() throws Exception { + @SuppressWarnings("unchecked") StatefulSerializer s = new CompactJavaSerializer(null); s.init(new TransientStateRepository()); From eef3dde8575ad54f6928241956a09f2167be45f6 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Tue, 8 Nov 2016 23:11:58 +0100 Subject: [PATCH 126/218] :shirt: Clean up warning clustered common * Add flag to fail build if warnings get introduced --- clustered/common/build.gradle | 4 ++++ .../clustered/common/internal/messages/ReconnectMessage.java | 2 +- .../common/internal/messages/ReconnectMessageCodecTest.java | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/clustered/common/build.gradle b/clustered/common/build.gradle index 2bc268e9f7..e75c48963a 100644 --- a/clustered/common/build.gradle +++ b/clustered/common/build.gradle @@ -19,3 +19,7 @@ apply plugin: EhDeploy dependencies { provided "org.terracotta:entity-common-api:$parent.entityApiVersion" } + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java index 4987b41d9b..8993de4b5e 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ReconnectMessage.java @@ -52,7 +52,7 @@ public void addInvalidationsInProgress(String cacheId, Set hashInvalidatio public Set getInvalidationsInProgress(String cacheId) { Set hashToInvalidate = hashInvalidationsInProgressPerCache.get(cacheId); - return hashToInvalidate == null ? 
Collections.EMPTY_SET : hashToInvalidate; + return hashToInvalidate == null ? Collections.emptySet() : hashToInvalidate; } public void addClearInProgress(String cacheId) { diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java index 521936c8e3..8c8f8123d8 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ReconnectMessageCodecTest.java @@ -51,7 +51,7 @@ public void testCodec() { secondSetToInvalidate.add(222L); secondSetToInvalidate.add(2222L); reconnectMessage.addInvalidationsInProgress("test", firstSetToInvalidate); - reconnectMessage.addInvalidationsInProgress("test1", Collections.EMPTY_SET); + reconnectMessage.addInvalidationsInProgress("test1", Collections.emptySet()); reconnectMessage.addInvalidationsInProgress("test2", secondSetToInvalidate); reconnectMessage.addClearInProgress("test"); From 346174f2e637b63df511ec959ed3e2468e9aa498 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 10 Nov 2016 09:22:53 +0100 Subject: [PATCH 127/218] :shirt: Clean up warnings in clustered/client * Made the introduction of new warnings fail the build --- clustered/client/build.gradle | 6 ++ .../client/internal/EhcacheClientEntity.java | 7 +- .../internal/EhcacheClientEntityService.java | 2 +- .../service/ClusteredStateHolder.java | 3 + .../client/internal/store/ClusteredStore.java | 10 +- .../ConditionalReplaceOperation.java | 3 +- .../store/operations/RemoveOperation.java | 3 +- .../client/config/TimeoutDurationTest.java | 6 +- .../EhcacheClientEntityFactoryTest.java | 37 ++++---- .../internal/UnitTestConnectionService.java | 4 +- .../lock/VoltronReadWriteLockTest.java | 94 ++++--------------- .../service/DefaultClusteringServiceTest.java | 31 +----- .../store/ClusteredStoreProviderTest.java | 7 +- .../internal/store/ClusteredStoreTest.java | 69 ++++++++++---- .../operations/ChainResolverExpiryTest.java | 36 ++++--- .../ConditionalRemoveOperationTest.java | 4 +- .../store/operations/LazyValueHolderTest.java | 16 +++- 17 files changed, 159 insertions(+), 179 deletions(-) diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 4096ab8a85..2dbfaa4a83 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -37,8 +37,14 @@ dependencies { compileTestJava { options.forkOptions.executable = Jvm.current().javacExecutable + sourceCompatibility = 1.8 + targetCompatibility = 1.8 } test { executable = Jvm.current().javaExecutable } + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index 5126f4a020..32c9366c46 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -143,13 +143,14 @@ void setTimeouts(Timeouts timeouts) { this.timeouts = timeouts; } - private void fireResponseEvent(EhcacheEntityResponse response) { - List> responseListeners = this.responseListeners.get(response.getClass()); + private void fireResponseEvent(T response) { + 
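// Illustrative sketch: Collections.EMPTY_SET is a raw Set constant, so handing it back where
// a parameterized Set is expected is an unchecked conversion, whereas the generic factory
// method lets the compiler infer the element type and compiles cleanly; that matters now that
// this module's compile tasks run with -Werror.
import java.util.Collections;
import java.util.Set;

class EmptySetSketch {

  @SuppressWarnings("unchecked")
  static Set<Long> viaRawConstant() {
    return Collections.EMPTY_SET;   // raw constant: unchecked conversion without the annotation
  }

  static Set<Long> viaFactoryMethod() {
    return Collections.emptySet();  // Set<Long> inferred from the return type, no warning
  }

  public static void main(String[] args) {
    System.out.println(viaRawConstant().equals(viaFactoryMethod())); // true, both are empty
  }
}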
@SuppressWarnings("unchecked") + List> responseListeners = (List) this.responseListeners.get(response.getClass()); if (responseListeners == null) { return; } LOGGER.debug("{} registered response listener(s) for {}", responseListeners.size(), response.getClass()); - for (ResponseListener responseListener : responseListeners) { + for (ResponseListener responseListener : responseListeners) { responseListener.onResponse(response); } } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java index bc7c03e6c5..ddcdfcfd1f 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java @@ -45,7 +45,7 @@ public UUID deserializeConfiguration(byte[] configuration) { } @Override - public EhcacheClientEntity create(EntityClientEndpoint endpoint) { + public EhcacheClientEntity create(EntityClientEndpoint endpoint) { return new EhcacheClientEntity(endpoint); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java index abcb2fa98d..8e18d37626 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/ClusteredStateHolder.java @@ -38,6 +38,7 @@ public ClusteredStateHolder(final String cacheId, final String mapId, final Ehca } @Override + @SuppressWarnings("unchecked") public V get(final Object key) { return (V) getResponse(messageFactory.getMessage(key)); } @@ -54,11 +55,13 @@ private Object getResponse(StateRepositoryOpMessage message) { } @Override + @SuppressWarnings("unchecked") public Set> entrySet() { return (Set>) getResponse(messageFactory.entrySetMessage()); } @Override + @SuppressWarnings("unchecked") public V putIfAbsent(final K key, final V value) { return (V) getResponse(messageFactory.putIfAbsentMessage(key, value)); } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java index e5f4b3b508..311f6c6607 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ClusteredStore.java @@ -646,7 +646,7 @@ public void initStore(final Store resource) { if (storeConfig == null) { throw new IllegalArgumentException("Given clustered tier is not managed by this provider : " + resource); } - final ClusteredStore clusteredStore = (ClusteredStore) resource; + final ClusteredStore clusteredStore = (ClusteredStore) resource; ClusteredCacheIdentifier cacheIdentifier = storeConfig.getCacheIdentifier(); try { clusteredStore.storeProxy = clusteringService.getServerStoreProxy(cacheIdentifier, storeConfig.getStoreConfig(), storeConfig.getConsistency()); @@ -678,7 +678,7 @@ public void initStore(final Store resource) { clusteredStore.storeProxy.addInvalidationListener(new ServerStoreProxy.InvalidationListener() { @Override public void onInvalidateHash(long hash) { - Enum result = StoreOperationOutcomes.EvictionOutcome.SUCCESS; + 
StoreOperationOutcomes.EvictionOutcome result = StoreOperationOutcomes.EvictionOutcome.SUCCESS; clusteredStore.evictionObserver.begin(); if (clusteredStore.invalidationValve != null) { try { @@ -773,16 +773,16 @@ public void initAuthoritativeTier(AuthoritativeTier resource) { private static class StoreConfig { private final ClusteredCacheIdentifier cacheIdentifier; - private final Store.Configuration storeConfig; + private final Store.Configuration storeConfig; private final Consistency consistency; - StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration storeConfig, Consistency consistency) { + StoreConfig(ClusteredCacheIdentifier cacheIdentifier, Configuration storeConfig, Consistency consistency) { this.cacheIdentifier = cacheIdentifier; this.storeConfig = storeConfig; this.consistency = consistency; } - public Configuration getStoreConfig() { + public Configuration getStoreConfig() { return this.storeConfig; } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java index 96245e926d..70e1f0532f 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/ConditionalReplaceOperation.java @@ -147,7 +147,8 @@ public boolean equals(final Object obj) { return false; } - ConditionalReplaceOperation other = (ConditionalReplaceOperation)obj; + @SuppressWarnings("unchecked") + ConditionalReplaceOperation other = (ConditionalReplaceOperation) obj; if(this.getOpCode() != other.getOpCode()) { return false; } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java index 5e0de52354..63385b7829 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/operations/RemoveOperation.java @@ -96,7 +96,8 @@ public boolean equals(final Object obj) { return false; } - RemoveOperation other = (RemoveOperation)obj; + @SuppressWarnings("unchecked") + RemoveOperation other = (RemoveOperation) obj; if(this.getOpCode() != other.getOpCode()) { return false; } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/config/TimeoutDurationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/config/TimeoutDurationTest.java index 042478aa02..dfd7b37bb0 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/config/TimeoutDurationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/config/TimeoutDurationTest.java @@ -41,6 +41,7 @@ public class TimeoutDurationTest { @Test public void testEquals() throws Exception { + @SuppressWarnings("unchecked") List> equalPairs = Arrays.asList( Pair.of(TimeoutDuration.of(1, NANOSECONDS), TimeoutDuration.of(1, NANOSECONDS)), Pair.of(TimeoutDuration.of(1, MICROSECONDS), TimeoutDuration.of(1000, NANOSECONDS)), @@ -70,7 +71,7 @@ public void testEquals() throws Exception { Pair.of(TimeoutDuration.of(1, DAYS), TimeoutDuration.of(24L, HOURS)), - Pair.of(TimeoutDuration.of(7, NANOSECONDS), TimeoutDuration.of(1 * 7, NANOSECONDS)), + 
Pair.of(TimeoutDuration.of(7, NANOSECONDS), TimeoutDuration.of(7, NANOSECONDS)), Pair.of(TimeoutDuration.of(7, MICROSECONDS), TimeoutDuration.of(1000 * 7, NANOSECONDS)), Pair.of(TimeoutDuration.of(7, MILLISECONDS), TimeoutDuration.of(1000000 * 7, NANOSECONDS)), Pair.of(TimeoutDuration.of(7, SECONDS), TimeoutDuration.of(1000000000L * 7, NANOSECONDS)), @@ -128,6 +129,7 @@ public void testEquals() throws Exception { assertThat(pair.getFirst().hashCode(), is(equalTo(pair.getSecond().hashCode()))); } + @SuppressWarnings("unchecked") List> unEqualPairs = Arrays.asList( Pair.of(TimeoutDuration.of(Long.MAX_VALUE, DAYS), TimeoutDuration.of(Long.MAX_VALUE, HOURS)), Pair.of(TimeoutDuration.of(Long.MAX_VALUE, DAYS), TimeoutDuration.of(Long.MAX_VALUE, MINUTES)), @@ -238,4 +240,4 @@ public T getSecond() { return this.second; } } -} \ No newline at end of file +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java index 765816809d..fa853b800a 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactoryTest.java @@ -22,7 +22,10 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType; import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockClient; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.terracotta.connection.Connection; import static org.hamcrest.core.Is.is; @@ -44,12 +47,21 @@ public class EhcacheClientEntityFactoryTest { + @Mock + private EntityRef entityRef; + @Mock + private EhcacheClientEntity entity; + @Mock + private Connection connection; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + @Test public void testCreate() throws Exception { - EhcacheClientEntity entity = mock(EhcacheClientEntity.class); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(entity); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -63,11 +75,8 @@ public void testCreate() throws Exception { @Test public void testCreateBadConfig() throws Exception { - EhcacheClientEntity entity = mock(EhcacheClientEntity.class); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(entity); doThrow(ClusteredTierManagerConfigurationException.class).when(entity).configure(any(ServerSideConfiguration.class)); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -87,9 +96,7 @@ public void testCreateBadConfig() throws Exception { @Test public void testCreateWhenExisting() throws Exception { - EntityRef entityRef = mock(EntityRef.class); doThrow(EntityAlreadyExistsException.class).when(entityRef).create(any()); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), 
anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -105,10 +112,7 @@ public void testCreateWhenExisting() throws Exception { @Test public void testRetrieve() throws Exception { - EhcacheClientEntity entity = mock(EhcacheClientEntity.class); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(entity); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -121,11 +125,8 @@ public void testRetrieve() throws Exception { @Test public void testRetrieveFailedValidate() throws Exception { - EhcacheClientEntity entity = mock(EhcacheClientEntity.class); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(entity); doThrow(IllegalArgumentException.class).when(entity).validate(any(ServerSideConfiguration.class)); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -142,11 +143,10 @@ public void testRetrieveFailedValidate() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testRetrieveWhenNotExisting() throws Exception { - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenThrow(EntityNotFoundException.class); doThrow(EntityAlreadyExistsException.class).when(entityRef).create(any()); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -162,9 +162,7 @@ public void testRetrieveWhenNotExisting() throws Exception { @Test public void testDestroy() throws Exception { - EntityRef entityRef = mock(EntityRef.class); doReturn(Boolean.TRUE).when(entityRef).destroy(); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -176,9 +174,7 @@ public void testDestroy() throws Exception { @Test public void testDestroyWhenNotExisting() throws Exception { - EntityRef entityRef = mock(EntityRef.class); doThrow(EntityNotFoundException.class).when(entityRef).destroy(); - Connection connection = mock(Connection.class); when(connection.getEntityRef(eq(EhcacheClientEntity.class), anyInt(), anyString())).thenReturn(entityRef); addMockUnlockedLock(connection, "VoltronReadWriteLock-EhcacheClientEntityFactory-AccessLock-test"); @@ -199,6 +195,7 @@ private static void addMockUnlockedLock(Connection connection, String lockname) private static void addMockLock(Connection connection, String lockname, boolean result, Boolean ... 
results) throws Exception { VoltronReadWriteLockClient lock = mock(VoltronReadWriteLockClient.class); when(lock.tryLock(any(HoldType.class))).thenReturn(result, results); + @SuppressWarnings("unchecked") EntityRef interlockRef = mock(EntityRef.class); when(connection.getEntityRef(eq(VoltronReadWriteLockClient.class), anyInt(), eq(lockname))).thenReturn(interlockRef); when(interlockRef.fetchEntity()).thenReturn(lock); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java index 26ef77111a..6a9476793b 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java @@ -228,7 +228,8 @@ public static PassthroughServer remove(URI uri) { PassthroughConnection connection = serverDescriptor.server.connectNewClient("destroy-connection"); for(Entry entry : serverDescriptor.knownEntities.entrySet()) { - Class type = (Class)entry.getKey(); + @SuppressWarnings("unchecked") + Class type = (Class) entry.getKey(); List args = (List)entry.getValue(); Long version = (Long)args.get(0); String stringArg = (String)args.get(1); @@ -531,6 +532,7 @@ private static final class ConnectionInvocationHandler implements InvocationHand } @Override + @SuppressWarnings("unchecked") public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if (method.getName().equals("close")) { serverDescriptor.remove(connection); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java index 72c174e981..136d6c984d 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java @@ -16,7 +16,10 @@ package org.ehcache.clustered.client.internal.lock; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.terracotta.connection.Connection; import org.terracotta.connection.entity.EntityRef; @@ -27,21 +30,30 @@ import static org.junit.Assert.assertThat; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import org.terracotta.exception.EntityAlreadyExistsException; public class VoltronReadWriteLockTest { + @Mock + private VoltronReadWriteLockClient client; + + @Mock + private EntityRef entityRef; + + @Mock + private Connection connection; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + @Test public void testCreateLockEntityWhenNotExisting() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -52,13 +64,9 @@ public void 
testCreateLockEntityWhenNotExisting() throws Exception { @Test public void testFetchExistingLockEntityWhenExists() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); doThrow(EntityAlreadyExistsException.class).when(entityRef).create(any(Void.class)); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -67,12 +75,8 @@ public void testFetchExistingLockEntityWhenExists() throws Exception { @Test public void testWriteLockLocksWrite() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -83,12 +87,8 @@ public void testWriteLockLocksWrite() throws Exception { @Test public void testReadLockLocksRead() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -99,12 +99,8 @@ public void testReadLockLocksRead() throws Exception { @Test public void testWriteUnlockUnlocksWrite() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -115,12 +111,8 @@ public void testWriteUnlockUnlocksWrite() throws Exception { @Test public void testReadUnlockUnlocksRead() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -131,12 +123,8 @@ public void testReadUnlockUnlocksRead() throws Exception { @Test public void testWriteUnlockClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -147,12 +135,8 @@ public void 
testWriteUnlockClosesEntity() throws Exception { @Test public void testReadUnlockClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -163,12 +147,8 @@ public void testReadUnlockClosesEntity() throws Exception { @Test public void testWriteUnlockDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -179,12 +159,8 @@ public void testWriteUnlockDestroysEntity() throws Exception { @Test public void testReadUnlockDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); - - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -195,13 +171,10 @@ public void testReadUnlockDestroysEntity() throws Exception { @Test public void testTryWriteLockTryLocksWrite() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -211,13 +184,10 @@ public void testTryWriteLockTryLocksWrite() throws Exception { @Test public void testTryReadLockTryLocksRead() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -227,13 +197,10 @@ public void testTryReadLockTryLocksRead() throws Exception { @Test public void testTryWriteUnlockUnlocksWrite() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new 
VoltronReadWriteLock(connection, "TestLock"); @@ -244,13 +211,10 @@ public void testTryWriteUnlockUnlocksWrite() throws Exception { @Test public void testTryReadUnlockUnlocksRead() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -261,13 +225,10 @@ public void testTryReadUnlockUnlocksRead() throws Exception { @Test public void testTryWriteUnlockClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -278,13 +239,10 @@ public void testTryWriteUnlockClosesEntity() throws Exception { @Test public void testTryReadUnlockClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -295,13 +253,10 @@ public void testTryReadUnlockClosesEntity() throws Exception { @Test public void testTryWriteUnlockDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -312,13 +267,10 @@ public void testTryWriteUnlockDestroysEntity() throws Exception { @Test public void testTryReadUnlockDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(true); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -329,13 +281,10 @@ public void testTryReadUnlockDestroysEntity() throws Exception { @Test public void testTryWriteLockFailingClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(false); - EntityRef entityRef = mock(EntityRef.class); 
when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -345,13 +294,10 @@ public void testTryWriteLockFailingClosesEntity() throws Exception { @Test public void testTryReadLockFailingClosesEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(false); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -361,13 +307,10 @@ public void testTryReadLockFailingClosesEntity() throws Exception { @Test public void testTryWriteLockFailingDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(WRITE)).thenReturn(false); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); @@ -377,13 +320,10 @@ public void testTryWriteLockFailingDestroysEntity() throws Exception { @Test public void testTryReadLockFailingDestroysEntity() throws Exception { - VoltronReadWriteLockClient client = mock(VoltronReadWriteLockClient.class); when(client.tryLock(READ)).thenReturn(false); - EntityRef entityRef = mock(EntityRef.class); when(entityRef.fetchEntity()).thenReturn(client); - Connection connection = mock(Connection.class); when(connection.getEntityRef(VoltronReadWriteLockClient.class, 1, "VoltronReadWriteLock-TestLock")).thenReturn(entityRef); VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "TestLock"); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java index f5e58208f0..aaa14bbc7f 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java @@ -1759,7 +1759,7 @@ public void testStoreValidation_MismatchedPoolTypes_ConfiguredDedicatedValidateS DefaultClusteringService accessService = new DefaultClusteringService(accessConfig); accessService.start(null); - Store.Configuration accessStoreConfig = + Store.Configuration accessStoreConfig = getSharedStoreConfig("serverResource1", serializationProvider, Long.class, String.class); try { @@ -1910,30 +1910,8 @@ public void testGetServerStoreProxyReturnsEventualStore() throws Exception { ClusteringService.ClusteredCacheIdentifier cacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-cache", null); ResourcePools resourcePools = mock(ResourcePools.class); - Store.Configuration storeConfig = mock(Store.Configuration.class); - 
when(storeConfig.getResourcePools()).thenReturn(resourcePools); - when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 1L, MemoryUnit.MB)); - when(storeConfig.getKeyType()).thenReturn(String.class); - when(storeConfig.getValueType()).thenReturn(Object.class); - - ServerStoreProxy serverStoreProxy = service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.EVENTUAL); - assertThat(serverStoreProxy, instanceOf(EventualServerStoreProxy.class)); - } - - @Test - public void testGetServerStoreProxyReturnsEventualStoreByDefault() throws Exception { - String entityIdentifier = "my-application"; - ClusteringServiceConfiguration configuration = - new ClusteringServiceConfiguration( - URI.create(CLUSTER_URI_BASE + entityIdentifier), - true, new ServerSideConfiguration(Collections.emptyMap())); - DefaultClusteringService service = new DefaultClusteringService(configuration); - service.start(null); - - ClusteringService.ClusteredCacheIdentifier cacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-cache", null); - - ResourcePools resourcePools = mock(ResourcePools.class); - Store.Configuration storeConfig = mock(Store.Configuration.class); + @SuppressWarnings("unchecked") + Store.Configuration storeConfig = mock(Store.Configuration.class); when(storeConfig.getResourcePools()).thenReturn(resourcePools); when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 1L, MemoryUnit.MB)); when(storeConfig.getKeyType()).thenReturn(String.class); @@ -1956,7 +1934,8 @@ public void testGetServerStoreProxyReturnsStrongStore() throws Exception { ClusteringService.ClusteredCacheIdentifier cacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-cache", null); ResourcePools resourcePools = mock(ResourcePools.class); - Store.Configuration storeConfig = mock(Store.Configuration.class); + @SuppressWarnings("unchecked") + Store.Configuration storeConfig = mock(Store.Configuration.class); when(storeConfig.getResourcePools()).thenReturn(resourcePools); when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 1L, MemoryUnit.MB)); when(storeConfig.getKeyType()).thenReturn(String.class); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java index 69dc50fc9a..8dcc68ea32 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreProviderTest.java @@ -123,9 +123,9 @@ public void testAuthoritativeRank() throws Exception { ServiceLocator serviceLocator = dependencySet().with(mock(ClusteringService.class)).build(); provider.start(serviceLocator); - assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, Collections.EMPTY_LIST), is(1)); - assertThat(provider.rankAuthority(ClusteredResourceType.Types.SHARED, Collections.EMPTY_LIST), is(1)); - assertThat(provider.rankAuthority(new UnmatchedResourceType(), Collections.EMPTY_LIST), is(0)); + assertThat(provider.rankAuthority(ClusteredResourceType.Types.DEDICATED, Collections.>emptyList()), is(1)); + assertThat(provider.rankAuthority(ClusteredResourceType.Types.SHARED, 
Collections.>emptyList()), is(1)); + assertThat(provider.rankAuthority(new UnmatchedResourceType(), Collections.>emptyList()), is(0)); } private void assertRank(final Store.Provider provider, final int expectedRank, final ResourceType... resources) { @@ -173,6 +173,7 @@ public ClassLoader getClassLoader() { } @Override + @SuppressWarnings("unchecked") public ResourcePools getResourcePools() { Map, DedicatedClusteredResourcePoolImpl> poolMap = Collections .singletonMap(ClusteredResourceType.Types.DEDICATED, new DedicatedClusteredResourcePoolImpl("test", 10, MemoryUnit.MB)); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java index 1464038cf7..07871dca25 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java @@ -74,7 +74,7 @@ public class ClusteredStoreTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = URI.create("terracotta://localhost:9510"); - ClusteredStore store; + private ClusteredStore store; @Before public void setup() throws Exception { @@ -126,9 +126,10 @@ public void testPut() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testPutTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -146,8 +147,10 @@ public void testGet() throws Exception { @Test(expected = StoreAccessException.class) public void testGetThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -156,6 +159,7 @@ public void testGetThrowsOnlySAE() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testGetTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); when(proxy.get(1L)).thenThrow(TimeoutException.class); @@ -169,8 +173,11 @@ public void testGetThatCompactsInvokesReplace() throws Exception { TestTimeSource timeSource = new TestTimeSource(); timeSource.advanceTime(134556L); long now = timeSource.getTimeMillis(); + @SuppressWarnings("unchecked") OperationsCodec operationsCodec = new OperationsCodec(new LongSerializer(), new StringSerializer()); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") ResolvedChain resolvedChain = mock(ResolvedChain.class); when(resolvedChain.isCompacted()).thenReturn(true); when(chainResolver.resolve(any(Chain.class), eq(42L), eq(now))).thenReturn(resolvedChain); @@ -191,7 +198,9 @@ public void 
testGetThatDoesNotCompactsInvokesReplace() throws Exception { timeSource.advanceTime(134556L); long now = timeSource.getTimeMillis(); OperationsCodec operationsCodec = new OperationsCodec(new LongSerializer(), new StringSerializer()); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") ResolvedChain resolvedChain = mock(ResolvedChain.class); when(resolvedChain.isCompacted()).thenReturn(false); when(chainResolver.resolve(any(Chain.class), eq(42L), eq(now))).thenReturn(resolvedChain); @@ -217,8 +226,10 @@ public void testContainsKey() throws Exception { @Test(expected = StoreAccessException.class) public void testContainsKeyThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -238,8 +249,10 @@ public void testRemove() throws Exception { @Test(expected = StoreAccessException.class) public void testRemoveThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -248,9 +261,10 @@ public void testRemoveThrowsOnlySAE() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testRemoveTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -277,8 +291,10 @@ public void testClear() throws Exception { @Test(expected = StoreAccessException.class) public void testClearThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); doThrow(new RuntimeException()).when(serverStoreProxy).clear(); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -289,7 +305,8 @@ public void testClearThrowsOnlySAE() throws Exception { @Test(expected = StoreAccessTimeoutException.class) public void testClearTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + @SuppressWarnings("unchecked") + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); doThrow(TimeoutException.class).when(proxy).clear(); ClusteredStore store = new ClusteredStore(codec, 
null, proxy, timeSource); @@ -306,8 +323,10 @@ public void testPutIfAbsent() throws Exception { @Test(expected = StoreAccessException.class) public void testPutIfAbsentThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -316,9 +335,10 @@ public void testPutIfAbsentThrowsOnlySAE() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testPutIfAbsentTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -339,8 +359,10 @@ public void testConditionalRemove() throws Exception { @Test(expected = StoreAccessException.class) public void testConditionalRemoveThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -349,9 +371,10 @@ public void testConditionalRemoveThrowsOnlySAE() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testConditionalRemoveTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -369,8 +392,10 @@ public void testReplace() throws Exception { @Test(expected = StoreAccessException.class) public void testReplaceThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -379,9 +404,10 @@ public void testReplaceThrowsOnlySAE() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testReplaceTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); 
when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -403,8 +429,10 @@ public void testConditionalReplace() throws Exception { @Test(expected = StoreAccessException.class) public void testConditionalReplaceThrowsOnlySAE() throws Exception { + @SuppressWarnings("unchecked") OperationsCodec codec = mock(OperationsCodec.class); - ChainResolver chainResolver = mock(ChainResolver.class); + @SuppressWarnings("unchecked") + ChainResolver chainResolver = mock(ChainResolver.class); ServerStoreProxy serverStoreProxy = mock(ServerStoreProxy.class); when(serverStoreProxy.get(anyLong())).thenThrow(new RuntimeException()); TestTimeSource testTimeSource = mock(TestTimeSource.class); @@ -413,9 +441,10 @@ public void testConditionalReplaceThrowsOnlySAE() throws Exception { } @Test(expected = StoreAccessTimeoutException.class) + @SuppressWarnings("unchecked") public void testConditionalReplaceTimeout() throws Exception { ServerStoreProxy proxy = mock(ServerStoreProxy.class); - OperationsCodec codec = mock(OperationsCodec.class); + OperationsCodec codec = mock(OperationsCodec.class); TimeSource timeSource = mock(TimeSource.class); when(proxy.getAndAppend(anyLong(), any(ByteBuffer.class))).thenThrow(TimeoutException.class); ClusteredStore store = new ClusteredStore(codec, null, proxy, timeSource); @@ -458,6 +487,7 @@ public void testBulkComputeRemoveAll() throws Exception { @Test(expected = UnsupportedOperationException.class) public void testBulkComputeThrowsForGenericFunction() throws Exception { + @SuppressWarnings("unchecked") Function>, Iterable>> remappingFunction = mock(Function.class); store.bulkCompute(new HashSet(Arrays.asList(1L, 2L)), remappingFunction); @@ -478,9 +508,10 @@ public void testBulkComputeIfAbsentGetAll() throws Exception { @Test(expected = UnsupportedOperationException.class) public void testBulkComputeIfAbsentThrowsForGenericFunction() throws Exception { + @SuppressWarnings("unchecked") Function, Iterable>> mappingFunction = mock(Function.class); store.bulkComputeIfAbsent(new HashSet(Arrays.asList(1L, 2L)), mappingFunction); } -} \ No newline at end of file +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ChainResolverExpiryTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ChainResolverExpiryTest.java index 83357f23fd..155a439bd6 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ChainResolverExpiryTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ChainResolverExpiryTest.java @@ -51,7 +51,7 @@ public class ChainResolverExpiryTest { - private static final OperationsCodec codec = new OperationsCodec(new LongSerializer(), new StringSerializer()); + private static final OperationsCodec codec = new OperationsCodec(new LongSerializer(), new StringSerializer()); private static TestTimeSource timeSource = null; @@ -61,9 +61,10 @@ public void initialSetup() { } @Test + @SuppressWarnings("unchecked") public void testGetExpiryForAccessIsIgnored() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); @@ -83,9 +84,10 @@ public void testGetExpiryForAccessIsIgnored() { } @Test + 
@SuppressWarnings("unchecked") public void testGetExpiryForCreationIsInvokedOnlyOnce() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); @@ -108,9 +110,10 @@ public void testGetExpiryForCreationIsInvokedOnlyOnce() { } @Test + @SuppressWarnings("unchecked") public void testGetExpiryForCreationIsNotInvokedForReplacedChains() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); @@ -130,10 +133,11 @@ public void testGetExpiryForCreationIsNotInvokedForReplacedChains() { } @Test + @SuppressWarnings("unchecked") public void testGetExpiryForCreationIsInvokedAfterRemoveOperations() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(Duration.INFINITE); @@ -180,9 +184,10 @@ public void testGetExpiryForCreationIsInvokedAfterRemoveOperations() { } @Test + @SuppressWarnings("unchecked") public void testNullGetExpiryForCreation() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenReturn(null); @@ -198,9 +203,10 @@ public void testNullGetExpiryForCreation() { } @Test + @SuppressWarnings("unchecked") public void testNullGetExpiryForUpdate() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString())).thenReturn(null); @@ -209,18 +215,19 @@ public void testNullGetExpiryForUpdate() { list.add(new PutOperation(1L, "New", timeSource.getTimeMillis())); Chain chain = getChainFromOperations(list); - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - assertThat(resolvedChain.getResolvedResult(1L).getValue().toString(), is("New")); + assertThat(resolvedChain.getResolvedResult(1L).getValue(), is("New")); assertTrue(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).isExpiryAvailable()); assertThat(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).expirationTime(), is(10L)); assertThat(resolvedChain.isCompacted(), is(true)); } @Test + @SuppressWarnings("unchecked") public void testGetExpiryForUpdateUpdatesExpirationTimeStamp() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString())).thenReturn(new Duration(2L, TimeUnit.MILLISECONDS)); @@ -229,18 +236,19 @@ public void testGetExpiryForUpdateUpdatesExpirationTimeStamp() { list.add(new PutOperation(1L, "New", timeSource.getTimeMillis())); Chain chain = getChainFromOperations(list); - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, 
timeSource.getTimeMillis()); + ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); - assertThat(resolvedChain.getResolvedResult(1L).getValue().toString(), is("New")); + assertThat(resolvedChain.getResolvedResult(1L).getValue(), is("New")); assertTrue(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).isExpiryAvailable()); assertThat(getOperationsListFromChain(resolvedChain.getCompactedChain()).get(0).expirationTime(), is(2L)); assertThat(resolvedChain.isCompacted(), is(true)); } @Test + @SuppressWarnings("unchecked") public void testExpiryThrowsException() { Expiry expiry = mock(Expiry.class); - ChainResolver chainResolver = new ChainResolver(codec, expiry); + ChainResolver chainResolver = new ChainResolver(codec, expiry); when(expiry.getExpiryForUpdate(anyLong(), any(ValueSupplier.class), anyString())).thenThrow(new RuntimeException("Test Update Expiry")); when(expiry.getExpiryForCreation(anyLong(), anyString())).thenThrow(new RuntimeException("Test Create Expiry")); @@ -250,7 +258,7 @@ public void testExpiryThrowsException() { list.add(new PutOperation(1L, "Two", timeSource.getTimeMillis())); Chain chain = getChainFromOperations(list); - ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); + ResolvedChain resolvedChain = chainResolver.resolve(chain, 1L, timeSource.getTimeMillis()); assertThat(resolvedChain.getResolvedResult(1L), nullValue()); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java index dbc66039a5..8466768361 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/ConditionalRemoveOperationTest.java @@ -50,8 +50,8 @@ public void testApply() throws Exception { result = operation.apply(anotherOperation); assertNull(result); - PutIfAbsentOperation yetAnotherOperation = new PutIfAbsentOperation(1L, "two", System.currentTimeMillis()); + PutIfAbsentOperation yetAnotherOperation = new PutIfAbsentOperation(1L, "two", System.currentTimeMillis()); result = operation.apply(yetAnotherOperation); assertSame(yetAnotherOperation, result); } -} \ No newline at end of file +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java index 2c2b04bb0c..e1d71e740c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/operations/LazyValueHolderTest.java @@ -17,7 +17,10 @@ package org.ehcache.clustered.client.internal.store.operations; import org.ehcache.spi.serialization.Serializer; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import java.nio.ByteBuffer; import java.util.Date; @@ -32,11 +35,18 @@ public class LazyValueHolderTest { + @Mock + private Serializer serializer; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + @Test public void testGetValueDecodeOnlyOnce() throws Exception { Date date = 
mock(Date.class); ByteBuffer buffer = mock(ByteBuffer.class); - Serializer serializer = mock(Serializer.class); doReturn(date).when(serializer).read(buffer); LazyValueHolder valueHolder = new LazyValueHolder(buffer, serializer); @@ -51,7 +61,6 @@ public void testGetValueDecodeOnlyOnce() throws Exception { public void testEncodeEncodesOnlyOnce() throws Exception { Date date = mock(Date.class); ByteBuffer buffer = mock(ByteBuffer.class); - Serializer serializer = mock(Serializer.class); doReturn(buffer).when(serializer).serialize(date); LazyValueHolder valueHolder = new LazyValueHolder(date); @@ -65,11 +74,10 @@ public void testEncodeEncodesOnlyOnce() throws Exception { @Test public void testEncodeDoesNotEncodeAlreadyEncodedValue() throws Exception { ByteBuffer buffer = mock(ByteBuffer.class); - Serializer serializer = mock(Serializer.class); LazyValueHolder valueHolder = new LazyValueHolder(buffer, serializer); ByteBuffer encoded = valueHolder.encode(serializer); assertThat(encoded, sameInstance(buffer)); verify(serializer, never()).serialize(any(Date.class)); //Value not serialized as the serialized form was available on creation itself } -} \ No newline at end of file +} From 50dd3c10a38f257b152386bae72ddee7625bd722 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 10 Nov 2016 09:40:30 +0100 Subject: [PATCH 128/218] :shirt: Clean up warnings in clustered/server * Made the introduction of new warnings fail the build --- clustered/server/build.gradle | 4 ++++ .../server/repo/ServerStateRepository.java | 20 ++++++++----------- .../state/EhcacheStateServiceProvider.java | 8 +++++--- .../server/EhcacheActiveEntityTest.java | 6 ++++-- .../repo/ServerStateRepositoryTest.java | 14 ++++++++----- 5 files changed, 30 insertions(+), 22 deletions(-) diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index ced76398ae..62efa41fe8 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -53,3 +53,7 @@ compileTestJava { test { executable = Jvm.current().javaExecutable } + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java b/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java index 0b4aca94fc..ec6135df89 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java @@ -18,33 +18,30 @@ import org.ehcache.clustered.common.internal.exceptions.ClusterException; import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; -import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import java.util.AbstractMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.stream.Collectors; class ServerStateRepository { - private final ConcurrentMap concurrentMapRepo = new ConcurrentHashMap(); + private final ConcurrentMap> concurrentMapRepo = new ConcurrentHashMap<>(); EhcacheEntityResponse invoke(StateRepositoryOpMessage message) throws ClusterException { String mapId = message.getMapId(); ConcurrentMap map = concurrentMapRepo.get(mapId); if (map 
== null) { - ConcurrentHashMap newMap = new ConcurrentHashMap(); + ConcurrentHashMap newMap = new ConcurrentHashMap<>(); map = concurrentMapRepo.putIfAbsent(mapId, newMap); if (map == null) { map = newMap; } } - Object result = null; + Object result; switch (message.operation()) { case GET: StateRepositoryOpMessage.GetMessage getMessage = (StateRepositoryOpMessage.GetMessage) message; @@ -55,11 +52,10 @@ EhcacheEntityResponse invoke(StateRepositoryOpMessage message) throws ClusterExc result = map.putIfAbsent(putIfAbsentMessage.getKey(), putIfAbsentMessage.getValue()); break; case ENTRY_SET: - Set entrySet = new HashSet(); - for (Map.Entry entry : map.entrySet()) { - entrySet.add(new AbstractMap.SimpleEntry(entry.getKey(), entry.getValue())); - } - result = entrySet; + result = map.entrySet() + .stream() + .map(entry -> new AbstractMap.SimpleEntry<>(entry.getKey(), entry.getValue())) + .collect(Collectors.toSet()); break; default: throw new IllegalMessageException("Invalid operation: " + message.operation()); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java index 452b6a42d2..812b4e7305 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java @@ -38,7 +38,7 @@ @BuiltinService public class EhcacheStateServiceProvider implements ServiceProvider { - private ConcurrentMap serviceMap = new ConcurrentHashMap(); + private ConcurrentMap serviceMap = new ConcurrentHashMap<>(); @Override public boolean initialize(ServiceProviderConfiguration configuration, PlatformConfiguration platformConfiguration) { @@ -55,14 +55,16 @@ public T getService(long consumerID, ServiceConfiguration configuration) if (result == null) { result = storeManagerService; } - return (T) result; + @SuppressWarnings("unchecked") + T typedResult = (T) result; + return typedResult; } throw new IllegalArgumentException("Unexpected configuration type."); } @Override public Collection> getProvidedServiceTypes() { - List> classes = new ArrayList>(); + List> classes = new ArrayList<>(); classes.add(EhcacheStateService.class); return classes; } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index bbfde482cc..a470011102 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -2636,7 +2636,8 @@ public void testSyncToPassive() throws Exception { .shared("primary") .build())); - PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); activeEntity.synchronizeKeyToPassive(syncChannel, 1); ArgumentCaptor captor = ArgumentCaptor.forClass(EntityStateSyncMessage.class); @@ -2669,7 +2670,8 @@ public void testSyncToPassiveWithoutDefaultServerResource() throws Exception { activeEntity.invoke(client, MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); - PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + @SuppressWarnings("unchecked") + 
PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); activeEntity.synchronizeKeyToPassive(syncChannel, 1); } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java index 98eaea0082..0ee8442c5b 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/repo/ServerStateRepositoryTest.java @@ -18,6 +18,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; +import org.hamcrest.Matcher; import org.junit.Test; import java.util.AbstractMap; @@ -42,7 +43,7 @@ public void testInvokeOnNonExistentRepositorySucceeds() throws Exception { assertThat(response.getValue(), nullValue()); response = (EhcacheEntityResponse.MapValue) repository.invoke( new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1", CLIENT_ID)); - assertThat(response.getValue(), is((Object)"value1")); + assertThat(response.getValue(), is("value1")); } @Test @@ -54,7 +55,7 @@ public void testInvokePutIfAbsent() throws Exception { response = (EhcacheEntityResponse.MapValue) repository.invoke( new StateRepositoryOpMessage.PutIfAbsentMessage("foo", "bar", "key1", "value2", CLIENT_ID)); - assertThat(response.getValue(), is((Object)"value1")); + assertThat(response.getValue(), is("value1")); } @Test @@ -64,7 +65,7 @@ public void testInvokeGet() throws Exception { EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) repository.invoke( new StateRepositoryOpMessage.GetMessage("foo", "bar", "key1", CLIENT_ID)); - assertThat(response.getValue(), is((Object)"value1")); + assertThat(response.getValue(), is("value1")); } @Test @@ -76,12 +77,15 @@ public void testInvokeEntrySet() throws Exception { EhcacheEntityResponse.MapValue response = (EhcacheEntityResponse.MapValue) repository.invoke( new StateRepositoryOpMessage.EntrySetMessage("foo", "bar", CLIENT_ID)); + @SuppressWarnings("unchecked") Set> entrySet = (Set>) response.getValue(); assertThat(entrySet.size(), is(3)); Map.Entry entry1 = new AbstractMap.SimpleEntry("key1", "value1"); Map.Entry entry2 = new AbstractMap.SimpleEntry("key2", "value2"); Map.Entry entry3 = new AbstractMap.SimpleEntry("key3", "value3"); - assertThat(entrySet, containsInAnyOrder(entry1, entry2, entry3)); + @SuppressWarnings("unchecked") + Matcher>> matcher = containsInAnyOrder(entry1, entry2, entry3); + assertThat(entrySet, matcher); } -} \ No newline at end of file +} From f1f6fc1c57c1f33e3d8cdb6d2853b5533168e40d Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 11 Nov 2016 09:48:27 +0100 Subject: [PATCH 129/218] :shirt: Clean up warnings in clustered/integration-test * Made the introduction of new warnings fail the build --- clustered/integration-test/build.gradle | 3 +++ .../clustered/management/ClusteredStatisticsRatioTest.java | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index f4546eb9de..20a21dc623 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -69,3 +69,6 @@ test { // testLogging.showStandardStreams = true } +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git 
a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java index 0ddf774cb4..df3bc4a2e4 100755 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.management; import org.ehcache.Cache; +import org.hamcrest.collection.IsArray; import org.junit.Test; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.Sample; @@ -80,7 +81,9 @@ public void ratioTest() throws Exception { } } while (!Thread.currentThread().isInterrupted() && !Arrays.equals(ratios, new Double[]{.5d, .5d, .5d, .5d})); - assertThat(ratios, is(array(equalTo(.5d), equalTo(.5d), equalTo(.5d), equalTo(.5d)))); + @SuppressWarnings("unchecked") + IsArray array = array(equalTo(.5d), equalTo(.5d), equalTo(.5d), equalTo(.5d)); + assertThat(ratios, is(array)); } } From b5e121cd1cb506437e9f0ada28f1fc4b620b9ef2 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Tue, 15 Nov 2016 11:08:02 +0100 Subject: [PATCH 130/218] :memo: Fix start of file inclusion --- docs/src/docs/asciidoc/user/serializers-copiers.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/docs/asciidoc/user/serializers-copiers.adoc b/docs/src/docs/asciidoc/user/serializers-copiers.adoc index 7d20bdf047..c59d9d40d7 100644 --- a/docs/src/docs/asciidoc/user/serializers-copiers.adoc +++ b/docs/src/docs/asciidoc/user/serializers-copiers.adoc @@ -109,7 +109,7 @@ Implement the following interface, from package `org.ehcache.spi.serialization`: [source,java,indent=0] ---- -include::{sourcedir31}/api/src/main/java/org/ehcache/spi/serialization/Serializer.java[lines=21..-1] +include::{sourcedir31}/api/src/main/java/org/ehcache/spi/serialization/Serializer.java[lines=20..-1] ---- As the javadoc states, there are some constructor rules, see the <> for that. 
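For illustration, a minimal custom serializer along the lines described in the serializers-copiers.adoc passage patched above could look like the following sketch. It assumes the Ehcache 3.1 `Serializer<T>` contract (`serialize`, `read`, `equals`) and the constructor rule that documentation refers to (a public constructor taking a `ClassLoader`); the class name `SimpleStringSerializer` is hypothetical and is not part of this patch series.

[source,java]
----
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.ehcache.spi.serialization.Serializer;
import org.ehcache.spi.serialization.SerializerException;

/**
 * Illustrative sketch of a String serializer; not part of the patches above.
 */
public class SimpleStringSerializer implements Serializer<String> {

  // Constructor taking a ClassLoader, as required for transient caches.
  public SimpleStringSerializer(ClassLoader classLoader) {
    // No state is needed for this sketch.
  }

  @Override
  public ByteBuffer serialize(String object) throws SerializerException {
    // Encode the value as UTF-8 bytes wrapped in a ByteBuffer.
    return ByteBuffer.wrap(object.getBytes(StandardCharsets.UTF_8));
  }

  @Override
  public String read(ByteBuffer binary) throws SerializerException {
    // Copy the remaining bytes out of the buffer and decode them.
    byte[] bytes = new byte[binary.remaining()];
    binary.get(bytes);
    return new String(bytes, StandardCharsets.UTF_8);
  }

  @Override
  public boolean equals(String object, ByteBuffer binary) throws SerializerException {
    // Compare against a duplicate so the caller's buffer position is untouched.
    return object.equals(read(binary.duplicate()));
  }
}
----

The sketch only covers the transient-cache case; the constructor rules section the documentation points to also covers the additional constructor expected for serializers used with persistent caches.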
From 46a589303a01f40a790be88faaeb49e3a75f77b6 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Tue, 15 Nov 2016 16:34:41 +0530 Subject: [PATCH 131/218] Closes #1593 Avoid response listener registration race during store creation --- .../client/internal/EhcacheClientEntity.java | 30 +++++++ .../service/DefaultClusteringService.java | 27 ++++--- .../store/EventualServerStoreProxy.java | 78 ++++++++++++------- .../store/NoInvalidationServerStoreProxy.java | 5 ++ .../internal/store/ServerStoreProxy.java | 5 ++ .../store/StrongServerStoreProxy.java | 44 ++++++++--- .../service/DefaultClusteringServiceTest.java | 77 ++++++++++++++++++ .../clustered/server/EhcacheActiveEntity.java | 4 +- 8 files changed, 222 insertions(+), 48 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index 32c9366c46..96ece606ba 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -105,6 +105,7 @@ public EhcacheClientEntity(EntityClientEndpoint void fireResponseEvent(T response) { @SuppressWarnings("unchecked") List> responseListeners = (List) this.responseListeners.get(response.getClass()); if (responseListeners == null) { + LOGGER.warn("Ignoring the response {} as no registered response listener could be found.", response); return; } LOGGER.debug("{} registered response listener(s) for {}", responseListeners.size(), response.getClass()); @@ -170,12 +172,30 @@ public void addDisconnectionListener(DisconnectionListener listener) { disconnectionListeners.add(listener); } + public void removeDisconnectionListener(DisconnectionListener listener) { + disconnectionListeners.remove(listener); + } + + public List getDisconnectionListeners() { + return Collections.unmodifiableList(disconnectionListeners); + } + public void addReconnectListener(ReconnectListener listener) { synchronized (lock) { reconnectListeners.add(listener); } } + public void removeReconnectListener(ReconnectListener listener) { + synchronized (lock) { + reconnectListeners.remove(listener); + } + } + + public List getReconnectListeners() { + return Collections.unmodifiableList(reconnectListeners); + } + public void addResponseListener(Class responseType, ResponseListener responseListener) { List> responseListeners = this.responseListeners.get(responseType); if (responseListeners == null) { @@ -185,6 +205,13 @@ public void addResponseListener(Class respo responseListeners.add(responseListener); } + public void removeResponseListener(Class responseType, ResponseListener responseListener) { + List> responseListeners = this.responseListeners.get(responseType); + if (responseListeners != null) { + responseListeners.remove(responseListener); + } + } + public UUID identity() { return ClusteredEhcacheIdentity.deserialize(endpoint.getEntityConfiguration()); } @@ -192,6 +219,9 @@ public UUID identity() { @Override public void close() { endpoint.close(); + this.responseListeners.clear(); + this.disconnectionListeners.clear(); + this.reconnectListeners.clear(); } public void validate(ServerSideConfiguration config) throws ClusteredTierManagerValidationException, TimeoutException { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java 
b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java index 9cd60ecafb..8a9fc2eabb 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/DefaultClusteringService.java @@ -80,7 +80,7 @@ class DefaultClusteringService implements ClusteringService, EntityService { private volatile Connection clusterConnection; private EhcacheClientEntityFactory entityFactory; - private EhcacheClientEntity entity; + EhcacheClientEntity entity; private volatile boolean inMaintenance = false; @@ -366,6 +366,19 @@ public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifie throw new IllegalStateException("A clustered resource is required for a clustered cache"); } + ServerStoreProxy serverStoreProxy; + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory(cacheId, entity.getClientId()); + switch (configuredConsistency) { + case STRONG: + serverStoreProxy = new StrongServerStoreProxy(messageFactory, entity); + break; + case EVENTUAL: + serverStoreProxy = new EventualServerStoreProxy(messageFactory, entity); + break; + default: + throw new AssertionError("Unknown consistency : " + configuredConsistency); + } + final ServerStoreConfiguration clientStoreConfiguration = new ServerStoreConfiguration( clusteredResourcePool.getPoolAllocation(), storeConfig.getKeyType().getName(), @@ -392,22 +405,16 @@ public ServerStoreProxy getServerStoreProxy(final ClusteredCacheIdentifie entity.validateCache(cacheId, clientStoreConfiguration); } } catch (ClusteredTierException e) { + serverStoreProxy.close(); throw new CachePersistenceException("Unable to create clustered tier proxy '" + cacheIdentifier.getId() + "' for entity '" + entityIdentifier + "'", e); } catch (TimeoutException e) { + serverStoreProxy.close(); throw new CachePersistenceException("Unable to create clustered tier proxy '" + cacheIdentifier.getId() + "' for entity '" + entityIdentifier + "'; validate operation timed out", e); } - ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory(cacheId, entity.getClientId()); - switch (configuredConsistency) { - case STRONG: - return new StrongServerStoreProxy(messageFactory, entity); - case EVENTUAL: - return new EventualServerStoreProxy(messageFactory, entity); - default: - throw new AssertionError("Unknown consistency : " + configuredConsistency); - } + return serverStoreProxy; } @Override diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java index 7858717bc8..5780c4dff8 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java @@ -24,6 +24,8 @@ import java.nio.ByteBuffer; import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeoutException; @@ -36,10 +38,15 @@ public class EventualServerStoreProxy implements ServerStoreProxy { private final ServerStoreProxy delegate; private final List invalidationListeners = new CopyOnWriteArrayList(); + private final EhcacheClientEntity entity; + private final Map, 
EhcacheClientEntity.ResponseListener> responseListeners + = new ConcurrentHashMap, EhcacheClientEntity.ResponseListener>(); + @SuppressWarnings("unchecked") public EventualServerStoreProxy(final ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { + this.entity = entity; this.delegate = new NoInvalidationServerStoreProxy(messageFactory, entity); - entity.addResponseListener(EhcacheEntityResponse.ServerInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { + this.responseListeners.put(EhcacheEntityResponse.ServerInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { @Override public void onResponse(EhcacheEntityResponse.ServerInvalidateHash response) { if (response.getCacheId().equals(messageFactory.getCacheId())) { @@ -53,39 +60,45 @@ public void onResponse(EhcacheEntityResponse.ServerInvalidateHash response) { } } }); - entity.addResponseListener(EhcacheEntityResponse.ClientInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { - final String cacheId = response.getCacheId(); - final long key = response.getKey(); - final int invalidationId = response.getInvalidationId(); - - if (cacheId.equals(messageFactory.getCacheId())) { - LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateHash(key); - } - } else { + this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { + @Override + public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { + final String cacheId = response.getCacheId(); + final long key = response.getKey(); + final int invalidationId = response.getInvalidationId(); + + if (cacheId.equals(messageFactory.getCacheId())) { + LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId); + for (InvalidationListener listener : invalidationListeners) { + listener.onInvalidateHash(key); + } + } else { LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); + } } - } }); - entity.addResponseListener(EhcacheEntityResponse.ClientInvalidateAll.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { - final String cacheId = response.getCacheId(); - final int invalidationId = response.getInvalidationId(); - - if (cacheId.equals(messageFactory.getCacheId())) { - LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateAll(); - } - } else { + this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateAll.class, new EhcacheClientEntity.ResponseListener() { + @Override + public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { + final String cacheId = response.getCacheId(); + final int invalidationId = response.getInvalidationId(); + + if (cacheId.equals(messageFactory.getCacheId())) { + LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId); + for (InvalidationListener listener : invalidationListeners) { + listener.onInvalidateAll(); + } + } else { LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on 
unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); + } } - } }); + + for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : + this.responseListeners.entrySet()) { + this.entity.addResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener) classResponseListenerEntry.getValue()); + } + } @Override @@ -103,6 +116,15 @@ public boolean removeInvalidationListener(InvalidationListener listener) { return invalidationListeners.remove(listener); } + @SuppressWarnings("unchecked") + @Override + public void close() { + for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : + this.responseListeners.entrySet()) { + this.entity.removeResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener) classResponseListenerEntry.getValue()); + } + } + @Override public Chain get(long key) throws TimeoutException { return delegate.get(key); diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java index b2a6c4c1c0..61462b625a 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java @@ -52,6 +52,11 @@ public boolean removeInvalidationListener(InvalidationListener listener) { throw new UnsupportedOperationException(); } + @Override + public void close() { + // No-op + } + @Override public Chain get(long key) throws TimeoutException { EhcacheEntityResponse response; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java index dadb81a480..c8b93f178e 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/ServerStoreProxy.java @@ -60,4 +60,9 @@ interface InvalidationListener { */ boolean removeInvalidationListener(InvalidationListener listener); + /** + * Closes this proxy. 
+ */ + void close(); + } diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java index 83ea76839d..53810ceb28 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java @@ -45,12 +45,17 @@ public class StrongServerStoreProxy implements ServerStoreProxy { private final Lock invalidateAllLock = new ReentrantLock(); private volatile CountDownLatch invalidateAllLatch; private final List invalidationListeners = new CopyOnWriteArrayList(); + private final Map, EhcacheClientEntity.ResponseListener> responseListeners + = new ConcurrentHashMap, EhcacheClientEntity.ResponseListener>(); private final EhcacheClientEntity entity; + private final EhcacheClientEntity.ReconnectListener reconnectListener; + private final EhcacheClientEntity.DisconnectionListener disconnectionListener; + @SuppressWarnings("unchecked") public StrongServerStoreProxy(final ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { this.delegate = new NoInvalidationServerStoreProxy(messageFactory, entity); this.entity = entity; - entity.addReconnectListener(new EhcacheClientEntity.ReconnectListener() { + this.reconnectListener = new EhcacheClientEntity.ReconnectListener() { @Override public void onHandleReconnect(ReconnectMessage reconnectMessage) { Set inflightInvalidations = hashInvalidationsInProgress.keySet(); @@ -59,8 +64,10 @@ public void onHandleReconnect(ReconnectMessage reconnectMessage) { reconnectMessage.addClearInProgress(delegate.getCacheId()); } } - }); - entity.addResponseListener(EhcacheEntityResponse.HashInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { + }; + entity.addReconnectListener(reconnectListener); + + this.responseListeners.put(EhcacheEntityResponse.HashInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { @Override public void onResponse(EhcacheEntityResponse.HashInvalidationDone response) { if (response.getCacheId().equals(messageFactory.getCacheId())) { @@ -75,7 +82,7 @@ public void onResponse(EhcacheEntityResponse.HashInvalidationDone response) { } } }); - entity.addResponseListener(EhcacheEntityResponse.AllInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { + this.responseListeners.put(EhcacheEntityResponse.AllInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { @Override public void onResponse(EhcacheEntityResponse.AllInvalidationDone response) { if (response.getCacheId().equals(messageFactory.getCacheId())) { @@ -99,7 +106,7 @@ public void onResponse(EhcacheEntityResponse.AllInvalidationDone response) { } } }); - entity.addResponseListener(EhcacheEntityResponse.ServerInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { + this.responseListeners.put(EhcacheEntityResponse.ServerInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { @Override public void onResponse(EhcacheEntityResponse.ServerInvalidateHash response) { if (response.getCacheId().equals(messageFactory.getCacheId())) { @@ -113,7 +120,7 @@ public void onResponse(EhcacheEntityResponse.ServerInvalidateHash response) { } } }); - entity.addResponseListener(EhcacheEntityResponse.ClientInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { + 
this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { @Override public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { final String cacheId = response.getCacheId(); @@ -138,7 +145,7 @@ public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { } } }); - entity.addResponseListener(EhcacheEntityResponse.ClientInvalidateAll.class, new EhcacheClientEntity.ResponseListener() { + this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateAll.class, new EhcacheClientEntity.ResponseListener() { @Override public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { final String cacheId = response.getCacheId(); @@ -162,7 +169,13 @@ public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { } } }); - entity.addDisconnectionListener(new EhcacheClientEntity.DisconnectionListener() { + + for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : + this.responseListeners.entrySet()) { + this.entity.addResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener) classResponseListenerEntry.getValue()); + } + + this.disconnectionListener = new EhcacheClientEntity.DisconnectionListener() { @Override public void onDisconnection() { for (Map.Entry entry : hashInvalidationsInProgress.entrySet()) { @@ -179,7 +192,8 @@ public void onDisconnection() { invalidateAllLock.unlock(); } } - }); + }; + entity.addDisconnectionListener(disconnectionListener); } private T performWaitingForHashInvalidation(long key, NullaryFunction c) throws InterruptedException, TimeoutException { @@ -197,6 +211,7 @@ private T performWaitingForHashInvalidation(long key, NullaryFunction c) try { T result = c.apply(); + LOGGER.debug("CLIENT: Waiting for invalidations on key {}", key); awaitOnLatch(latch); LOGGER.debug("CLIENT: key {} invalidated on all clients, unblocking call", key); return result; @@ -283,6 +298,17 @@ public boolean removeInvalidationListener(InvalidationListener listener) { return invalidationListeners.remove(listener); } + @SuppressWarnings("unchecked") + @Override + public void close() { + this.entity.removeDisconnectionListener(this.disconnectionListener); + this.entity.removeReconnectListener(this.reconnectListener); + for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : + this.responseListeners.entrySet()) { + this.entity.removeResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener) classResponseListenerEntry.getValue()); + } + } + @Override public Chain get(long key) throws TimeoutException { return delegate.get(key); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java index aaa14bbc7f..122eef6c6e 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/DefaultClusteringServiceTest.java @@ -1945,6 +1945,83 @@ public void testGetServerStoreProxyReturnsStrongStore() throws Exception { assertThat(serverStoreProxy, instanceOf(StrongServerStoreProxy.class)); } + @Test + public void testGetServerStoreProxyFailureClearsEntityListeners() throws Exception { + // Initial setup begin + String entityIdentifier = "my-application"; + 
ClusteringServiceConfiguration configuration = + new ClusteringServiceConfiguration( + URI.create(CLUSTER_URI_BASE + entityIdentifier), + true, new ServerSideConfiguration(Collections.emptyMap())); + DefaultClusteringService service = new DefaultClusteringService(configuration); + service.start(null); + + ClusteringService.ClusteredCacheIdentifier cacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-cache", null); + + ResourcePools resourcePools = mock(ResourcePools.class); + @SuppressWarnings("unchecked") + Store.Configuration storeConfig = mock(Store.Configuration.class); + when(storeConfig.getResourcePools()).thenReturn(resourcePools); + when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 1L, MemoryUnit.MB)); + when(storeConfig.getKeyType()).thenReturn(String.class); + when(storeConfig.getValueType()).thenReturn(Object.class); + + service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); // Creates the store + service.stop(); + // Initial setup end + + service.start(null); + when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 2L, MemoryUnit.MB)); + try { + service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); + fail("Server store proxy creation should have failed"); + } catch (CachePersistenceException cpe) { + assertThat(service.entity.getDisconnectionListeners().isEmpty(), is(true)); + assertThat(service.entity.getReconnectListeners().isEmpty(), is(true)); + } + } + + @Test + public void testGetServerStoreProxyFailureDoesNotClearOtherStoreEntityListeners() throws Exception { + // Initial setup begin + String entityIdentifier = "my-application"; + ClusteringServiceConfiguration configuration = + new ClusteringServiceConfiguration( + URI.create(CLUSTER_URI_BASE + entityIdentifier), + true, new ServerSideConfiguration(Collections.emptyMap())); + DefaultClusteringService service = new DefaultClusteringService(configuration); + service.start(null); + + ClusteringService.ClusteredCacheIdentifier cacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-cache", null); + + ResourcePools resourcePools = mock(ResourcePools.class); + @SuppressWarnings("unchecked") + Store.Configuration storeConfig = mock(Store.Configuration.class); + when(storeConfig.getResourcePools()).thenReturn(resourcePools); + when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 1L, MemoryUnit.MB)); + when(storeConfig.getKeyType()).thenReturn(String.class); + when(storeConfig.getValueType()).thenReturn(Object.class); + + service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); // Creates the store + service.stop(); + // Initial setup end + + service.start(null); + ClusteringService.ClusteredCacheIdentifier otherCacheIdentifier = (ClusteredCacheIdentifier) service.getPersistenceSpaceIdentifier("my-other-cache", null); + service.getServerStoreProxy(otherCacheIdentifier, storeConfig, Consistency.STRONG); // Creates one more store + int disconnectionListenersSize = service.entity.getDisconnectionListeners().size(); + int reconnectionListenersSize = service.entity.getReconnectListeners().size(); + + when(resourcePools.getPoolForResource(eq(DEDICATED))).thenReturn(new DedicatedClusteredResourcePoolImpl("serverResource1", 2L, MemoryUnit.MB)); + try { + 
service.getServerStoreProxy(cacheIdentifier, storeConfig, Consistency.STRONG); + fail("Server store proxy creation should have failed"); + } catch (CachePersistenceException cpe) { + assertThat(service.entity.getDisconnectionListeners().size(), is(disconnectionListenersSize)); + assertThat(service.entity.getReconnectListeners().size(), is(reconnectionListenersSize)); + } + } + @Test public void testGetStateRepositoryWithinTwiceWithSameName() throws Exception { ClusteringServiceConfiguration configuration = diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 229492049d..e904947a7b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -495,11 +495,13 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client } case GET_AND_APPEND: { ServerStoreOpMessage.GetAndAppendMessage getAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage)message; + LOGGER.trace("Message {} : GET_AND_APPEND on key {} from client {}", message, getAndAppendMessage.getKey(), getAndAppendMessage.getClientId()); if (!isMessageDuplicate(message)) { - + LOGGER.trace("Message {} : is not duplicate", message); Chain result = cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); sendMessageToSelfAndDeferRetirement(getAndAppendMessage, cacheStore.get(getAndAppendMessage.getKey())); EhcacheEntityResponse response = responseFactory.response(result); + LOGGER.debug("Send invalidations for key {}", getAndAppendMessage.getKey()); invalidateHashForClient(clientDescriptor, getAndAppendMessage.getCacheId(), getAndAppendMessage.getKey()); return response; } From 65a6f3cd7cf60c59359123f42f2c5ccd61bba61b Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Tue, 15 Nov 2016 17:07:24 -0500 Subject: [PATCH 132/218] :memo: Replace non existing RuntimeCacheConfiguration with CacheRuntimeConfiguration --- docs/user/107.adoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/user/107.adoc b/docs/user/107.adoc index 96e2f9348f..2f6acdffbf 100644 --- a/docs/user/107.adoc +++ b/docs/user/107.adoc @@ -48,7 +48,7 @@ NOTE: You can also use the `CachingProvider.getCacheManager()` method that takes === Starting from JSR-107 created caches When you create a `Cache` on a `CacheManager` using a `MutableConfiguration` - that is you only use JSR-107 types - -you can still get to the underlying Ehcache `RuntimeCacheConfiguration`: +you can still get to the underlying Ehcache `CacheRuntimeConfiguration`: [source,java,indent=0] ---- @@ -58,7 +58,7 @@ include::../../107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegra <1> Create a JSR-107 cache using the `MutableConfiguration` from the specification <2> Get to the JSR-107 `CompleteConfiguration` <3> Get to the Ehcache JSR-107 configuration bridge -<4> Unwrap to the Ehcache `RuntimeCacheConfiguration` type +<4> Unwrap to the Ehcache `CacheRuntimeConfiguration` type === Building the configuration using Ehcache APIs From f5d1edddd4b360ea0b60951ebb8a97bd67089165 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Fri, 11 Nov 2016 19:34:19 +0530 Subject: [PATCH 133/218] Closes #1476 Doc improvements around XML configuration --- docs/src/docs/asciidoc/user/xml.adoc | 3 ++- .../java/org/ehcache/config/builders/CacheManagerBuilder.java | 3 ++- 
xml/src/test/java/org/ehcache/docs/GettingStarted.java | 1 + 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/src/docs/asciidoc/user/xml.adoc b/docs/src/docs/asciidoc/user/xml.adoc index 6c00ad7629..c8224f7e59 100644 --- a/docs/src/docs/asciidoc/user/xml.adoc +++ b/docs/src/docs/asciidoc/user/xml.adoc @@ -111,7 +111,8 @@ include::{sourcedir31}/xml/src/test/java/org/ehcache/docs/GettingStarted.java[ta <1> Obtain a `URL` to your XML file's location <2> Instantiate an `XmlConfiguration` passing the XML file's URL to it <3> Using the static `org.ehcache.config.builders.CacheManagerBuilder.newCacheManager(org.ehcache.config.Configuration)` allows you - to create your `CacheManager` instance using the `Configuration` from the `XmlConfiguration` + to create your `CacheManager` instance using the `Configuration` from the `XmlConfiguration`. +<4> Initialize the `cacheManager` before it is used. We can also use `` declared in the XML file to seed instances of `CacheConfigurationBuilder`. In order to use a `` element from an XML file, e.g. the `/my-config.xml` contains this XML fragment: diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java b/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java index 2de7ef7c66..412924d2b5 100644 --- a/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java @@ -100,7 +100,8 @@ private CacheManagerBuilder(CacheManagerBuilder builder, ConfigurationBuilder } /** - * Creates a new {@link CacheManager} based on the provided configuration + * Creates a new {@link CacheManager} based on the provided configuration. + * The returned {@code CacheManager} is uninitialized. * * @param configuration the configuration to use * @return a {@code CacheManager} diff --git a/xml/src/test/java/org/ehcache/docs/GettingStarted.java b/xml/src/test/java/org/ehcache/docs/GettingStarted.java index 2ebce8a713..62632740b2 100644 --- a/xml/src/test/java/org/ehcache/docs/GettingStarted.java +++ b/xml/src/test/java/org/ehcache/docs/GettingStarted.java @@ -36,6 +36,7 @@ public void xmlConfigSample() throws Exception { final URL myUrl = getClass().getResource("/configs/docs/getting-started.xml"); // <1> XmlConfiguration xmlConfig = new XmlConfiguration(myUrl); // <2> CacheManager myCacheManager = CacheManagerBuilder.newCacheManager(xmlConfig); // <3> + myCacheManager.init(); // <4> // end::xmlConfig[] } From fe2d09c1d5ea41330afd0c73d158b4a44d05f5ab Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Wed, 16 Nov 2016 14:56:19 +0100 Subject: [PATCH 134/218] :green_heart: Bumping stats tests timeouts --- .../ehcache/management/providers/statistics/EvictionTest.java | 2 +- .../ehcache/management/providers/statistics/HitCountTest.java | 2 +- .../ehcache/management/providers/statistics/HitRatioTest.java | 2 +- .../ehcache/management/providers/statistics/MissCountTest.java | 2 +- .../ehcache/management/providers/statistics/MissRatioTest.java | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java index 0dde5e56c2..bc8cb222ae 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java @@ -98,7 +98,7 @@ public static Collection data() 
{ public final TemporaryFolder diskPath = new TemporaryFolder(); @Rule - public final Timeout globalTimeout = Timeout.seconds(10); + public final Timeout globalTimeout = Timeout.seconds(60); public EvictionTest(Builder resources, int iterations, List expected, byte[] value, List stats) { this.resources = resources.build(); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java index db190a9c79..5d64e07b36 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java @@ -51,7 +51,7 @@ public class HitCountTest { @Rule - public final Timeout globalTimeout = Timeout.seconds(10); + public final Timeout globalTimeout = Timeout.seconds(60); @Rule public final TemporaryFolder diskPath = new TemporaryFolder(); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java index aa77e7aed1..197af8f522 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java @@ -56,7 +56,7 @@ public class HitRatioTest { @Rule - public final Timeout globalTimeout = Timeout.seconds(30); + public final Timeout globalTimeout = Timeout.seconds(60); @Rule public final TemporaryFolder diskPath = new TemporaryFolder(); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java index 2eb79c3ddd..16ef74e002 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java @@ -50,7 +50,7 @@ public class MissCountTest { @Rule - public final Timeout globalTimeout = Timeout.seconds(10); + public final Timeout globalTimeout = Timeout.seconds(60); @Rule public final TemporaryFolder diskPath = new TemporaryFolder(); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java index 20dd6adc15..8fc63ef3cd 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java @@ -52,7 +52,7 @@ public class MissRatioTest { @Rule - public final Timeout globalTimeout = Timeout.seconds(10); + public final Timeout globalTimeout = Timeout.seconds(60); @Rule public final TemporaryFolder diskPath = new TemporaryFolder(); From 516513bdac7e2baf5057d023d3ddd3c2c4463789 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Tue, 15 Nov 2016 16:19:05 +0530 Subject: [PATCH 135/218] Closes #1605 Eventual proxy to send acks to server --- .../store/CommonServerStoreProxy.java | 221 ++++++++++++++++++ .../store/EventualServerStoreProxy.java | 83 +------ .../store/NoInvalidationServerStoreProxy.java | 127 ---------- .../store/StrongServerStoreProxy.java | 92 +------- .../internal/store/ClusteredStoreTest.java | 2 +- ...t.java => CommonServerStoreProxyTest.java} | 6 +- .../store/EventualServerStoreProxyTest.java | 32 ++- 
.../ObservableEhcacheServerEntityService.java | 6 + 8 files changed, 273 insertions(+), 296 deletions(-) create mode 100644 clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java delete mode 100644 clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java rename clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/{NoInvalidationServerStoreProxyTest.java => CommonServerStoreProxyTest.java} (96%) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java new file mode 100644 index 0000000000..1258b1eb48 --- /dev/null +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java @@ -0,0 +1,221 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.store; + +import org.ehcache.clustered.client.internal.EhcacheClientEntity; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; +import org.ehcache.clustered.common.internal.store.Chain; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.TimeoutException; + +/** + * Provides client-side access to the services of a {@code ServerStore}. 
+ */ +class CommonServerStoreProxy implements ServerStoreProxy { + + private static final Logger LOGGER = LoggerFactory.getLogger(CommonServerStoreProxy.class); + + private final ServerStoreMessageFactory messageFactory; + private final EhcacheClientEntity entity; + + private final List invalidationListeners = new CopyOnWriteArrayList(); + private final Map, EhcacheClientEntity.ResponseListener> responseListeners + = new ConcurrentHashMap, EhcacheClientEntity.ResponseListener>(); + + CommonServerStoreProxy(final ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { + this.messageFactory = messageFactory; + this.entity = entity; + this.responseListeners.put(EhcacheEntityResponse.ServerInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { + @Override + public void onResponse(EhcacheEntityResponse.ServerInvalidateHash response) { + if (response.getCacheId().equals(messageFactory.getCacheId())) { + long key = response.getKey(); + LOGGER.debug("CLIENT: on cache {}, server requesting hash {} to be invalidated", messageFactory.getCacheId(), key); + for (InvalidationListener listener : invalidationListeners) { + listener.onInvalidateHash(key); + } + } else { + LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); + } + } + }); + this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { + @Override + public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { + final String cacheId = response.getCacheId(); + final long key = response.getKey(); + final int invalidationId = response.getInvalidationId(); + + if (cacheId.equals(messageFactory.getCacheId())) { + LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId); + for (InvalidationListener listener : invalidationListeners) { + listener.onInvalidateHash(key); + } + + try { + LOGGER.debug("CLIENT: ack'ing invalidation of hash {} from cache {} (ID {})", key, cacheId, invalidationId); + entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), false); + } catch (Exception e) { + //TODO: what should be done here? + LOGGER.error("error acking client invalidation of hash {} on cache {}", key, cacheId, e); + } + } else { + LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); + } + } + }); + this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateAll.class, new EhcacheClientEntity.ResponseListener() { + @Override + public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { + final String cacheId = response.getCacheId(); + final int invalidationId = response.getInvalidationId(); + + if (cacheId.equals(messageFactory.getCacheId())) { + LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId); + for (InvalidationListener listener : invalidationListeners) { + listener.onInvalidateAll(); + } + + try { + LOGGER.debug("CLIENT: ack'ing invalidation of all from cache {} (ID {})", cacheId, invalidationId); + entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), false); + } catch (Exception e) { + //TODO: what should be done here? 
+ LOGGER.error("error acking client invalidation of all on cache {}", cacheId, e); + } + } else { + LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); + } + } + }); + + addResponseListenersToEntity(); + } + + @SuppressWarnings("unchecked") + private void addResponseListenersToEntity() { + for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : + this.responseListeners.entrySet()) { + this.entity.addResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener)classResponseListenerEntry.getValue()); + } + } + + @Override + public String getCacheId() { + return messageFactory.getCacheId(); + } + + @Override + public void addInvalidationListener(InvalidationListener listener) { + invalidationListeners.add(listener); + } + + @Override + public boolean removeInvalidationListener(InvalidationListener listener) { + return invalidationListeners.remove(listener); + } + + void addResponseListeners(Class listenerClass, EhcacheClientEntity.ResponseListener listener) { + this.responseListeners.put(listenerClass, listener); + this.entity.addResponseListener(listenerClass, listener); + } + + @SuppressWarnings("unchecked") + @Override + public void close() { + for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : + this.responseListeners.entrySet()) { + this.entity.removeResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener) classResponseListenerEntry.getValue()); + } + } + + @Override + public Chain get(long key) throws TimeoutException { + EhcacheEntityResponse response; + try { + response = entity.invoke(messageFactory.getOperation(key), false); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + if (response != null && response.getType() == EhcacheEntityResponse.Type.GET_RESPONSE) { + return ((EhcacheEntityResponse.GetResponse)response).getChain(); + } else { + throw new ServerStoreProxyException("Response for get operation was invalid : " + + (response != null ? response.getType().toString() : "null message")); + } + } + + @Override + public void append(long key, ByteBuffer payLoad) throws TimeoutException { + try { + entity.invoke(messageFactory.appendOperation(key, payLoad), true); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } + + @Override + public Chain getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException { + EhcacheEntityResponse response; + try { + response = entity.invoke(messageFactory.getAndAppendOperation(key, payLoad), true); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + if (response != null && response.getType() == EhcacheEntityResponse.Type.GET_RESPONSE) { + return ((EhcacheEntityResponse.GetResponse)response).getChain(); + } else { + throw new ServerStoreProxyException("Response for getAndAppend operation was invalid : " + + (response != null ? 
response.getType().toString() : "null message")); + } + } + + @Override + public void replaceAtHead(long key, Chain expect, Chain update) { + // TODO: Optimize this method to just send sequences for expect Chain + try { + entity.invokeAsync(messageFactory.replaceAtHeadOperation(key, expect, update), true); + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } + + @Override + public void clear() throws TimeoutException { + try { + entity.invoke(messageFactory.clearOperation(), true); + } catch (TimeoutException e) { + throw e; + } catch (Exception e) { + throw new ServerStoreProxyException(e); + } + } +} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java index 5780c4dff8..6920e447f6 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxy.java @@ -16,89 +16,18 @@ package org.ehcache.clustered.client.internal.store; import org.ehcache.clustered.client.internal.EhcacheClientEntity; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.common.internal.store.Chain; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeoutException; -/** - * @author Ludovic Orban - */ public class EventualServerStoreProxy implements ServerStoreProxy { - private static final Logger LOGGER = LoggerFactory.getLogger(EventualServerStoreProxy.class); - private final ServerStoreProxy delegate; - private final List invalidationListeners = new CopyOnWriteArrayList(); - private final EhcacheClientEntity entity; - private final Map, EhcacheClientEntity.ResponseListener> responseListeners - = new ConcurrentHashMap, EhcacheClientEntity.ResponseListener>(); - @SuppressWarnings("unchecked") public EventualServerStoreProxy(final ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { - this.entity = entity; - this.delegate = new NoInvalidationServerStoreProxy(messageFactory, entity); - this.responseListeners.put(EhcacheEntityResponse.ServerInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ServerInvalidateHash response) { - if (response.getCacheId().equals(messageFactory.getCacheId())) { - long key = response.getKey(); - LOGGER.debug("CLIENT: on cache {}, server requesting hash {} to be invalidated", messageFactory.getCacheId(), key); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateHash(key); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { - final String cacheId = response.getCacheId(); - final long key = response.getKey(); - final int invalidationId = 
response.getInvalidationId(); - - if (cacheId.equals(messageFactory.getCacheId())) { - LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateHash(key); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateAll.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { - final String cacheId = response.getCacheId(); - final int invalidationId = response.getInvalidationId(); - - if (cacheId.equals(messageFactory.getCacheId())) { - LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateAll(); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - - for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : - this.responseListeners.entrySet()) { - this.entity.addResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener) classResponseListenerEntry.getValue()); - } - + this.delegate = new CommonServerStoreProxy(messageFactory, entity); } @Override @@ -108,21 +37,17 @@ public String getCacheId() { @Override public void addInvalidationListener(InvalidationListener listener) { - invalidationListeners.add(listener); + delegate.addInvalidationListener(listener); } @Override public boolean removeInvalidationListener(InvalidationListener listener) { - return invalidationListeners.remove(listener); + return delegate.removeInvalidationListener(listener); } - @SuppressWarnings("unchecked") @Override public void close() { - for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : - this.responseListeners.entrySet()) { - this.entity.removeResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener) classResponseListenerEntry.getValue()); - } + delegate.close(); } @Override diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java deleted file mode 100644 index 61462b625a..0000000000 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxy.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.ehcache.clustered.client.internal.store; - -import org.ehcache.clustered.client.internal.EhcacheClientEntity; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; -import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; -import org.ehcache.clustered.common.internal.store.Chain; - -import java.nio.ByteBuffer; -import java.util.concurrent.TimeoutException; - -/** - * Provides client-side access to the services of a {@code ServerStore}. - */ -class NoInvalidationServerStoreProxy implements ServerStoreProxy { - - private final ServerStoreMessageFactory messageFactory; - private final EhcacheClientEntity entity; - - NoInvalidationServerStoreProxy(ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { - this.messageFactory = messageFactory; - this.entity = entity; - } - - @Override - public String getCacheId() { - return messageFactory.getCacheId(); - } - - @Override - public void addInvalidationListener(InvalidationListener listener) { - throw new UnsupportedOperationException(); - } - - @Override - public boolean removeInvalidationListener(InvalidationListener listener) { - throw new UnsupportedOperationException(); - } - - @Override - public void close() { - // No-op - } - - @Override - public Chain get(long key) throws TimeoutException { - EhcacheEntityResponse response; - try { - response = entity.invoke(messageFactory.getOperation(key), false); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - if (response != null && response.getType() == EhcacheEntityResponse.Type.GET_RESPONSE) { - return ((EhcacheEntityResponse.GetResponse)response).getChain(); - } else { - throw new ServerStoreProxyException("Response for get operation was invalid : " + - (response != null ? response.getType().toString() : "null message")); - } - } - - @Override - public void append(long key, ByteBuffer payLoad) throws TimeoutException { - try { - entity.invoke(messageFactory.appendOperation(key, payLoad), true); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - } - - @Override - public Chain getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException { - EhcacheEntityResponse response; - try { - response = entity.invoke(messageFactory.getAndAppendOperation(key, payLoad), true); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - if (response != null && response.getType() == EhcacheEntityResponse.Type.GET_RESPONSE) { - return ((EhcacheEntityResponse.GetResponse)response).getChain(); - } else { - throw new ServerStoreProxyException("Response for getAndAppend operation was invalid : " + - (response != null ? 
response.getType().toString() : "null message")); - } - } - - @Override - public void replaceAtHead(long key, Chain expect, Chain update) { - // TODO: Optimize this method to just send sequences for expect Chain - try { - entity.invokeAsync(messageFactory.replaceAtHeadOperation(key, expect, update), true); - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - } - - @Override - public void clear() throws TimeoutException { - try { - entity.invoke(messageFactory.clearOperation(), true); - } catch (TimeoutException e) { - throw e; - } catch (Exception e) { - throw new ServerStoreProxyException(e); - } - } -} diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java index 53810ceb28..f9bd1ff898 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/StrongServerStoreProxy.java @@ -24,12 +24,10 @@ import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; @@ -40,20 +38,16 @@ public class StrongServerStoreProxy implements ServerStoreProxy { private static final Logger LOGGER = LoggerFactory.getLogger(StrongServerStoreProxy.class); - private final ServerStoreProxy delegate; + private final CommonServerStoreProxy delegate; private final ConcurrentMap hashInvalidationsInProgress = new ConcurrentHashMap(); private final Lock invalidateAllLock = new ReentrantLock(); private volatile CountDownLatch invalidateAllLatch; - private final List invalidationListeners = new CopyOnWriteArrayList(); - private final Map, EhcacheClientEntity.ResponseListener> responseListeners - = new ConcurrentHashMap, EhcacheClientEntity.ResponseListener>(); private final EhcacheClientEntity entity; private final EhcacheClientEntity.ReconnectListener reconnectListener; private final EhcacheClientEntity.DisconnectionListener disconnectionListener; - @SuppressWarnings("unchecked") public StrongServerStoreProxy(final ServerStoreMessageFactory messageFactory, final EhcacheClientEntity entity) { - this.delegate = new NoInvalidationServerStoreProxy(messageFactory, entity); + this.delegate = new CommonServerStoreProxy(messageFactory, entity); this.entity = entity; this.reconnectListener = new EhcacheClientEntity.ReconnectListener() { @Override @@ -67,7 +61,7 @@ public void onHandleReconnect(ReconnectMessage reconnectMessage) { }; entity.addReconnectListener(reconnectListener); - this.responseListeners.put(EhcacheEntityResponse.HashInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { + delegate.addResponseListeners(EhcacheEntityResponse.HashInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { @Override public void onResponse(EhcacheEntityResponse.HashInvalidationDone response) { if (response.getCacheId().equals(messageFactory.getCacheId())) { @@ -82,7 +76,7 @@ public void onResponse(EhcacheEntityResponse.HashInvalidationDone response) { } } }); - this.responseListeners.put(EhcacheEntityResponse.AllInvalidationDone.class, new 
EhcacheClientEntity.ResponseListener() { + delegate.addResponseListeners(EhcacheEntityResponse.AllInvalidationDone.class, new EhcacheClientEntity.ResponseListener() { @Override public void onResponse(EhcacheEntityResponse.AllInvalidationDone response) { if (response.getCacheId().equals(messageFactory.getCacheId())) { @@ -106,74 +100,6 @@ public void onResponse(EhcacheEntityResponse.AllInvalidationDone response) { } } }); - this.responseListeners.put(EhcacheEntityResponse.ServerInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ServerInvalidateHash response) { - if (response.getCacheId().equals(messageFactory.getCacheId())) { - long key = response.getKey(); - LOGGER.debug("CLIENT: on cache {}, server requesting hash {} to be invalidated", messageFactory.getCacheId(), key); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateHash(key); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateHash.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ClientInvalidateHash response) { - final String cacheId = response.getCacheId(); - final long key = response.getKey(); - final int invalidationId = response.getInvalidationId(); - - if (cacheId.equals(messageFactory.getCacheId())) { - LOGGER.debug("CLIENT: doing work to invalidate hash {} from cache {} (ID {})", key, cacheId, invalidationId); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateHash(key); - } - - try { - LOGGER.debug("CLIENT: ack'ing invalidation of hash {} from cache {} (ID {})", key, cacheId, invalidationId); - entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), false); - } catch (Exception e) { - //TODO: what should be done here? - LOGGER.error("error acking client invalidation of hash {} on cache {}", key, cacheId, e); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - this.responseListeners.put(EhcacheEntityResponse.ClientInvalidateAll.class, new EhcacheClientEntity.ResponseListener() { - @Override - public void onResponse(EhcacheEntityResponse.ClientInvalidateAll response) { - final String cacheId = response.getCacheId(); - final int invalidationId = response.getInvalidationId(); - - if (cacheId.equals(messageFactory.getCacheId())) { - LOGGER.debug("CLIENT: doing work to invalidate all from cache {} (ID {})", cacheId, invalidationId); - for (InvalidationListener listener : invalidationListeners) { - listener.onInvalidateAll(); - } - - try { - LOGGER.debug("CLIENT: ack'ing invalidation of all from cache {} (ID {})", cacheId, invalidationId); - entity.invokeAsync(messageFactory.clientInvalidationAck(invalidationId), false); - } catch (Exception e) { - //TODO: what should be done here? 
- LOGGER.error("error acking client invalidation of all on cache {}", cacheId, e); - } - } else { - LOGGER.debug("CLIENT: on cache {}, ignoring invalidation on unrelated cache : {}", messageFactory.getCacheId(), response.getCacheId()); - } - } - }); - - for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : - this.responseListeners.entrySet()) { - this.entity.addResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener) classResponseListenerEntry.getValue()); - } this.disconnectionListener = new EhcacheClientEntity.DisconnectionListener() { @Override @@ -290,23 +216,19 @@ public String getCacheId() { @Override public void addInvalidationListener(InvalidationListener listener) { - invalidationListeners.add(listener); + delegate.addInvalidationListener(listener); } @Override public boolean removeInvalidationListener(InvalidationListener listener) { - return invalidationListeners.remove(listener); + return delegate.removeInvalidationListener(listener); } - @SuppressWarnings("unchecked") @Override public void close() { this.entity.removeDisconnectionListener(this.disconnectionListener); this.entity.removeReconnectListener(this.reconnectListener); - for (Map.Entry, EhcacheClientEntity.ResponseListener> classResponseListenerEntry : - this.responseListeners.entrySet()) { - this.entity.removeResponseListener(classResponseListenerEntry.getKey(), (EhcacheClientEntity.ResponseListener) classResponseListenerEntry.getValue()); - } + delegate.close(); } @Override diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java index 07871dca25..5ec50e4569 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/ClusteredStoreTest.java @@ -101,7 +101,7 @@ public void setup() throws Exception { ); clientEntity.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); ServerStoreMessageFactory factory = new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity.getClientId()); - ServerStoreProxy serverStoreProxy = new NoInvalidationServerStoreProxy(factory, clientEntity); + ServerStoreProxy serverStoreProxy = new CommonServerStoreProxy(factory, clientEntity); TestTimeSource testTimeSource = new TestTimeSource(); diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java similarity index 96% rename from clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java rename to clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java index 4b58bb1f0e..d32c635208 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/NoInvalidationServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxyTest.java @@ -44,13 +44,13 @@ import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; -public class NoInvalidationServerStoreProxyTest { +public class CommonServerStoreProxyTest { private static final String CACHE_IDENTIFIER = "testCache"; private static final URI CLUSTER_URI = 
URI.create("terracotta://localhost:9510"); private static EhcacheClientEntity clientEntity; - private static NoInvalidationServerStoreProxy serverStoreProxy; + private static CommonServerStoreProxy serverStoreProxy; @BeforeClass public static void setUp() throws Exception { @@ -72,7 +72,7 @@ public static void setUp() throws Exception { clientEntity.createCache(CACHE_IDENTIFIER, new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class .getName(), null)); - serverStoreProxy = new NoInvalidationServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity.getClientId()), clientEntity); + serverStoreProxy = new CommonServerStoreProxy(new ServerStoreMessageFactory(CACHE_IDENTIFIER, clientEntity.getClientId()), clientEntity); } @AfterClass diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java index 52b0cfc674..f37fcaf29f 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/store/EventualServerStoreProxyTest.java @@ -19,13 +19,17 @@ import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.client.internal.EhcacheClientEntityFactory; +import org.ehcache.clustered.client.internal.EhcacheClientEntityService; import org.ehcache.clustered.client.internal.UnitTestConnectionService; import org.ehcache.clustered.client.internal.UnitTestConnectionService.PassthroughServerBuilder; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; +import org.ehcache.clustered.server.ObservableEhcacheServerEntityService; import org.ehcache.config.units.MemoryUnit; import org.ehcache.impl.serialization.LongSerializer; import org.junit.AfterClass; @@ -37,6 +41,7 @@ import java.util.Collections; import java.util.List; import java.util.Properties; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -60,11 +65,16 @@ public class EventualServerStoreProxyTest { private static EhcacheClientEntity clientEntity2; private static EventualServerStoreProxy serverStoreProxy1; private static EventualServerStoreProxy serverStoreProxy2; + private static ObservableEhcacheServerEntityService observableEhcacheServerEntityService = new ObservableEhcacheServerEntityService(); @BeforeClass public static void setUp() throws Exception { UnitTestConnectionService.add(CLUSTER_URI, new PassthroughServerBuilder() + .serverEntityService(observableEhcacheServerEntityService) + .clientEntityService(new EhcacheClientEntityService()) + .serverEntityService(new 
VoltronReadWriteLockServerEntityService()) + .clientEntityService(new VoltronReadWriteLockEntityClientService()) .resource("defaultResource", 128, MemoryUnit.MB) .build()); UnitTestConnectionService unitTestConnectionService = new UnitTestConnectionService(); @@ -85,7 +95,7 @@ public static void setUp() throws Exception { ServerStoreConfiguration serverStoreConfiguration = new ServerStoreConfiguration(resourcePool.getPoolAllocation(), Long.class.getName(), Long.class.getName(), Long.class.getName(), Long.class.getName(), LongSerializer.class.getName(), LongSerializer.class - .getName(), Consistency.STRONG); + .getName(), Consistency.EVENTUAL); clientEntity1.createCache(CACHE_IDENTIFIER, serverStoreConfiguration); // required to attach the store to the client @@ -168,6 +178,8 @@ public void onInvalidateAll() { // test that each time the server evicted, the other client got notified on top of normal invalidations assertThat(store2InvalidatedHashes.size(), is(ITERATIONS + evictionCount)); + assertThatClientsWaitingForInvalidationIsEmpty(); + serverStoreProxy1.removeInvalidationListener(listener1); serverStoreProxy2.removeInvalidationListener(listener2); } @@ -195,6 +207,7 @@ public void onInvalidateAll() { latch.await(5, TimeUnit.SECONDS); assertThat(invalidatedHash.get(), is(1L)); + assertThatClientsWaitingForInvalidationIsEmpty(); serverStoreProxy1.removeInvalidationListener(listener); } @@ -221,6 +234,7 @@ public void onInvalidateAll() { latch.await(5, TimeUnit.SECONDS); assertThat(invalidatedHash.get(), is(1L)); + assertThatClientsWaitingForInvalidationIsEmpty(); serverStoreProxy1.removeInvalidationListener(listener); } @@ -247,7 +261,23 @@ public void onInvalidateAll() { latch.await(5, TimeUnit.SECONDS); assertThat(invalidatedAll.get(), is(true)); + assertThatClientsWaitingForInvalidationIsEmpty(); serverStoreProxy1.removeInvalidationListener(listener); } + private static void assertThatClientsWaitingForInvalidationIsEmpty() throws Exception { + ObservableEhcacheServerEntityService.ObservableEhcacheActiveEntity activeEntity = observableEhcacheServerEntityService.getServedActiveEntities().get(0); + CompletableFuture future = CompletableFuture.supplyAsync(() -> { + while (true) { + try { + if (activeEntity.getClientsWaitingForInvalidation().size() == 0) { + return true; + } + } catch (Exception e) { + } + } + }); + assertThat(future.get(5, TimeUnit.SECONDS), is(true)); + } + } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java b/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java index e61b3895df..33a97bd18c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java @@ -136,5 +136,11 @@ public Set getSharedResourcePoolIds() { public Set getDedicatedResourcePoolIds() { return ehcacheStateService.getDedicatedResourcePoolIds(); } + + public Map getClientsWaitingForInvalidation() throws Exception { + Field field = activeEntity.getClass().getDeclaredField("clientsWaitingForInvalidation"); + field.setAccessible(true); + return (Map)field.get(activeEntity); + } } } From 978cf5993c1766af5a603ea504ca454df9cd729f Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Fri, 11 Nov 2016 19:40:36 -0500 Subject: [PATCH 136/218] :heavy_plus_sign: Close #1575: Support management call to start stat collector Depends on 
Terracotta-OSS/terracotta-platform#178 --- clustered/client/build.gradle | 2 +- .../AbstractClusteringManagementTest.java | 63 +++++++- .../ClusteredStatisticsCountTest.java | 3 +- .../ClusteredStatisticsRatioTest.java | 2 +- .../ClusteringManagementServiceTest.java | 6 +- clustered/server/build.gradle | 6 +- .../management/AbstractExposedStatistics.java | 4 +- .../AbstractStatisticsManagementProvider.java | 4 +- .../server/management/ClientStateBinding.java | 2 +- ...ClientStateSettingsManagementProvider.java | 2 +- .../server/management/Management.java | 62 +------- .../management/OffHeapResourceBinding.java | 2 +- ...eapResourceSettingsManagementProvider.java | 4 +- .../server/management/PoolBinding.java | 2 +- .../PoolSettingsManagementProvider.java | 4 +- .../PoolStatisticsManagementProvider.java | 2 +- .../server/management/ServerStoreBinding.java | 2 +- ...ServerStoreSettingsManagementProvider.java | 4 +- ...rverStoreStatisticsManagementProvider.java | 2 +- .../StatisticCollectorManagementProvider.java | 147 ++++-------------- .../server/EhcacheActiveEntityTest.java | 4 +- .../server/EhcachePassiveEntityTest.java | 4 +- .../ehcache/management/CollectorService.java | 3 +- .../registry/DefaultCollectorService.java | 127 ++++----------- .../registry/DefaultCollectorServiceTest.java | 31 ---- 25 files changed, 150 insertions(+), 344 deletions(-) diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 2dbfaa4a83..9553cc381a 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -23,7 +23,7 @@ dependencies { compileOnly project(':xml') compile project(':clustered:common') provided "org.terracotta:entity-client-api:$parent.entityApiVersion" - provided "org.terracotta.management:management-registry-service-api:$parent.managementVersion" // provided in management-server jar + provided "org.terracotta.management:monitoring-service-api:$parent.managementVersion" // provided in management-server jar testCompile project(':api') testCompile project(':xml') diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index e59624bc92..5987b3dfcb 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -36,6 +36,9 @@ import org.terracotta.management.entity.management.client.ManagementAgentService; import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntityFactory; import org.terracotta.management.entity.monitoring.client.MonitoringServiceProxyEntity; +import org.terracotta.management.entity.tms.TmsAgentConfig; +import org.terracotta.management.entity.tms.client.TmsAgentEntity; +import org.terracotta.management.entity.tms.client.TmsAgentEntityFactory; import org.terracotta.management.model.call.ContextualReturn; import org.terracotta.management.model.call.Parameter; import org.terracotta.management.model.cluster.Client; @@ -45,8 +48,6 @@ import org.terracotta.management.model.message.Message; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.model.stats.Statistic; -import org.terracotta.management.model.stats.StatisticHistory; import 
org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; @@ -56,7 +57,6 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.Properties; import java.util.Scanner; import java.util.concurrent.Exchanger; @@ -194,7 +194,7 @@ public void init() throws Exception { } } - protected static ContextualReturn sendManagementCallToCollectStats(String... statNames) throws Exception { + protected static ContextualReturn sendManagementCallOnClientToCollectStats(String... statNames) throws Exception { Connection managementConnection = CLUSTER.newConnection(); try { ManagementAgentService agent = new ManagementAgentService(new ManagementAgentEntityFactory(managementConnection).retrieveOrCreate(new ManagementAgentConfig())); @@ -217,7 +217,7 @@ protected static ContextualReturn sendManagementCallToCollectStats(String... Context.create("cacheManagerName", "my-super-cache-manager"), "StatisticCollectorCapability", "updateCollectedStatistics", - Collection.class, + Void.TYPE, new Parameter("StatisticsCapability"), new Parameter(asList(statNames), Collection.class.getName()))); @@ -230,6 +230,57 @@ protected static ContextualReturn sendManagementCallToCollectStats(String... } } + protected static void sendManagementCallOnEntityToCollectStats() throws Exception { + Connection managementConnection = CLUSTER.newConnection(); + try { + TmsAgentEntityFactory entityFactory = new TmsAgentEntityFactory(managementConnection, AbstractClusteringManagementTest.class.getName()); + TmsAgentEntity tmsAgentEntity = entityFactory.retrieveOrCreate(new TmsAgentConfig()); + + // get the context from the topology for the ehcache server entity + Context context = tmsAgentEntity.readTopology().get().getSingleStripe().getActiveServerEntity(serverEntityIdentifier).get().getContext(); + + ContextualReturn result = tmsAgentEntity.call( + context, + "StatisticCollectorCapability", + "updateCollectedStatistics", + Void.TYPE, + new Parameter("PoolStatistics"), + new Parameter(asList( + "Pool:AllocatedSize" + ), Collection.class.getName()) + ).get(); + + assertThat(result.hasExecuted(), is(true)); + + result = tmsAgentEntity.call( + context, + "StatisticCollectorCapability", + "updateCollectedStatistics", + Void.TYPE, + new Parameter("ServerStoreStatistics"), + new Parameter(asList( + "Store:AllocatedMemory", + "Store:DataAllocatedMemory", + "Store:OccupiedMemory", + "Store:DataOccupiedMemory", + "Store:Entries", + "Store:UsedSlotCount", + "Store:DataVitalMemory", + "Store:VitalMemory", + "Store:ReprobeLength", + "Store:RemovedSlotCount", + "Store:DataSize", + "Store:TableCapacity" + ), Collection.class.getName()) + ).get(); + + assertThat(result.hasExecuted(), is(true)); + + } finally { + managementConnection.close(); + } + } + protected static List waitForNextStats() { // uses the monitoring consumre entity to get the content of the stat buffer when some stats are collected while (!Thread.currentThread().isInterrupted()) { @@ -238,7 +289,7 @@ protected static List waitForNextStats() { .filter(message -> message.getType().equals("STATISTICS")) .flatMap(message -> message.unwrap(ContextualStatistics.class).stream()) .collect(Collectors.toList()); - if(messages.isEmpty()) { + if (messages.isEmpty()) { Thread.yield(); } else { return messages; diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java 
b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java index 239008fd6d..8d617067b0 100755 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java @@ -22,7 +22,6 @@ import org.ehcache.Cache; import org.junit.Assert; import org.junit.Test; -import org.terracotta.management.model.call.ContextualReturn; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.Sample; import org.terracotta.management.model.stats.history.CounterHistory; @@ -36,7 +35,7 @@ public class ClusteredStatisticsCountTest extends AbstractClusteringManagementTe @Test public void countTest() throws Exception { - sendManagementCallToCollectStats("Cache:HitCount","Clustered:HitCount","Cache:MissCount","Clustered:MissCount"); + sendManagementCallOnClientToCollectStats("Cache:HitCount","Clustered:HitCount","Cache:MissCount","Clustered:MissCount"); Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); cache.put("one", "val1"); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java index df3bc4a2e4..dd05c4207d 100755 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsRatioTest.java @@ -36,7 +36,7 @@ public class ClusteredStatisticsRatioTest extends AbstractClusteringManagementTe @Test public void ratioTest() throws Exception { String[] statNames = {"Cache:HitRatio", "Clustered:HitRatio", "Cache:MissRatio", "Clustered:MissRatio"}; - sendManagementCallToCollectStats(statNames); + sendManagementCallOnClientToCollectStats(statNames); // When testing ratios, we need to wait for the first computation (we do not have any choice) to happen because ratio depends on 2 other sampled statistics. // If you do not wait, then you'll always get some NaN because the hits will be done within the 1st second, and the hits won't be done in the right "window". 
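[Editor's note, illustration only — not part of the patch and not Ehcache's statistics implementation.] The comment at the end of the ratio-test hunk above reasons that a hit ratio is derived from two other sampled counters, so until the first computation window has data the ratio is 0/0 and is reported as NaN. A minimal, self-contained Java sketch of that arithmetic (all names here are hypothetical):

public class HitRatioSample {
  public static void main(String[] args) {
    // Before the first sampling window: no hits or misses have been observed yet.
    long hits = 0, misses = 0;
    double ratio = (double) hits / (hits + misses);   // 0.0 / 0.0
    System.out.println(Double.isNaN(ratio));          // true -> the first sample reads NaN

    // After a window that actually contains traffic, the ratio becomes meaningful.
    hits = 2;
    misses = 2;
    ratio = (double) hits / (hits + misses);
    System.out.println(ratio);                        // 0.5
  }
}

This is why the test waits for the first computation before asserting on ratio values instead of asserting immediately after the puts and gets.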
diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index b0d09a5299..3260cf6c1d 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -23,7 +23,6 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.runners.MethodSorters; -import org.terracotta.management.model.call.ContextualReturn; import org.terracotta.management.model.capabilities.Capability; import org.terracotta.management.model.capabilities.descriptors.Descriptor; import org.terracotta.management.model.capabilities.descriptors.Settings; @@ -124,7 +123,7 @@ public void test_D_server_capabilities_exposed() throws Exception { assertThat(capabilities[3].getName(), equalTo("PoolSettings")); assertThat(capabilities[4].getName(), equalTo("ServerStoreStatistics")); assertThat(capabilities[5].getName(), equalTo("PoolStatistics")); - assertThat(capabilities[6].getName(), equalTo("StatisticCollector")); + assertThat(capabilities[6].getName(), equalTo("StatisticCollectorCapability")); assertThat(capabilities[1].getDescriptors(), hasSize(3)); // time + 2 resources assertThat(capabilities[2].getDescriptors(), hasSize(4)); // time descriptor + 3 dedicated store @@ -208,7 +207,8 @@ public void test_F_notifs_on_remove_cache() throws Exception { @Test public void test_G_stats_collection() throws Exception { - sendManagementCallToCollectStats("Cache:HitCount"); + sendManagementCallOnEntityToCollectStats(); + sendManagementCallOnClientToCollectStats("Cache:HitCount"); Cache cache1 = cacheManager.getCache("dedicated-cache-1", String.class, String.class); cache1.put("key1", "val"); diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index 62efa41fe8..cbd86062ea 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -29,10 +29,8 @@ dependencies { compile group: 'org.terracotta', name: 'offheap-resource', version: parent.offheapResourceVersion compile group: 'org.terracotta', name: 'offheap-store', version: parent.offheapVersion compile group: 'org.slf4j', name: 'slf4j-api', version: parent.slf4jVersion - compile("org.terracotta.management:management-registry-service-api:$parent.managementVersion") { - // provided in management-server jar, but necessary so that ehcache can work without depending on management - exclude group: 'org.terracotta.management', module: 'management-registry' - exclude group: 'org.terracotta.management', module: 'management-model' + compile("org.terracotta.management:monitoring-service-api:$parent.managementVersion") { + transitive = false } compile ("org.terracotta:statistics:$parent.statisticVersion") { exclude group:'org.slf4j', module:'slf4j-api' diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java index 76aff78ef3..8ab4bf4488 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java @@ -30,8 +30,8 @@ import org.terracotta.management.model.stats.history.RateHistory; 
import org.terracotta.management.model.stats.history.RatioHistory; import org.terracotta.management.model.stats.history.SizeHistory; -import org.terracotta.management.service.registry.provider.AliasBinding; -import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; +import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; +import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; import org.terracotta.offheapstore.util.FindbugsSuppressWarnings; import org.terracotta.statistics.extended.SampleType; import org.terracotta.statistics.extended.SampledStatistic; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java index 4242a6e355..bdfeacbb9b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java @@ -22,8 +22,8 @@ import org.terracotta.management.registry.action.ExposedObject; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; -import org.terracotta.management.service.registry.provider.AliasBinding; -import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; +import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; +import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; import java.util.Collection; import java.util.HashMap; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java index 32e38ae518..bbb9f6a0a3 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateBinding.java @@ -17,7 +17,7 @@ import org.ehcache.clustered.server.ClientState; import org.terracotta.entity.ClientDescriptor; -import org.terracotta.management.service.registry.provider.ClientBinding; +import org.terracotta.management.service.monitoring.registry.provider.ClientBinding; final class ClientStateBinding extends ClientBinding { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java index 460d4ef7e1..9ad74992c2 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java @@ -22,7 +22,7 @@ import org.terracotta.management.model.context.Context; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; -import org.terracotta.management.service.registry.provider.ClientBindingManagementProvider; +import org.terracotta.management.service.monitoring.registry.provider.ClientBindingManagementProvider; import java.util.Collection; import java.util.Collections; diff --git 
a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java index 9929a80377..aad30b7fda 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -23,23 +23,18 @@ import org.slf4j.LoggerFactory; import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.ServiceRegistry; -import org.terracotta.management.model.call.Parameter; import org.terracotta.management.model.context.Context; -import org.terracotta.management.service.registry.ConsumerManagementRegistry; -import org.terracotta.management.service.registry.ConsumerManagementRegistryConfiguration; -import org.terracotta.management.service.registry.provider.ClientBinding; +import org.terracotta.management.service.monitoring.ConsumerManagementRegistry; +import org.terracotta.management.service.monitoring.ConsumerManagementRegistryConfiguration; +import org.terracotta.management.service.monitoring.registry.provider.ClientBinding; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; -import java.util.Collection; import java.util.Set; -import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicLong; -import static java.util.Arrays.asList; - public class Management { private static final Logger LOGGER = LoggerFactory.getLogger(Management.class); @@ -112,58 +107,11 @@ public void init() { managementRegistry.addManagementProvider(collectorManagementProvider); - // start collecting stats - collectorManagementProvider.start(); + // start the stat collector (it won't collect any stats though, because they need to be configured through a management call) + collectorManagementProvider.init(); // expose the management registry inside voltorn managementRegistry.refresh(); - - //TODO FIXME: following code should be triggered by a remote management call (https://github.com/Terracotta-OSS/terracotta-apis/issues/168) - try { - LOGGER.trace("init() - activating statistics"); - - Context entityContext = Context.create(managementRegistry.getContextContainer().getName(), managementRegistry.getContextContainer().getValue()); - - managementRegistry - .withCapability("StatisticCollector") - .call("updateCollectedStatistics", - new Parameter("PoolStatistics"), - new Parameter(asList( - "Pool:AllocatedSize" - ), Collection.class.getName())) - .on(entityContext) - .build() - .execute() - .getSingleResult() - .getValue(); - - managementRegistry - .withCapability("StatisticCollector") - .call("updateCollectedStatistics", - new Parameter("ServerStoreStatistics"), - new Parameter(asList( - "Store:AllocatedMemory", - "Store:DataAllocatedMemory", - "Store:OccupiedMemory", - "Store:DataOccupiedMemory", - "Store:Entries", - "Store:UsedSlotCount", - "Store:DataVitalMemory", - "Store:VitalMemory", - "Store:ReprobeLength", - "Store:RemovedSlotCount", - "Store:DataSize", - "Store:TableCapacity" - ), Collection.class.getName())) - .on(entityContext) - .build() - .execute() - .getSingleResult() - .getValue(); - - } catch (ExecutionException e) { - throw new RuntimeException(e.getCause()); - } } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java 
b/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java index 2a60925c45..7db37d37de 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java @@ -15,7 +15,7 @@ */ package org.ehcache.clustered.server.management; -import org.terracotta.management.service.registry.provider.AliasBinding; +import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; import org.terracotta.offheapresource.OffHeapResource; class OffHeapResourceBinding extends AliasBinding { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java index ba1223309a..cd25fd2d49 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java @@ -20,7 +20,7 @@ import org.terracotta.management.model.context.Context; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; -import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; +import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; import java.util.Collection; import java.util.Collections; @@ -44,7 +44,7 @@ public Collection getDescriptors() { @Override protected ExposedOffHeapResourceBinding wrap(OffHeapResourceBinding managedObject) { - return new ExposedOffHeapResourceBinding(managedObject, getConsumerId()); + return new ExposedOffHeapResourceBinding(managedObject, getMonitoringService().getConsumerId()); } private static class ExposedOffHeapResourceBinding extends ExposedAliasBinding { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java index 14818e10c5..511bdadb16 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolBinding.java @@ -17,7 +17,7 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.terracotta.management.model.Objects; -import org.terracotta.management.service.registry.provider.AliasBinding; +import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; class PoolBinding extends AliasBinding { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java index db1bec6a0b..f5ca9d8bba 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java @@ -22,7 +22,7 @@ import org.terracotta.management.registry.action.ExposedObject; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; -import 
org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; +import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; import java.util.Collection; import java.util.Collections; @@ -55,7 +55,7 @@ public Collection> getExposedObjects() { @Override protected ExposedPoolBinding wrap(PoolBinding managedObject) { - return new ExposedPoolBinding(managedObject, getConsumerId()); + return new ExposedPoolBinding(managedObject, getMonitoringService().getConsumerId()); } private static class ExposedPoolBinding extends ExposedAliasBinding { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java index df1b1aa894..9b54fd5b2a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java @@ -62,7 +62,7 @@ protected AbstractExposedStatistics internalWrap(PoolBinding manage Objects.requireNonNull(resourcePageSource, "Unable to locale pool " + poolName); } - return new PoolExposedStatistics(getConsumerId(), managedObject, getStatisticConfiguration(), executor, resourcePageSource); + return new PoolExposedStatistics(getMonitoringService().getConsumerId(), managedObject, getStatisticConfiguration(), executor, resourcePageSource); } private static class PoolExposedStatistics extends AbstractExposedStatistics { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java index 2d2cdc185c..2734cf60ec 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java @@ -16,7 +16,7 @@ package org.ehcache.clustered.server.management; import org.ehcache.clustered.server.ServerStoreImpl; -import org.terracotta.management.service.registry.provider.AliasBinding; +import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; class ServerStoreBinding extends AliasBinding { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java index 027ce2d5c0..edd9e9b3d1 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java @@ -21,7 +21,7 @@ import org.terracotta.management.model.context.Context; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; -import org.terracotta.management.service.registry.provider.AliasBindingManagementProvider; +import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; import java.util.Collection; import java.util.Collections; @@ -45,7 +45,7 @@ public Collection getDescriptors() { @Override protected ExposedServerStoreBinding wrap(ServerStoreBinding managedObject) { - return new 
ExposedServerStoreBinding(managedObject, getConsumerId()); + return new ExposedServerStoreBinding(managedObject, getMonitoringService().getConsumerId()); } private static class ExposedServerStoreBinding extends ExposedAliasBinding { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java index fdfe831aea..54d8d95184 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java @@ -39,7 +39,7 @@ class ServerStoreStatisticsManagementProvider extends AbstractStatisticsManageme @Override protected AbstractExposedStatistics internalWrap(ServerStoreBinding managedObject) { - return new ServerStoreExposedStatistics(getConsumerId(), managedObject, getStatisticConfiguration(), executor); + return new ServerStoreExposedStatistics(getMonitoringService().getConsumerId(), managedObject, getStatisticConfiguration(), executor); } private static class ServerStoreExposedStatistics extends AbstractExposedStatistics { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java index 407fa040de..64a7ad37ff 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java @@ -15,67 +15,49 @@ */ package org.ehcache.clustered.server.management; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.registry.ManagementRegistry; -import org.terracotta.management.registry.StatisticQuery; import org.terracotta.management.registry.action.ExposedObject; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.registry.collect.DefaultStatisticCollector; import org.terracotta.management.registry.collect.StatisticCollector; import org.terracotta.management.registry.collect.StatisticCollectorProvider; -import org.terracotta.management.service.registry.MonitoringResolver; -import org.terracotta.management.service.registry.provider.ConsumerManagementProvider; +import org.terracotta.management.service.monitoring.MonitoringService; +import org.terracotta.management.service.monitoring.registry.provider.MonitoringServiceAware; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; -@Named("StatisticCollector") +@Named("StatisticCollectorCapability") @RequiredContext({@Named("consumerId")}) -class StatisticCollectorManagementProvider extends StatisticCollectorProvider implements 
ConsumerManagementProvider { +class StatisticCollectorManagementProvider extends StatisticCollectorProvider implements MonitoringServiceAware { - private static final Logger LOGGER = LoggerFactory.getLogger(StatisticCollectorManagementProvider.class); - - private final ManagementRegistry managementRegistry; - private final StatisticConfiguration statisticConfiguration; - private final ScheduledExecutorService scheduledExecutorService; - private final String[] statsCapabilitynames; - private final ConcurrentMap selectedStatsPerCapability = new ConcurrentHashMap<>(); - - private volatile MonitoringResolver resolver; + private volatile MonitoringService monitoringService; + private final DefaultStatisticCollector statisticCollector; StatisticCollectorManagementProvider(ManagementRegistry managementRegistry, StatisticConfiguration statisticConfiguration, ScheduledExecutorService scheduledExecutorService, String[] statsCapabilitynames) { - super(StatisticCollector.class, null); - this.managementRegistry = managementRegistry; - this.statisticConfiguration = statisticConfiguration; - this.scheduledExecutorService = scheduledExecutorService; - this.statsCapabilitynames = statsCapabilitynames; - } - - @Override - public void accept(MonitoringResolver resolver) { - this.resolver = resolver; + super(StatisticCollector.class, Context.create(managementRegistry.getContextContainer().getName(), managementRegistry.getContextContainer().getValue())); + + long timeToDisableMs = TimeUnit.MILLISECONDS.convert(statisticConfiguration.timeToDisable(), statisticConfiguration.timeToDisableUnit()); + long pollingIntervalMs = Math.round(timeToDisableMs * 0.75); // we poll at 75% of the time to disable (before the time to disable happens) + + statisticCollector = new DefaultStatisticCollector( + managementRegistry, + scheduledExecutorService, + statistics -> monitoringService.pushServerEntityStatistics(statistics.toArray(new ContextualStatistics[statistics.size()])), + // TODO FIXME: there is no timesource service in voltron: https://github.com/Terracotta-OSS/terracotta-apis/issues/167 + System::currentTimeMillis, + pollingIntervalMs, + TimeUnit.MILLISECONDS, + statsCapabilitynames + ); } @Override - public boolean pushServerEntityNotification(StatisticCollector managedObjectSource, String type, Map attrs) { - return false; - } - - @Override - protected ExposedObject wrap(StatisticCollector managedObject) { - return new StatisticCollectorProvider.ExposedStatisticCollector<>(managedObject, Context.create("consumerId", String.valueOf(resolver.getConsumerId()))); + public void setMonitoringService(MonitoringService monitoringService) { + this.monitoringService = monitoringService; } @Override @@ -83,84 +65,9 @@ protected void dispose(ExposedObject exposedObject) { exposedObject.getTarget().stopStatisticCollector(); } - void start() { - StatisticCollector managedObject = new StatisticCollector() { - - private volatile ScheduledFuture task; - - @Override - public void startStatisticCollector() { - if (task == null) { - LOGGER.trace("startStatisticCollector()"); - - long timeToDisableMs = TimeUnit.MILLISECONDS.convert(statisticConfiguration.timeToDisable(), statisticConfiguration.timeToDisableUnit()); - long pollingIntervalMs = Math.round(timeToDisableMs * 0.75); // we poll at 75% of the time to disable (before the time to disable happens) - final AtomicLong lastPoll = new AtomicLong(getTimeMs()); - - task = scheduledExecutorService.scheduleWithFixedDelay(() -> { - try { - if (task != null && 
!selectedStatsPerCapability.isEmpty()) { - Collection statistics = new ArrayList<>(); - long since = lastPoll.get(); - - selectedStatsPerCapability.entrySet() - .stream() - .filter(entry -> Arrays.binarySearch(statsCapabilitynames, entry.getKey()) >= 0) - .forEach(entry -> { - AbstractStatisticsManagementProvider provider = (AbstractStatisticsManagementProvider) managementRegistry.getManagementProvidersByCapability(entry.getKey()) - .iterator().next(); - // note: .iterator().next() because the management registry is not shared, so there cannot be more than 1 capability with the same name. - Collection allContexts = provider.getExposedObjects().stream().map(ExposedObject::getContext).collect(Collectors.toList()); - for (ContextualStatistics contextualStatistics : entry.getValue().since(since).on(allContexts).build().execute()) { - statistics.add(contextualStatistics); - } - }); - - // next time, only poll history from this time - lastPoll.set(getTimeMs()); - - if (task != null && !statistics.isEmpty() && resolver != null) { - resolver.pushServerEntityStatistics(statistics.toArray(new ContextualStatistics[statistics.size()])); - } - } - } catch (RuntimeException e) { - LOGGER.error("StatisticCollector: " + e.getMessage(), e); - } - }, pollingIntervalMs, pollingIntervalMs, TimeUnit.MILLISECONDS); - } - } - - @Override - public void stopStatisticCollector() { - if (task != null) { - LOGGER.trace("stopStatisticCollector()"); - ScheduledFuture _task = task; - task = null; - _task.cancel(false); - } - } - - @Override - public void updateCollectedStatistics(String capabilityName, Collection statisticNames) { - if (!statisticNames.isEmpty()) { - LOGGER.trace("updateCollectedStatistics({}, {})", capabilityName, statisticNames); - StatisticQuery.Builder builder = managementRegistry.withCapability(capabilityName).queryStatistics(statisticNames); - selectedStatsPerCapability.put(capabilityName, builder); - } else { - // we clear the stats set - selectedStatsPerCapability.remove(capabilityName); - } - } - }; - - register(managedObject); - - managedObject.startStatisticCollector(); - } - - private long getTimeMs() { - // TODO FIXME: there is no timesource service in voltron: https://github.com/Terracotta-OSS/terracotta-apis/issues/167 - return System.currentTimeMillis(); + void init() { + register(statisticCollector); + statisticCollector.startStatisticCollector(); } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index a470011102..007ed2ce94 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -59,8 +59,8 @@ import org.terracotta.entity.PassiveSynchronizationChannel; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; -import org.terracotta.management.service.registry.ConsumerManagementRegistry; -import org.terracotta.management.service.registry.ConsumerManagementRegistryConfiguration; +import org.terracotta.management.service.monitoring.ConsumerManagementRegistry; +import org.terracotta.management.service.monitoring.ConsumerManagementRegistryConfiguration; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; diff --git 
a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index 4d52527364..041af8fb6c 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -33,8 +33,8 @@ import org.terracotta.entity.IEntityMessenger; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; -import org.terracotta.management.service.registry.ConsumerManagementRegistry; -import org.terracotta.management.service.registry.ConsumerManagementRegistryConfiguration; +import org.terracotta.management.service.monitoring.ConsumerManagementRegistry; +import org.terracotta.management.service.monitoring.ConsumerManagementRegistryConfiguration; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; diff --git a/management/src/main/java/org/ehcache/management/CollectorService.java b/management/src/main/java/org/ehcache/management/CollectorService.java index 9acf6f126b..69516b5f2d 100644 --- a/management/src/main/java/org/ehcache/management/CollectorService.java +++ b/management/src/main/java/org/ehcache/management/CollectorService.java @@ -19,7 +19,6 @@ import org.ehcache.spi.service.Service; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.registry.collect.StatisticCollector; import java.util.Collection; @@ -28,7 +27,7 @@ *

* The collecting time is automatically calculated from {@link StatisticsProviderConfiguration#timeToDisable()} */ -public interface CollectorService extends StatisticCollector, Service { +public interface CollectorService extends Service { interface Collector { diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java b/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java index deca1b2e32..37a3c82e2d 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java +++ b/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java @@ -31,24 +31,14 @@ import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.terracotta.management.model.context.Context; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.registry.StatisticQuery; +import org.terracotta.management.registry.collect.DefaultStatisticCollector; +import org.terracotta.management.registry.collect.StatisticCollector; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; @@ -63,11 +53,6 @@ private enum EhcacheNotification { CACHE_MANAGER_CLOSED, } - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultCollectorService.class); - - private volatile ScheduledFuture task; - - private final ConcurrentMap selectedStatsPerCapability = new ConcurrentHashMap(); private final Collector collector; private volatile TimeSource timeSource; @@ -76,6 +61,8 @@ private enum EhcacheNotification { private volatile InternalCacheManager cacheManager; private volatile ManagementRegistryServiceConfiguration configuration; + private volatile DefaultStatisticCollector statisticCollector; + public DefaultCollectorService() { this(Collector.EMPTY); } @@ -92,6 +79,30 @@ public synchronized void start(ServiceProvider serviceProvider) { cacheManager = serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); scheduledExecutorService = serviceProvider.getService(ExecutionService.class).getScheduledExecutor(configuration.getCollectorExecutorAlias()); + StatisticsProviderConfiguration providerConfiguration = configuration.getConfigurationFor(EhcacheStatisticsProvider.class); + long timeToDisableMs = TimeUnit.MILLISECONDS.convert(providerConfiguration.timeToDisable(), providerConfiguration.timeToDisableUnit()); + long pollingIntervalMs = Math.round(timeToDisableMs * 0.75); // we poll at 75% of the time to disable (before the time to disable happens) + + statisticCollector = new DefaultStatisticCollector( + managementRegistry, + scheduledExecutorService, + new StatisticCollector.Collector() { + @Override + public void onStatistics(Collection statistics) { + collector.onStatistics(statistics); + } + }, + new StatisticCollector.TimeProvider() { + @Override + public long getTimeMillis() { + return 
timeSource.getTimeMillis(); + } + }, + pollingIntervalMs, + TimeUnit.MILLISECONDS, + new String[]{"StatisticsCapability"} // the only stats capability available at the moment + ); + cacheManager.registerListener(this); } @@ -101,7 +112,7 @@ public synchronized void stop() { // so deregisterListener is done in the stateTransition listener //cacheManager.deregisterListener(this); - stopStatisticCollector(); + statisticCollector.stopStatisticCollector(); shutdownNow(scheduledExecutorService); } @@ -128,7 +139,7 @@ public void stateTransition(Status from, Status to) { case AVAILABLE: // .register() call should be there when CM is AVAILABLE // this is to expose the stats collector for management calls - managementRegistry.register(this); + managementRegistry.register(statisticCollector); collector.onNotification( new ContextualNotification( @@ -136,7 +147,7 @@ public void stateTransition(Status from, Status to) { EhcacheNotification.CACHE_MANAGER_AVAILABLE.name())); // auto-start stat collection - startStatisticCollector(); + statisticCollector.startStatisticCollector(); break; case MAINTENANCE: @@ -161,80 +172,4 @@ public void stateTransition(Status from, Status to) { } } - @Override - public synchronized void startStatisticCollector() { - if (task == null) { - StatisticsProviderConfiguration providerConfiguration = configuration.getConfigurationFor(EhcacheStatisticsProvider.class); - - long timeToDisableMs = TimeUnit.MILLISECONDS.convert(providerConfiguration.timeToDisable(), providerConfiguration.timeToDisableUnit()); - long pollingIntervalMs = Math.round(timeToDisableMs * 0.75); // we poll at 75% of the time to disable (before the time to disable happens) - final AtomicLong lastPoll = new AtomicLong(timeSource.getTimeMillis()); - - task = scheduledExecutorService.scheduleWithFixedDelay(new Runnable() { - @Override - public void run() { - try { - if (task != null && !selectedStatsPerCapability.isEmpty()) { - - // create the full context list from current caches - Collection cacheContexts = new ArrayList(); - for (String cacheAlias : new HashSet(cacheManager.getRuntimeConfiguration().getCacheConfigurations().keySet())) { - cacheContexts.add(configuration.getContext().with("cacheName", cacheAlias)); - } - - Collection statistics = new ArrayList(); - - // for each capability, call the management registry - long since = lastPoll.get(); - for (Map.Entry entry : selectedStatsPerCapability.entrySet()) { - for (ContextualStatistics contextualStatistics : entry.getValue().since(since).on(cacheContexts).build().execute()) { - statistics.add(contextualStatistics); - } - } - - // next time, only poll history from this time - lastPoll.set(timeSource.getTimeMillis()); - - if (task != null && !statistics.isEmpty()) { - collector.onStatistics(statistics); - } - } - } catch (RuntimeException e) { - LOGGER.error("StatisticCollector: " + e.getMessage(), e); - } - } - }, pollingIntervalMs, pollingIntervalMs, TimeUnit.MILLISECONDS); - } - } - - @Override - public synchronized void stopStatisticCollector() { - if (task != null) { - ScheduledFuture _task = task; - task = null; - _task.cancel(false); - } - } - - @Override - public void updateCollectedStatistics(String capabilityName, Collection statisticNames) { - if(!statisticNames.isEmpty()) { - StatisticQuery.Builder builder = managementRegistry.withCapability(capabilityName).queryStatistics(statisticNames); - selectedStatsPerCapability.put(capabilityName, builder); - } else { - // we clear the stats set - selectedStatsPerCapability.remove(capabilityName); - } - 
} - - // for test purposes - Map getSelectedStatsPerCapability() { - return Collections.unmodifiableMap(selectedStatsPerCapability); - } - - // for test purposes - void setManagementRegistry(ManagementRegistryService managementRegistry) { - this.managementRegistry = managementRegistry; - } - } diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java index 8b680f03c3..fb6223ed73 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultCollectorServiceTest.java @@ -31,8 +31,6 @@ import org.terracotta.management.model.context.Context; import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.registry.CapabilityManagement; -import org.terracotta.management.registry.StatisticQuery; import java.util.ArrayList; import java.util.Arrays; @@ -47,38 +45,9 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class DefaultCollectorServiceTest { - - @Test - public void updateCollectedStatisticsTest__should_not_add_stats_when_selection_empty() throws Exception { - DefaultCollectorService defaultCollectorService = new DefaultCollectorService(); - defaultCollectorService.updateCollectedStatistics("PifCapability", new ArrayList()); - assertThat(defaultCollectorService.getSelectedStatsPerCapability().size(), equalTo(0)); - } - - @Test - public void updateCollectedStatisticsTest__add_stats_and_then_clear_them() throws Exception { - DefaultCollectorService defaultCollectorService = new DefaultCollectorService(); - ManagementRegistryService managementRegistryService = mock(ManagementRegistryService.class); - CapabilityManagement capability = mock(CapabilityManagement.class); - StatisticQuery.Builder builder = mock(StatisticQuery.Builder.class); - when(capability.queryStatistics(new ArrayList(){{add("SuperStat");}})).thenReturn(builder); - when(managementRegistryService.withCapability("PifCapability")).thenReturn(capability); - defaultCollectorService.setManagementRegistry(managementRegistryService); - defaultCollectorService.updateCollectedStatistics("PifCapability", new ArrayList(){{add("SuperStat");}}); - assertThat(defaultCollectorService.getSelectedStatsPerCapability().size(), equalTo(1)); - - - defaultCollectorService.updateCollectedStatistics("PifCapability", new ArrayList()); - assertThat(defaultCollectorService.getSelectedStatsPerCapability().size(), equalTo(0)); - - } - - @Test(timeout = 6000) public void test_collector() throws Exception { final Queue messages = new ConcurrentLinkedQueue(); From b1ed573efd390481416779b0684d7db58694c718 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Fri, 11 Nov 2016 20:19:46 -0500 Subject: [PATCH 137/218] :arrow_up: Version upgrade --- build.gradle | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.gradle b/build.gradle index 3daa02581a..fac315fecc 100644 --- a/build.gradle +++ b/build.gradle @@ -28,15 +28,15 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.9.beta' + terracottaPlatformVersion = '5.0.11.beta2' 
managementVersion = terracottaPlatformVersion - terracottaApisVersion = '1.0.9.beta' - terracottaCoreVersion = '5.0.9-beta2' + terracottaApisVersion = '1.0.11.beta' + terracottaCoreVersion = '5.0.11-beta' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.9.beta2' + terracottaPassthroughTestingVersion = '1.0.11.beta' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.9-beta2' + galvanVersion = '1.0.11-beta' // Tools findbugsVersion = '3.0.1' From 86831450dc52ceaefc210110f91a51db9bab0ed3 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 17 Nov 2016 13:55:03 +0100 Subject: [PATCH 138/218] :bug: Test requires deterministic eviction --- .../management/providers/statistics/HitCountTest.java | 10 +++++++++- .../management/providers/statistics/HitRatioTest.java | 10 +++++++++- .../management/providers/statistics/MissRatioTest.java | 10 +++++++++- 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java index 5d64e07b36..1fe2130a58 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java @@ -30,6 +30,7 @@ import org.ehcache.CacheManager; import org.ehcache.config.Builder; import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; @@ -100,7 +101,14 @@ public void test() throws InterruptedException, IOException { registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES)); ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources) + .withEvictionAdvisor(new EvictionAdvisor() { + @Override + public boolean adviseAgainstEviction(Long key, String value) { + return key.equals(2L); + } + }) + .build(); cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("myCache", cacheConfiguration) diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java index 197af8f522..19fd56aad9 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java @@ -19,6 +19,7 @@ import org.ehcache.CacheManager; import org.ehcache.config.Builder; import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; @@ -140,7 +141,14 @@ public void test() throws InterruptedException, IOException { registryConfiguration.addConfiguration(new 
EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES)); final ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources) + .withEvictionAdvisor(new EvictionAdvisor() { + @Override + public boolean adviseAgainstEviction(Long key, String value) { + return key.equals(2L); + } + }) + .build(); cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("myCache", cacheConfiguration) diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java index 8fc63ef3cd..8ef3259dd7 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java @@ -30,6 +30,7 @@ import org.ehcache.CacheManager; import org.ehcache.config.Builder; import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; import org.ehcache.config.ResourcePools; import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; @@ -125,7 +126,14 @@ public void test() throws InterruptedException, IOException { registryConfiguration.addConfiguration(new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES)); ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); - CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources) + .withEvictionAdvisor(new EvictionAdvisor() { + @Override + public boolean adviseAgainstEviction(Long key, String value) { + return key.equals(2L); + } + }) + .build(); cacheManager = CacheManagerBuilder.newCacheManagerBuilder() .withCache("myCache", cacheConfiguration) From bb7f2392ca783dd65cb421722d74c4d0452d4250 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 17 Nov 2016 13:43:01 +0100 Subject: [PATCH 139/218] :shirt: Clean up warnings and dead code --- .../providers/statistics/HitCountTest.java | 12 ++-- .../providers/statistics/HitRatioTest.java | 61 ++++++++----------- .../providers/statistics/MissCountTest.java | 11 ++-- .../providers/statistics/MissRatioTest.java | 52 +++++++--------- .../providers/statistics/StatsUtil.java | 11 ++-- 5 files changed, 64 insertions(+), 83 deletions(-) diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java index 1fe2130a58..eff9a7d146 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java @@ -16,6 +16,7 @@ package org.ehcache.management.providers.statistics; import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; import static 
org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.MemoryUnit.MB; import static org.ehcache.config.units.EntryUnit.ENTRIES; @@ -68,14 +69,13 @@ public class HitCountTest { public static Collection data() { return asList(new Object[][] { //1 tier - { newResourcePoolsBuilder().heap(1, MB), Arrays.asList("OnHeap:HitCount"), Arrays.asList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, - { newResourcePoolsBuilder().offheap(1, MB), Arrays.asList("OffHeap:HitCount"), Arrays.asList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, - { newResourcePoolsBuilder().disk(1, MB), Arrays.asList("Disk:HitCount"), Arrays.asList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, + { newResourcePoolsBuilder().heap(1, MB), singletonList("OnHeap:HitCount"), singletonList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, + { newResourcePoolsBuilder().offheap(1, MB), singletonList("OffHeap:HitCount"), singletonList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, + { newResourcePoolsBuilder().disk(1, MB), singletonList("Disk:HitCount"), singletonList(CACHE_HIT_TOTAL), CACHE_HIT_TOTAL }, //2 tiers { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount"), Arrays.asList(2L,2L), CACHE_HIT_TOTAL}, { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:HitCount","Disk:HitCount"), Arrays.asList(2L,2L), CACHE_HIT_TOTAL}, - //offheap and disk configuration below is not valid. Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] //3 tiers { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitCount","OffHeap:HitCount","Disk:HitCount"), Arrays.asList(2L,0L,2L), CACHE_HIT_TOTAL}, @@ -135,10 +135,10 @@ public boolean adviseAgainstEviction(Long key, String value) { long tierHitCountSum = 0; for (int i = 0; i < statNames.size(); i++) { - tierHitCountSum += StatsUtil.getExpectedValueFromCounterHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + tierHitCountSum += StatsUtil.getAndAssertExpectedValueFromCounterHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); } - long cacheHitCount = StatsUtil.getExpectedValueFromCounterHistory("Cache:HitCount", context, managementRegistry, cacheExpectedValue); + long cacheHitCount = StatsUtil.getAndAssertExpectedValueFromCounterHistory("Cache:HitCount", context, managementRegistry, cacheExpectedValue); Assert.assertThat(tierHitCountSum, is(cacheHitCount)); } diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java index 19fd56aad9..b816952db6 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java @@ -28,7 +28,6 @@ import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; -import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -36,22 +35,18 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; -import org.terracotta.management.model.stats.Sample; -import org.terracotta.management.model.stats.Statistic; 
-import org.terracotta.management.model.stats.StatisticHistory; import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.List; -import java.util.Map; import java.util.concurrent.TimeUnit; import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.EntryUnit.ENTRIES; import static org.ehcache.config.units.MemoryUnit.MB; -import static org.hamcrest.CoreMatchers.is; @RunWith(Parameterized.class) public class HitRatioTest { @@ -71,30 +66,30 @@ public class HitRatioTest { @Parameterized.Parameters public static Collection data() { - List statNamesOnHeap = Arrays.asList("OnHeap:HitRatio"); - List statNamesOffHeap = Arrays.asList("OffHeap:HitRatio"); - List statNamesDisk = Arrays.asList("Disk:HitRatio"); + List statNamesOnHeap = singletonList("OnHeap:HitRatio"); + List statNamesOffHeap = singletonList("OffHeap:HitRatio"); + List statNamesDisk = singletonList("Disk:HitRatio"); List statNamesOnHeapOffHeap = Arrays.asList("OnHeap:HitRatio","OffHeap:HitRatio"); List statNamesOnHeapDisk = Arrays.asList("OnHeap:HitRatio","Disk:HitRatio"); List statNamesThreeTiers = Arrays.asList("OnHeap:HitRatio","OffHeap:HitRatio","Disk:HitRatio"); return asList(new Object[][] { //1 tier - { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1l,2l,3l) , Arrays.asList(1d), 1d }, //3 hits, 0 misses - { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 hits, 2 misses - { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(4l,5l) , Arrays.asList(0d), 0d }, //0 hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1L, 2L, 3L) , singletonList(1d), 1d }, //3 hits, 0 misses + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(4L, 5L) , singletonList(0d), 0d }, //0 hits, 2 misses - { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1l,2l,3l), Arrays.asList(1d), 1d }, //3 hits, 0 misses - { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 hits, 2 misses - { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(4l,5l) , Arrays.asList(0d), 0d }, //0 hits, 2 misses + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1L, 2L, 3L), singletonList(1d), 1d }, //3 hits, 0 misses + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 hits, 2 misses + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(4L, 5L) , singletonList(0d), 0d }, //0 hits, 2 misses - { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1l,2l,3l) , Arrays.asList(1d), 1d }, //3 hits, 0 misses - { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 hits, 2 misses - { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(4l,5l) , Arrays.asList(0d), 0d }, //0 hits, 2 misses + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1L, 2L, 3L) , singletonList(1d), 1d }, //3 hits, 0 misses + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, 
Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 hits, 2 misses + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(4L, 5L) , singletonList(0d), 0d }, //0 hits, 2 misses //2 tiers - { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1l,2l,3l) , Arrays.asList(0d,1d), 1d }, //3 offheap hits, 0 misses + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1L, 2L, 3L) , Arrays.asList(0d,1d), 1d }, //3 offheap hits, 0 misses /* explanation of ratio calc: @@ -107,18 +102,17 @@ public static Collection data() { This test checks the offheap tier on the first 3 gets, and finds the key on each check. So there are 3 hits. Thus offHeapHitRatio = 3 hits / 3 attempts = 1 */ - { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1l,2l,3l,1L) , Arrays.asList(.25d,1d), 1d },//3 offheap hits, 1 heap hit, 0 misses - { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(0d,.5), .5d }, //2 offheap hits, 2 misses - { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(4l,5l) , Arrays.asList(0d,0d), 0d }, //0 hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1L, 2L, 3L,1L) , Arrays.asList(.25d,1d), 1d },//3 offheap hits, 1 heap hit, 0 misses + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1L, 2L, 4L, 5L) , Arrays.asList(0d,.5), .5d }, //2 offheap hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(4L, 5L) , Arrays.asList(0d,0d), 0d }, //0 hits, 2 misses - { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1l,2l,3l,1L) , Arrays.asList(.25d,1d), 1d }, //3 disk hits, 1 heap hit, 0 misses - { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(0d,.5), .5d }, //2 disk hits, 2 misses - { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(4l,5l) , Arrays.asList(0d,0d), 0d }, //0 hits, 2 misses - //offheap and disk configuration is not valid. 
Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1L, 2L, 3L,1L) , Arrays.asList(.25d,1d), 1d }, //3 disk hits, 1 heap hit, 0 misses + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1L, 2L, 4L, 5L) , Arrays.asList(0d,.5), .5d }, //2 disk hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(4L, 5L) , Arrays.asList(0d,0d), 0d }, //0 hits, 2 misses //3 tiers - { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1l,2l,3l,1L) , Arrays.asList(.25d,0d,1d), 1d }, //3 disk hits, 1 heap hit, 0 misses - { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1l,2l,2L,1L), Arrays.asList(.25d,(1d/3d),1d), 1d},//3 disk hits, 1 offheap hit, 1 heap hit, 0 misses + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1L, 2L, 3L,1L) , Arrays.asList(.25d,0d,1d), 1d }, //3 disk hits, 1 heap hit, 0 misses + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1L, 2L,2L,1L), Arrays.asList(.25d,(1d / 3d),1d), 1d},//3 disk hits, 1 offheap hit, 1 heap hit, 0 misses }); } @@ -162,24 +156,19 @@ public boolean adviseAgainstEviction(Long key, String value) { Cache cache = cacheManager.getCache("myCache", Long.class, String.class); - //System.out.println("put() 1, 2, 3"); cache.put(1L, "1");//put in lowest tier cache.put(2L, "2");//put in lowest tier cache.put(3L, "3");//put in lowest tier for(Long key : getKeys) { - String v = cache.get(key); - //System.out.println("get(" + key + "): " + (v == null ? 
"miss" : "hit")); + cache.get(key); } - double tierHitRatio = 0; for (int i = 0; i < statNames.size(); i++) { - tierHitRatio = StatsUtil.getExpectedValueFromRatioHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); - Assert.assertThat(tierHitRatio, is(tierExpectedValues.get(i))); + StatsUtil.assertExpectedValueFromRatioHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); } - double hitRatio = StatsUtil.getExpectedValueFromRatioHistory("Cache:HitRatio", context, managementRegistry, cacheExpectedValue); - Assert.assertThat(hitRatio, is(cacheExpectedValue)); + StatsUtil.assertExpectedValueFromRatioHistory("Cache:HitRatio", context, managementRegistry, cacheExpectedValue); } finally { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java index 16ef74e002..9606d08840 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java @@ -16,6 +16,7 @@ package org.ehcache.management.providers.statistics; import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.MemoryUnit.MB; import static org.hamcrest.CoreMatchers.is; @@ -64,9 +65,9 @@ public class MissCountTest { public static Collection data() { return asList(new Object[][] { //1 tier - { newResourcePoolsBuilder().heap(1, MB), Arrays.asList("OnHeap:MissCount"), Arrays.asList(2L), 2L }, - { newResourcePoolsBuilder().offheap(1, MB), Arrays.asList("OffHeap:MissCount"), Arrays.asList(2L), 2L }, - { newResourcePoolsBuilder().disk(1, MB), Arrays.asList("Disk:MissCount"), Arrays.asList(2L), 2L }, + { newResourcePoolsBuilder().heap(1, MB), singletonList("OnHeap:MissCount"), singletonList(2L), 2L }, + { newResourcePoolsBuilder().offheap(1, MB), singletonList("OffHeap:MissCount"), singletonList(2L), 2L }, + { newResourcePoolsBuilder().disk(1, MB), singletonList("Disk:MissCount"), singletonList(2L), 2L }, //2 tiers { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:MissCount","OffHeap:MissCount"), Arrays.asList(2L,2L), 2L}, @@ -118,10 +119,10 @@ public void test() throws InterruptedException, IOException { long tierMissCountSum = 0; for (int i = 0; i < statNames.size(); i++) { - tierMissCountSum += StatsUtil.getExpectedValueFromCounterHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + tierMissCountSum += StatsUtil.getAndAssertExpectedValueFromCounterHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); } - long cacheMissCount = StatsUtil.getExpectedValueFromCounterHistory("Cache:MissCount", context, managementRegistry, cacheExpectedValue); + long cacheMissCount = StatsUtil.getAndAssertExpectedValueFromCounterHistory("Cache:MissCount", context, managementRegistry, cacheExpectedValue); //A cache.get() checks every tier, so there is one miss per tier. However the cache miss count only counts 1 miss regardless of the number of tiers. 
Assert.assertThat(tierMissCountSum/statNames.size(), is(cacheMissCount)); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java index 8ef3259dd7..c321acdc3d 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java @@ -16,10 +16,10 @@ package org.ehcache.management.providers.statistics; import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.ehcache.config.units.EntryUnit.ENTRIES; import static org.ehcache.config.units.MemoryUnit.MB; -import static org.hamcrest.CoreMatchers.is; import java.io.IOException; import java.util.Arrays; @@ -40,7 +40,6 @@ import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -67,43 +66,41 @@ public class MissRatioTest { @Parameterized.Parameters public static Collection data() { - List statNamesOnHeap = Arrays.asList("OnHeap:MissRatio"); - List statNamesOffHeap = Arrays.asList("OffHeap:MissRatio"); - List statNamesDisk = Arrays.asList("Disk:MissRatio"); + List statNamesOnHeap = singletonList("OnHeap:MissRatio"); + List statNamesOffHeap = singletonList("OffHeap:MissRatio"); + List statNamesDisk = singletonList("Disk:MissRatio"); List statNamesOnHeapOffHeap = Arrays.asList("OnHeap:MissRatio","OffHeap:MissRatio"); List statNamesOnHeapDisk = Arrays.asList("OnHeap:MissRatio","Disk:MissRatio"); List statNamesThreeTiers = Arrays.asList("OnHeap:MissRatio","OffHeap:MissRatio","Disk:MissRatio"); return asList(new Object[][] { //1 tier - { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1l,2l,3l) , Arrays.asList(0d), 0d }, //0 misses, 3 hits - { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 misses, 2 hits - { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(4l,5l) , Arrays.asList(1d), 1d }, //0 hits, 2 misses + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1L, 2L, 3L) , singletonList(0d), 0d }, //0 misses, 3 hits + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 misses, 2 hits + { newResourcePoolsBuilder().heap(1, MB), statNamesOnHeap, Arrays.asList(4L, 5L) , singletonList(1d), 1d }, //0 hits, 2 misses - { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1l,2l,3l), Arrays.asList(0d), 0d }, //0 misses, 3 hits - { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 misses, 2 hits - { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(4l,5l) , Arrays.asList(1d), 1d }, //2 misses, 0 hits + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1L, 2L, 3L), singletonList(0d), 0d }, //0 misses, 3 hits + { newResourcePoolsBuilder().offheap(1, MB), statNamesOffHeap, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 misses, 2 hits + { newResourcePoolsBuilder().offheap(1, MB), 
statNamesOffHeap, Arrays.asList(4L, 5L) , singletonList(1d), 1d }, //2 misses, 0 hits - { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1l,2l,3l) , Arrays.asList(0d), 0d }, //0 misses, 3 hits - { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1l,2l,4l,5l) , Arrays.asList(.5d), .5d }, //2 misses, 2 hits - { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(4l,5l) , Arrays.asList(1d), 1d }, //2 misses, 0 hits + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1L, 2L, 3L) , singletonList(0d), 0d }, //0 misses, 3 hits + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(1L, 2L, 4L, 5L) , singletonList(.5d), .5d }, //2 misses, 2 hits + { newResourcePoolsBuilder().disk(1, MB), statNamesDisk, Arrays.asList(4L, 5L) , singletonList(1d), 1d }, //2 misses, 0 hits //2 tiers - { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1l,2l,3l) , Arrays.asList(1d,0d), 0d }, //3 heap misses, 0 offheap misses, 3 hits - { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1l,2l,3l,4L) , Arrays.asList(1d,.25d), .25d },//4 heap misses, 1 offheap miss, 3 hits + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1L, 2L, 3L) , Arrays.asList(1d,0d), 0d }, //3 heap misses, 0 offheap misses, 3 hits + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(1L, 2L, 3L,4L) , Arrays.asList(1d,.25d), .25d },//4 heap misses, 1 offheap miss, 3 hits { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), statNamesOnHeapOffHeap, Arrays.asList(4L,5L) , Arrays.asList(1d,1d), 1d }, //2 heap misses, 2 offheap misses, 0 hits - { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1l,2l,3l) , Arrays.asList(1d,0d), 0d }, //3 heap misses, 0 disk misses, 3 hits - { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1l,2l,3l,4L) , Arrays.asList(1d,.25d), .25d },//4 heap misses, 1 disk miss, 3 hits + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1L, 2L, 3L) , Arrays.asList(1d,0d), 0d }, //3 heap misses, 0 disk misses, 3 hits + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(1L, 2L, 3L,4L) , Arrays.asList(1d,.25d), .25d },//4 heap misses, 1 disk miss, 3 hits { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), statNamesOnHeapDisk, Arrays.asList(4L,5L) , Arrays.asList(1d,1d), 1d }, //2 heap misses, 2 disk misses, 0 hits - //offheap and disk configuration is not valid. 
Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] - //3 tiers - { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1l,2l,3l,1L) , Arrays.asList(.75d,1d,0d), 0d }, //3 heap misses, 3 offheap misses, 0 disk misses, 4 hits - { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1l,2l,2L,4L), Arrays.asList(.75d,1d,1d/3d), 1d/4d},//3 heap misses, 3 offheap misses, 1 disk miss, 3 hits + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1L, 2L, 3L,1L) , Arrays.asList(.75d,1d,0d), 0d }, //3 heap misses, 3 offheap misses, 0 disk misses, 4 hits + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), statNamesThreeTiers, Arrays.asList(1L, 2L,2L,4L), Arrays.asList(.75d,1d, 1d / 3d), 1d / 4d},//3 heap misses, 3 offheap misses, 1 disk miss, 3 hits }); } @@ -147,24 +144,19 @@ public boolean adviseAgainstEviction(Long key, String value) { Cache cache = cacheManager.getCache("myCache", Long.class, String.class); - //System.out.println("put() 1, 2, 3"); cache.put(1L, "1");//put in lowest tier cache.put(2L, "2");//put in lowest tier cache.put(3L, "3");//put in lowest tier for(Long key : getKeys) { - String v = cache.get(key); - //System.out.println("get(" + key + "): " + (v == null ? "miss" : "hit")); + cache.get(key); } - double tierMissRatio = 0; for (int i = 0; i < statNames.size(); i++) { - tierMissRatio = StatsUtil.getExpectedValueFromRatioHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); - Assert.assertThat(tierMissRatio, is(tierExpectedValues.get(i))); + StatsUtil.assertExpectedValueFromRatioHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); } - double hitRatio = StatsUtil.getExpectedValueFromRatioHistory("Cache:MissRatio", context, managementRegistry, cacheExpectedValue); - Assert.assertThat(hitRatio, is(cacheExpectedValue)); + StatsUtil.assertExpectedValueFromRatioHistory("Cache:MissRatio", context, managementRegistry, cacheExpectedValue); } finally { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java index 78c6f975e5..e89e53b366 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java @@ -31,6 +31,7 @@ import java.util.Arrays; import java.util.Map; +import static java.util.Collections.singletonList; import static org.junit.Assert.assertThat; public class StatsUtil { @@ -65,10 +66,10 @@ public static Context createContext(ManagementRegistryService managementRegistry This should only occur if the stats value is different from your expectedResult, which may happen if the stats calculations change, the stats value isn't accessible or if you enter the wrong expectedResult. 
*/ - public static long getExpectedValueFromCounterHistory(String statName, Context context, ManagementRegistryService managementRegistry, long expectedResult) { + public static long getAndAssertExpectedValueFromCounterHistory(String statName, Context context, ManagementRegistryService managementRegistry, long expectedResult) { StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") - .queryStatistics(Arrays.asList(statName)) + .queryStatistics(singletonList(statName)) .on(context) .build(); @@ -99,10 +100,10 @@ public static long getExpectedValueFromCounterHistory(String statName, Context c This should only occur if the stats value is different from your expectedResult, which may happen if the stats calculations change, the stats value isn't accessible or if you enter the wrong expectedResult. */ - public static double getExpectedValueFromRatioHistory(String statName, Context context, ManagementRegistryService managementRegistry, double expectedResult) { + public static void assertExpectedValueFromRatioHistory(String statName, Context context, ManagementRegistryService managementRegistry, double expectedResult) { StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") - .queryStatistics(Arrays.asList(statName)) + .queryStatistics(singletonList(statName)) .on(context) .build(); @@ -123,8 +124,6 @@ public static double getExpectedValueFromRatioHistory(String statName, Context c } while (!Thread.currentThread().isInterrupted() && value != expectedResult); assertThat(value, Matchers.is(expectedResult)); - - return value; } // When testing ratios, we need to wait for the first computation (we do not have any choice) to happen because ratio depends on 2 other sampled statistics. From f46403dbdd4936eb734fb30ef2494b3de11eb281 Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Thu, 17 Nov 2016 13:34:07 -0500 Subject: [PATCH 140/218] :art: Close #1631 Remove the trailing Ratio in RatioRatio --- .../ClusteringManagementServiceTest.java | 20 +++++++++---------- .../statistics/StandardEhcacheStatistics.java | 2 +- .../DefaultManagementRegistryServiceTest.java | 16 +++++++-------- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 3260cf6c1d..e55b4e7ca0 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -318,8 +318,8 @@ public static void initDescriptors() throws ClassNotFoundException { ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMaximum" , StatisticType.DURATION_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionRate" , StatisticType.RATE_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatioRatio" , StatisticType.RATIO_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRatioRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRatio" , StatisticType.RATIO_HISTORY)); 
ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyAverage" , StatisticType.AVERAGE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMinimum" , StatisticType.DURATION_HISTORY)); @@ -344,8 +344,8 @@ public static void initDescriptors() throws ClassNotFoundException { OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatioRatio", StatisticType.RATIO_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRatioRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRatio", StatisticType.RATIO_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", StatisticType.COUNTER_HISTORY)); @@ -370,8 +370,8 @@ public static void initDescriptors() throws ClassNotFoundException { DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", StatisticType.COUNTER_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatioRatio", StatisticType.RATIO_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRatioRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRatio", StatisticType.RATIO_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRate", StatisticType.RATE_HISTORY)); @@ -391,8 +391,8 @@ public static void initDescriptors() throws ClassNotFoundException { CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitRatioRatio", StatisticType.RATIO_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissRatioRatio", StatisticType.RATIO_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitRatio", StatisticType.RATIO_HISTORY)); + CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissRatio", StatisticType.RATIO_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:AllocatedByteSize", StatisticType.SIZE_HISTORY)); 
CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MappingCount", StatisticType.COUNTER_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionRate", StatisticType.RATE_HISTORY)); @@ -402,7 +402,7 @@ public static void initDescriptors() throws ClassNotFoundException { CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRate", StatisticType.RATE_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", StatisticType.COUNTER_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRatioRatio", StatisticType.RATIO_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRatio", StatisticType.RATIO_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyAverage", StatisticType.AVERAGE_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); @@ -414,7 +414,7 @@ public static void initDescriptors() throws ClassNotFoundException { CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearCount", StatisticType.COUNTER_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyMinimum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", StatisticType.COUNTER_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatioRatio", StatisticType.RATIO_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatio", StatisticType.RATIO_HISTORY)); POOL_DESCRIPTORS.add(new StatisticDescriptor("Pool:AllocatedSize", StatisticType.SIZE_HISTORY)); diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java index 255cc69463..6d2e5beba0 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -154,7 +154,7 @@ public Collection getDescriptors() { capabilities.add(new StatisticDescriptor(statisticName, StatisticType.COUNTER_HISTORY)); break; case RATIO: - capabilities.add(new StatisticDescriptor(entry.getKey() + "Ratio", StatisticType.RATIO_HISTORY)); + capabilities.add(new StatisticDescriptor(statisticName, StatisticType.RATIO_HISTORY)); break; case SIZE: capabilities.add(new StatisticDescriptor(statisticName, StatisticType.SIZE_HISTORY)); diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index 14523ef1db..a2d6c75e63 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -530,8 +530,8 @@ public static void loadStatsUtil() throws ClassNotFoundException { ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMaximum" , StatisticType.DURATION_HISTORY)); ONHEAP_DESCRIPTORS.add(new 
StatisticDescriptor("OnHeap:EvictionRate" , StatisticType.RATE_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatioRatio" , StatisticType.RATIO_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRatioRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatio" , StatisticType.RATIO_HISTORY)); + ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRatio" , StatisticType.RATIO_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyAverage" , StatisticType.AVERAGE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMinimum" , StatisticType.DURATION_HISTORY)); @@ -556,8 +556,8 @@ public static void loadStatsUtil() throws ClassNotFoundException { OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatioRatio", StatisticType.RATIO_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRatioRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatio", StatisticType.RATIO_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRatio", StatisticType.RATIO_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", StatisticType.COUNTER_HISTORY)); @@ -582,8 +582,8 @@ public static void loadStatsUtil() throws ClassNotFoundException { DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", StatisticType.COUNTER_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatioRatio", StatisticType.RATIO_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRatioRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatio", StatisticType.RATIO_HISTORY)); + DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRatio", StatisticType.RATIO_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRate", StatisticType.RATE_HISTORY)); @@ -592,7 +592,7 @@ public static void loadStatsUtil() throws ClassNotFoundException { CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRate", StatisticType.RATE_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", StatisticType.COUNTER_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRatioRatio", StatisticType.RATIO_HISTORY)); + CACHE_DESCRIPTORS.add(new 
StatisticDescriptor("Cache:HitRatio", StatisticType.RATIO_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyAverage", StatisticType.AVERAGE_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); @@ -604,7 +604,7 @@ public static void loadStatsUtil() throws ClassNotFoundException { CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearCount", StatisticType.COUNTER_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyMinimum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", StatisticType.COUNTER_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatioRatio", StatisticType.RATIO_HISTORY)); + CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatio", StatisticType.RATIO_HISTORY)); } From 8c7ae5793ca6c918080c75d9c43c5067dd9a932e Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Tue, 25 Oct 2016 18:25:02 +0200 Subject: [PATCH 141/218] :construction: #1482 Introduce single enum for message types --- clustered/common/build.gradle | 1 + .../internal/messages/EhcacheMessageType.java | 99 +++++++++++++++++++ 2 files changed, 100 insertions(+) create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java diff --git a/clustered/common/build.gradle b/clustered/common/build.gradle index e75c48963a..6bd98eabc6 100644 --- a/clustered/common/build.gradle +++ b/clustered/common/build.gradle @@ -18,6 +18,7 @@ apply plugin: EhDeploy dependencies { provided "org.terracotta:entity-common-api:$parent.entityApiVersion" + provided "org.terracotta:runnel:$parent.terracottaPlatformVersion" } tasks.withType(JavaCompile) { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java new file mode 100644 index 0000000000..04b278e4f0 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java @@ -0,0 +1,99 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import org.terracotta.runnel.EnumMapping; + +import java.util.EnumSet; + +import static java.util.EnumSet.of; +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; + +/** + * EhcacheMessageType + */ +public enum EhcacheMessageType { + // Lifecycle messages + CONFIGURE, + VALIDATE, + CREATE_SERVER_STORE, + VALIDATE_SERVER_STORE, + RELEASE_SERVER_STORE, + DESTROY_SERVER_STORE, + + // ServerStore operation messages + GET_AND_APPEND, + APPEND, + REPLACE, + CLIENT_INVALIDATION_ACK, + CLEAR, + GET_STORE, + + // StateRepository operation messages + GET_STATE_REPO, + PUT_IF_ABSENT, + ENTRY_SET, + + // Passive synchronization messages + CHAIN_REPLICATION_OP, + CLIENT_ID_TRACK_OP; + + public static final String MESSAGE_TYPE_FIELD_NAME = "opCode"; + public static final int MESSAGE_TYPE_FIELD_INDEX = 10; + public static final EnumMapping<EhcacheMessageType> EHCACHE_MESSAGE_TYPES_ENUM_MAPPING = newEnumMappingBuilder(EhcacheMessageType.class) + .mapping(CONFIGURE, 1) + .mapping(VALIDATE, 2) + .mapping(CREATE_SERVER_STORE, 3) + .mapping(VALIDATE_SERVER_STORE, 4) + .mapping(RELEASE_SERVER_STORE, 5) + .mapping(DESTROY_SERVER_STORE, 6) + + .mapping(GET_AND_APPEND, 21) + .mapping(APPEND, 22) + .mapping(REPLACE, 23) + .mapping(CLIENT_INVALIDATION_ACK, 24) + .mapping(CLEAR, 25) + .mapping(GET_STORE, 26) + + .mapping(GET_STATE_REPO, 41) + .mapping(PUT_IF_ABSENT, 42) + .mapping(ENTRY_SET, 43) + + .mapping(CHAIN_REPLICATION_OP, 61) + .mapping(CLIENT_ID_TRACK_OP, 62) + .build(); + + public static final EnumSet<EhcacheMessageType> LIFECYCLE_MESSAGES = of(CONFIGURE, VALIDATE, CREATE_SERVER_STORE, VALIDATE_SERVER_STORE, RELEASE_SERVER_STORE, DESTROY_SERVER_STORE); + public static boolean isLifecycleMessage(EhcacheMessageType value) { + return LIFECYCLE_MESSAGES.contains(value); + } + + public static final EnumSet<EhcacheMessageType> STORE_OPERATION_MESSAGES = of(GET_AND_APPEND, APPEND, REPLACE, CLIENT_INVALIDATION_ACK, CLEAR, GET_STORE); + public static boolean isStoreOperationMessage(EhcacheMessageType value) { + return STORE_OPERATION_MESSAGES.contains(value); + } + + public static final EnumSet<EhcacheMessageType> STATE_REPO_OPERATION_MESSAGES = of(GET_STATE_REPO, PUT_IF_ABSENT, ENTRY_SET); + public static boolean isStateRepoOperationMessage(EhcacheMessageType value) { + return STATE_REPO_OPERATION_MESSAGES.contains(value); + } + + public static final EnumSet<EhcacheMessageType> PASSIVE_SYNC_MESSAGES = of(CHAIN_REPLICATION_OP, CLIENT_ID_TRACK_OP); + public static boolean isPassiveSynchroMessage(EhcacheMessageType value) { + return PASSIVE_SYNC_MESSAGES.contains(value); + } +} From 4813e81b7c87a10452ff76756bfc26f18de9e761 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Thu, 17 Nov 2016 18:15:43 +0530 Subject: [PATCH 142/218] Closes #1582 Throw on runtime updation of clustered resources --- .../config/ClusteredResourcePoolImpl.java | 2 +- .../DedicatedClusteredResourcePoolImpl.java | 6 ++ .../SharedClusteredResourcePoolImpl.java | 6 ++ .../ClusteredResourcePoolUpdationTest.java | 102 ++++++++++++++++++ .../config/ClusteredResourcePoolImplTest.java | 32 ++++++ ...edicatedClusteredResourcePoolImplTest.java | 33 ++++++ .../SharedClusteredResourcePoolImplTest.java | 32 ++++++ 7 files changed, 212 insertions(+), 1 deletion(-) create mode 100644 clustered/client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java create mode 100644 clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java create mode 100644
clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java create mode 100644 clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java index 8c74da7ead..9d1da5223a 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImpl.java @@ -50,7 +50,7 @@ public boolean isPersistent() { @Override public void validateUpdate(ResourcePool newPool) { - super.validateUpdate(newPool); + throw new UnsupportedOperationException("Updating CLUSTERED resource is not supported"); } @Override diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java index f5ea3c82f3..e49d2b99b0 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImpl.java @@ -18,6 +18,7 @@ import org.ehcache.clustered.client.config.ClusteredResourceType; import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.config.ResourcePool; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.config.SizedResourcePoolImpl; @@ -64,6 +65,11 @@ public PoolAllocation getPoolAllocation() { return new PoolAllocation.Dedicated(this.getFromResource(), this.getUnit().toBytes(this.getSize())); } + @Override + public void validateUpdate(final ResourcePool newPool) { + throw new UnsupportedOperationException("Updating CLUSTERED resource is not supported"); + } + @Override public String toString() { final StringBuilder sb = new StringBuilder("Pool {"); diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java index 7d394beea5..647f133654 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImpl.java @@ -19,6 +19,7 @@ import org.ehcache.clustered.client.config.ClusteredResourceType; import org.ehcache.clustered.client.config.SharedClusteredResourcePool; import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.config.ResourcePool; import org.ehcache.core.config.AbstractResourcePool; /** @@ -60,6 +61,11 @@ public PoolAllocation getPoolAllocation() { return new PoolAllocation.Shared(this.getSharedResourcePool()); } + @Override + public void validateUpdate(final ResourcePool newPool) { + throw new UnsupportedOperationException("Updating CLUSTERED resource is not supported"); + } + @Override public String toString() { return "Pool {" diff --git a/clustered/client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java 
b/clustered/client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java new file mode 100644 index 0000000000..6a80761325 --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/ClusteredResourcePoolUpdationTest.java @@ -0,0 +1,102 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.Cache; +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.net.URI; + +public class ClusteredResourcePoolUpdationTest { + + private static final URI CLUSTER_URI = URI.create("terracotta://example.com:9540/my-application"); + + private static PersistentCacheManager cacheManager; + private static Cache dedicatedCache; + private static Cache sharedCache; + + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + @BeforeClass + public static void setUp() throws Exception { + UnitTestConnectionService.add(CLUSTER_URI, + new UnitTestConnectionService.PassthroughServerBuilder() + .resource("primary-server-resource", 8, MemoryUnit.MB) + .resource("secondary-server-resource", 8, MemoryUnit.MB) + .build()); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER_URI).autoCreate() + .defaultServerResource("primary-server-resource") + .resourcePool("resource-pool-a", 2, MemoryUnit.MB, "secondary-server-resource") + .resourcePool("resource-pool-b", 4, MemoryUnit.MB)) + .withCache("dedicated-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 4, MemoryUnit.MB)))) + .withCache("shared-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")))) + .build(); + cacheManager.init(); + + dedicatedCache = cacheManager.getCache("dedicated-cache", Long.class, String.class); + sharedCache = cacheManager.getCache("shared-cache", Long.class, String.class); + } + + @AfterClass + public static void tearDown() throws Exception { + cacheManager.close(); + UnitTestConnectionService.remove(CLUSTER_URI); + } + + @Test + 
public void testClusteredDedicatedResourcePoolUpdation() throws Exception { + expectedException.expect(UnsupportedOperationException.class); + expectedException.expectMessage("Updating CLUSTERED resource is not supported"); + dedicatedCache.getRuntimeConfiguration().updateResourcePools( + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 32, MemoryUnit.MB)) + .build() + ); + } + + @Test + public void testClusteredSharedResourcePoolUpdation() throws Exception { + expectedException.expect(UnsupportedOperationException.class); + expectedException.expectMessage("Updating CLUSTERED resource is not supported"); + sharedCache.getRuntimeConfiguration().updateResourcePools( + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(ClusteredResourcePoolBuilder.clusteredShared("resource-pool-a")) + .build() + ); + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java new file mode 100644 index 0000000000..12fd179431 --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/ClusteredResourcePoolImplTest.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.config; + +import org.ehcache.config.ResourcePool; +import org.junit.Test; + +import static org.mockito.Mockito.mock; + +public class ClusteredResourcePoolImplTest { + + @Test(expected = UnsupportedOperationException.class) + public void validateUpdate() throws Exception { + ClusteredResourcePoolImpl resourcePool = new ClusteredResourcePoolImpl(); + resourcePool.validateUpdate(mock(ResourcePool.class)); + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java new file mode 100644 index 0000000000..99daba303a --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/DedicatedClusteredResourcePoolImplTest.java @@ -0,0 +1,33 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
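What this change means for callers, as a rough sketch: any attempt to update a clustered resource pool through the cache's runtime configuration now fails fast with an UnsupportedOperationException instead of going through the regular resource pool update validation. The helper below is hypothetical and not part of the patch; the pool and resource names are the ones used in ClusteredResourcePoolUpdationTest above, and the imports are the same as in that test.

// Hypothetical caller-side sketch; not part of this patch.
static void tryGrowClusteredPool(Cache<Long, String> cache) {
  try {
    cache.getRuntimeConfiguration().updateResourcePools(
        ResourcePoolsBuilder.newResourcePoolsBuilder()
            .with(ClusteredResourcePoolBuilder.clusteredDedicated("primary-server-resource", 8, MemoryUnit.MB))
            .build());
  } catch (UnsupportedOperationException e) {
    // Thrown since this change: "Updating CLUSTERED resource is not supported"
  }
}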
+ */ + +package org.ehcache.clustered.client.internal.config; + +import org.ehcache.config.ResourcePool; +import org.ehcache.config.units.MemoryUnit; +import org.junit.Test; + +import static org.mockito.Mockito.mock; + +public class DedicatedClusteredResourcePoolImplTest { + + @Test(expected = UnsupportedOperationException.class) + public void validateUpdate() throws Exception { + DedicatedClusteredResourcePoolImpl resourcePool = new DedicatedClusteredResourcePoolImpl("foo", 3, MemoryUnit.MB); + resourcePool.validateUpdate(mock(ResourcePool.class)); + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java new file mode 100644 index 0000000000..e4f0dea3d6 --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/config/SharedClusteredResourcePoolImplTest.java @@ -0,0 +1,32 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.client.internal.config; + +import org.ehcache.config.ResourcePool; +import org.junit.Test; + +import static org.mockito.Mockito.mock; + +public class SharedClusteredResourcePoolImplTest { + + @Test(expected = UnsupportedOperationException.class) + public void validateUpdate() throws Exception { + SharedClusteredResourcePoolImpl resourcePool = new SharedClusteredResourcePoolImpl("foo"); + resourcePool.validateUpdate(mock(ResourcePool.class)); + } + +} From 79c97ca99d113c9cf6c15ba62ec2de4f0908bcb7 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 18 Nov 2016 13:52:43 +0100 Subject: [PATCH 143/218] :construction: #1482 Explicit declaration of slf4j-api --- clustered/client/build.gradle | 2 +- clustered/common/build.gradle | 1 + clustered/server/build.gradle | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 9553cc381a..939af60acd 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -21,7 +21,7 @@ apply plugin: EhDeploy dependencies { compileOnly project(':api') compileOnly project(':xml') - compile project(':clustered:common') + compile project(':clustered:common'), "org.slf4j:slf4j-api:$parent.slf4jVersion" provided "org.terracotta:entity-client-api:$parent.entityApiVersion" provided "org.terracotta.management:monitoring-service-api:$parent.managementVersion" // provided in management-server jar diff --git a/clustered/common/build.gradle b/clustered/common/build.gradle index 6bd98eabc6..21038bdcdf 100644 --- a/clustered/common/build.gradle +++ b/clustered/common/build.gradle @@ -17,6 +17,7 @@ apply plugin: EhDeploy dependencies { + compile "org.slf4j:slf4j-api:$parent.slf4jVersion" provided "org.terracotta:entity-common-api:$parent.entityApiVersion" provided "org.terracotta:runnel:$parent.terracottaPlatformVersion" } diff --git 
a/clustered/server/build.gradle b/clustered/server/build.gradle index cbd86062ea..2943818bd6 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -25,7 +25,7 @@ dependencies { compile ("org.terracotta:statistics:$parent.statisticVersion") { exclude group:'org.slf4j', module:'slf4j-api' } - compile project(':clustered:common') + compile project(':clustered:common'), "org.slf4j:slf4j-api:$parent.slf4jVersion" compile group: 'org.terracotta', name: 'offheap-resource', version: parent.offheapResourceVersion compile group: 'org.terracotta', name: 'offheap-store', version: parent.offheapVersion compile group: 'org.slf4j', name: 'slf4j-api', version: parent.slf4jVersion From a7905651706658946914a95081a9e671bb9c1c6c Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Wed, 26 Oct 2016 11:05:13 +0200 Subject: [PATCH 144/218] :construction: #1482 Introduce EhcacheOperationMessage * Common super type for all client -> server operations * Carries getMessageType method Also rename EntitySyncMessage to EhcacheSyncMessage --- .../messages/EhcacheEntityMessage.java | 2 ++ .../messages/EhcacheOperationMessage.java | 25 ++++++++++++++ .../internal/messages/LifecycleMessage.java | 32 +++++++++++++++++- .../messages/PassiveReplicationMessage.java | 12 ++++++- .../messages/ServerStoreOpMessage.java | 33 ++++++++++++++++++- .../messages/StateRepositoryOpMessage.java | 17 +++++++++- .../clustered/server/EhcacheActiveEntity.java | 8 ++--- .../server/EhcacheExecutionStrategy.java | 4 +-- .../server/EhcachePassiveEntity.java | 14 ++++---- ...ssage.java => EhcacheDataSyncMessage.java} | 4 +-- ...sage.java => EhcacheStateSyncMessage.java} | 10 +++--- ...ncMessage.java => EhcacheSyncMessage.java} | 2 +- .../messages/EhcacheSyncMessageCodec.java | 16 ++++----- .../server/EhcacheActiveEntityTest.java | 6 ++-- .../messages/EhcacheSyncMessageCodecTest.java | 8 ++--- 15 files changed, 152 insertions(+), 41 deletions(-) create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java rename clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/{EntityDataSyncMessage.java => EhcacheDataSyncMessage.java} (88%) rename clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/{EntityStateSyncMessage.java => EhcacheStateSyncMessage.java} (80%) rename clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/{EntitySyncMessage.java => EhcacheSyncMessage.java} (96%) diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java index 756d7b64c3..7ca37615b4 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java @@ -60,8 +60,10 @@ public static Type toType(byte code) { } } + @Deprecated public abstract Type getType(); + @Deprecated public abstract byte getOpCode(); @Override diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java new file mode 100644 index 0000000000..57031d1128 --- /dev/null +++ 
b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java @@ -0,0 +1,25 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +/** + * EhcacheOperationMessage + */ +public abstract class EhcacheOperationMessage extends EhcacheEntityMessage { + + public abstract EhcacheMessageType getMessageType(); +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java index d42658592b..e3f783c114 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java @@ -22,7 +22,7 @@ import java.io.Serializable; import java.util.UUID; -public abstract class LifecycleMessage extends EhcacheEntityMessage implements Serializable { +public abstract class LifecycleMessage extends EhcacheOperationMessage implements Serializable { public enum LifeCycleOp { CONFIGURE, @@ -86,6 +86,11 @@ public LifeCycleOp operation() { return LifeCycleOp.VALIDATE; } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.VALIDATE; + } + public ServerSideConfiguration getConfiguration() { return configuration; } @@ -106,6 +111,11 @@ public LifeCycleOp operation() { return LifeCycleOp.CONFIGURE; } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CONFIGURE; + } + public ServerSideConfiguration getConfiguration() { return configuration; } @@ -147,6 +157,11 @@ public static class CreateServerStore extends BaseServerStore { public LifeCycleOp operation() { return LifeCycleOp.CREATE_SERVER_STORE; } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CREATE_SERVER_STORE; + } } /** @@ -163,6 +178,11 @@ public static class ValidateServerStore extends BaseServerStore { public LifeCycleOp operation() { return LifeCycleOp.VALIDATE_SERVER_STORE; } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.VALIDATE_SERVER_STORE; + } } /** @@ -183,6 +203,11 @@ public LifeCycleOp operation() { return LifeCycleOp.RELEASE_SERVER_STORE; } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.RELEASE_SERVER_STORE; + } + public String getName() { return name; } @@ -206,6 +231,11 @@ public LifeCycleOp operation() { return LifeCycleOp.DESTROY_SERVER_STORE; } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.DESTROY_SERVER_STORE; + } + public String getName() { return name; } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java 
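For illustration, a hedged sketch of what the new common super type enables: code can branch on a message's family through EhcacheOperationMessage.getMessageType() and the EhcacheMessageType helper predicates instead of comparing raw op codes. The helper below is hypothetical and not part of the patch; the entities in this patch keep their existing switch-based dispatch.

// Hypothetical helper using only the types introduced above.
static String describeFamily(EhcacheOperationMessage message) {
  EhcacheMessageType type = message.getMessageType();
  if (EhcacheMessageType.isLifecycleMessage(type)) {
    return "lifecycle operation: " + type;
  } else if (EhcacheMessageType.isStoreOperationMessage(type)) {
    return "server store operation: " + type;
  } else if (EhcacheMessageType.isStateRepoOperationMessage(type)) {
    return "state repository operation: " + type;
  } else {
    return "replication/sync operation: " + type;
  }
}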
b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java index cb20bb3b25..42d83731e8 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java @@ -23,7 +23,7 @@ /** * This message is sent by the Active Entity to Passive Entity. */ -public abstract class PassiveReplicationMessage extends EhcacheEntityMessage { +public abstract class PassiveReplicationMessage extends EhcacheOperationMessage { public enum ReplicationOp { CHAIN_REPLICATION_OP((byte) 41), @@ -72,6 +72,11 @@ public byte getOpCode() { return operation().getReplicationOpCode(); } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CLIENT_ID_TRACK_OP; + } + @Override public void setId(long id) { throw new UnsupportedOperationException("This method is not supported on replication message"); @@ -125,6 +130,11 @@ public Chain getChain() { return chain; } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CHAIN_REPLICATION_OP; + } + @Override public ReplicationOp operation() { return ReplicationOp.CHAIN_REPLICATION_OP; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java index 82e6a4b146..0c6ba5aaee 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java @@ -21,7 +21,7 @@ import java.nio.ByteBuffer; import java.util.UUID; -public abstract class ServerStoreOpMessage extends EhcacheEntityMessage { +public abstract class ServerStoreOpMessage extends EhcacheOperationMessage { public enum ServerStoreOp { GET_AND_APPEND((byte) 11), @@ -99,6 +99,7 @@ public Type getType() { return Type.SERVER_STORE_OP; } + @Deprecated public abstract ServerStoreOp operation(); @Override @@ -140,6 +141,11 @@ public static class GetMessage extends KeyBasedServerStoreOpMessage { public ServerStoreOp operation() { return ServerStoreOp.GET; } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.GET_STORE; + } } public static class GetAndAppendMessage extends KeyBasedServerStoreOpMessage { @@ -157,6 +163,11 @@ public ServerStoreOp operation() { return ServerStoreOp.GET_AND_APPEND; } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.GET_AND_APPEND; + } + public ByteBuffer getPayload() { return payload; } @@ -178,6 +189,11 @@ public ServerStoreOp operation() { return ServerStoreOp.APPEND; } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.APPEND; + } + public ByteBuffer getPayload() { return payload; } @@ -201,6 +217,11 @@ public ServerStoreOp operation() { return ServerStoreOp.REPLACE; } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.REPLACE; + } + public Chain getExpect() { return expect; } @@ -227,6 +248,11 @@ public int getInvalidationId() { public ServerStoreOp operation() { return ServerStoreOp.CLIENT_INVALIDATION_ACK; } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CLIENT_INVALIDATION_ACK; + } } public static class ClearMessage extends 
ServerStoreOpMessage { @@ -240,6 +266,11 @@ public static class ClearMessage extends ServerStoreOpMessage { public ServerStoreOp operation() { return ServerStoreOp.CLEAR; } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CLEAR; + } } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java index e2ad6ee732..c42bb09155 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java @@ -19,7 +19,7 @@ import java.io.Serializable; import java.util.UUID; -public abstract class StateRepositoryOpMessage extends EhcacheEntityMessage implements Serializable { +public abstract class StateRepositoryOpMessage extends EhcacheOperationMessage implements Serializable { public enum StateRepositoryOp { GET, @@ -107,6 +107,11 @@ public GetMessage(final String cacheId, final String mapId, final Object key, fi public StateRepositoryOp operation() { return StateRepositoryOp.GET; } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.GET_STATE_REPO; + } } public static class PutIfAbsentMessage extends KeyBasedMessage { @@ -126,6 +131,11 @@ public Object getValue() { public StateRepositoryOp operation() { return StateRepositoryOp.PUT_IF_ABSENT; } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.PUT_IF_ABSENT; + } } public static class EntrySetMessage extends StateRepositoryOpMessage { @@ -138,6 +148,11 @@ public EntrySetMessage(final String cacheId, final String mapId, final UUID clie public StateRepositoryOp operation() { return StateRepositoryOp.ENTRY_SET; } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.ENTRY_SET; + } } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index e904947a7b..99baea528c 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -58,8 +58,8 @@ import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.ServerStore; -import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage; -import org.ehcache.clustered.server.internal.messages.EntityStateSyncMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheStateSyncMessage; import org.ehcache.clustered.server.management.Management; import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.InvalidationTracker; @@ -353,14 +353,14 @@ public void synchronizeKeyToPassive(PassiveSynchronizationChannel { ServerStoreImpl store = ehcacheStateService.getStore(name); store.getSegments().get(concurrencyKey - DATA_CONCURRENCY_KEY_OFFSET).keySet().stream() .forEach(key -> { - syncChannel.synchronizeToPassive(new EntityDataSyncMessage(name, key, store.get(key))); + syncChannel.synchronizeToPassive(new 
EhcacheDataSyncMessage(name, key, store.get(key))); }); }); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java index 3ccd486ee1..164ca24ce1 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java @@ -21,7 +21,7 @@ import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; -import org.ehcache.clustered.server.internal.messages.EntitySyncMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessage; import org.terracotta.entity.ExecutionStrategy; /** @@ -56,7 +56,7 @@ public Location getExecutionLocation(EhcacheEntityMessage message) { return Location.ACTIVE; } else if (message instanceof PassiveReplicationMessage) { return Location.PASSIVE; - } else if (message instanceof EntitySyncMessage) { + } else if (message instanceof EhcacheSyncMessage) { throw new AssertionError("Unexpected use of ExecutionStrategy for sync messages"); } throw new AssertionError("Unknown message type: " + message.getClass()); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 1526721a06..21e696e32a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -37,9 +37,9 @@ import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; -import org.ehcache.clustered.server.internal.messages.EntityDataSyncMessage; -import org.ehcache.clustered.server.internal.messages.EntityStateSyncMessage; -import org.ehcache.clustered.server.internal.messages.EntitySyncMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheStateSyncMessage; +import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessage; import org.ehcache.clustered.server.management.Management; import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; @@ -86,7 +86,7 @@ public void invoke(EhcacheEntityMessage message) { ehcacheStateService.getStateRepositoryManager().invoke((StateRepositoryOpMessage)message); break; case SYNC_OP: - invokeSyncOperation((EntitySyncMessage) message); + invokeSyncOperation((EhcacheSyncMessage) message); break; case REPLICATION_OP: invokeRetirementMessages((PassiveReplicationMessage)message); @@ -220,10 +220,10 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu } } - private void invokeSyncOperation(EntitySyncMessage message) throws ClusterException { + private void invokeSyncOperation(EhcacheSyncMessage message) throws ClusterException { switch (message.operation()) { case STATE: - EntityStateSyncMessage stateSyncMessage = (EntityStateSyncMessage) message; + 
EhcacheStateSyncMessage stateSyncMessage = (EhcacheStateSyncMessage) message; ehcacheStateService.configure(stateSyncMessage.getConfiguration()); management.sharedPoolsConfigured(); @@ -238,7 +238,7 @@ private void invokeSyncOperation(EntitySyncMessage message) throws ClusterExcept stateSyncMessage.getTrackedClients().stream().forEach(id -> ehcacheStateService.getClientMessageTracker().add(id)); break; case DATA: - EntityDataSyncMessage dataSyncMessage = (EntityDataSyncMessage) message; + EhcacheDataSyncMessage dataSyncMessage = (EhcacheDataSyncMessage) message; ehcacheStateService.getStore(dataSyncMessage.getCacheId()).put(dataSyncMessage.getKey(), dataSyncMessage.getChain()); break; default: diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityDataSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java similarity index 88% rename from clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityDataSyncMessage.java rename to clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java index 24e5bf2649..7026bef639 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityDataSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java @@ -21,13 +21,13 @@ import com.tc.classloader.CommonComponent; @CommonComponent -public class EntityDataSyncMessage extends EntitySyncMessage { +public class EhcacheDataSyncMessage extends EhcacheSyncMessage { private final String cacheId; private final long key; private final Chain chain; - public EntityDataSyncMessage(final String cacheId, final long key, final Chain chain) { + public EhcacheDataSyncMessage(final String cacheId, final long key, final Chain chain) { this.cacheId = cacheId; this.key = key; this.chain = chain; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityStateSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java similarity index 80% rename from clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityStateSyncMessage.java rename to clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java index 0b854acd1d..2e440214b8 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntityStateSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java @@ -21,23 +21,21 @@ import com.tc.classloader.CommonComponent; -import com.tc.classloader.CommonComponent; - import java.io.Serializable; import java.util.Map; import java.util.Set; import java.util.UUID; @CommonComponent -public class EntityStateSyncMessage extends EntitySyncMessage implements Serializable { +public class EhcacheStateSyncMessage extends EhcacheSyncMessage implements Serializable { private final ServerSideConfiguration configuration; private final Map storeConfigs; private final Set trackedClients; - public EntityStateSyncMessage(final ServerSideConfiguration configuration, - final Map storeConfigs, - final Set trackedClients) { + public EhcacheStateSyncMessage(final ServerSideConfiguration configuration, + final Map storeConfigs, + final Set trackedClients) { this.configuration = configuration; 
this.storeConfigs = storeConfigs; this.trackedClients = trackedClients; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntitySyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java similarity index 96% rename from clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntitySyncMessage.java rename to clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java index e3922203c1..7d5c51e5d8 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EntitySyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java @@ -23,7 +23,7 @@ import java.util.UUID; @CommonComponent -public abstract class EntitySyncMessage extends EhcacheEntityMessage { +public abstract class EhcacheSyncMessage extends EhcacheEntityMessage { @CommonComponent public enum SyncOp { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java index c8080ff882..79026ee3b3 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java @@ -37,7 +37,7 @@ public class EhcacheSyncMessageCodec implements SyncMessageCodec syncChannel = mock(PassiveSynchronizationChannel.class); activeEntity.synchronizeKeyToPassive(syncChannel, 1); - ArgumentCaptor captor = ArgumentCaptor.forClass(EntityStateSyncMessage.class); + ArgumentCaptor captor = ArgumentCaptor.forClass(EhcacheStateSyncMessage.class); verify(syncChannel).synchronizeToPassive(captor.capture()); - EntityStateSyncMessage capturedSyncMessage = captor.getValue(); + EhcacheStateSyncMessage capturedSyncMessage = captor.getValue(); ServerSideConfiguration configuration = capturedSyncMessage.getConfiguration(); assertThat(configuration.getDefaultServerResource(), is("serverResource1")); assertThat(configuration.getResourcePools().keySet(), containsInAnyOrder("primary", "secondary")); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java index 27a140905d..b54927bda0 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java @@ -72,9 +72,9 @@ public void testStateSyncMessageEncodeDecode() throws Exception { clientIds.add(clientId1); clientIds.add(clientId2); - EntityStateSyncMessage message = new EntityStateSyncMessage(serverSideConfig, storeConfigs, clientIds); + EhcacheStateSyncMessage message = new EhcacheStateSyncMessage(serverSideConfig, storeConfigs, clientIds); EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(); - EntityStateSyncMessage decodedMessage = (EntityStateSyncMessage) codec.decode(0, codec.encode(0, message)); + EhcacheStateSyncMessage decodedMessage = (EhcacheStateSyncMessage) codec.decode(0, codec.encode(0, message)); assertThat(decodedMessage.getConfiguration().getDefaultServerResource(), 
is("default-pool")); assertThat(decodedMessage.getConfiguration().getResourcePools(), is(sharedPools)); @@ -120,9 +120,9 @@ public void testStateSyncMessageEncodeDecode() throws Exception { @Test public void testDataSyncMessageEncodeDecode() throws Exception { EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(); - EntityDataSyncMessage message = new EntityDataSyncMessage("foo", 123L, + EhcacheDataSyncMessage message = new EhcacheDataSyncMessage("foo", 123L, getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L))); - EntityDataSyncMessage decoded = (EntityDataSyncMessage) codec.decode(0, codec.encode(0, message)); + EhcacheDataSyncMessage decoded = (EhcacheDataSyncMessage) codec.decode(0, codec.encode(0, message)); assertThat(decoded.getCacheId(), is(message.getCacheId())); assertThat(decoded.getKey(), is(message.getKey())); assertThat(chainsEqual(decoded.getChain(), message.getChain()), is(true)); From e76d63525ef6c505555607d7ea7115520c5489e0 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 27 Oct 2016 11:32:43 +0200 Subject: [PATCH 145/218] :construction: #1482 Using runnel for Client -> Server Covers lifecycle, server store operations, state repository operations and invalidations answers. This also include a number of active -> passive messages for operations replication. --- clustered/client/build.gradle | 1 + .../client/internal/EhcacheClientEntity.java | 15 +- clustered/clustered-dist/build.gradle | 5 + .../common/internal/messages/ChainCodec.java | 114 +++--- .../internal/messages/EhcacheCodec.java | 81 +++-- .../internal/messages/EhcacheMessageType.java | 15 +- .../messages/EhcacheOperationMessage.java | 5 + .../messages/LifeCycleMessageCodec.java | 318 +++++++++++++++- .../messages/LifeCycleMessageFactory.java | 12 +- .../internal/messages/LifecycleMessage.java | 5 - .../internal/messages/MessageCodecUtils.java | 106 ++++++ .../messages/PassiveReplicationMessage.java | 72 +++- .../PassiveReplicationMessageCodec.java | 296 +++++++++++---- .../messages/ServerStoreMessageFactory.java | 12 +- .../internal/messages/ServerStoreOpCodec.java | 342 +++++++++++------- .../messages/ServerStoreOpMessage.java | 5 - .../messages/StateRepositoryOpCodec.java | 178 ++++++++- .../messages/StateRepositoryOpMessage.java | 5 - .../clustered/common/internal/store/Util.java | 35 +- .../internal/util/ByteBufferInputStream.java | 62 ++++ .../internal/messages/EhcacheCodecTest.java | 109 +++--- .../messages/LifeCycleMessageCodecTest.java | 234 ++++++++++++ .../PassiveReplicationMessageCodecTest.java | 105 +++++- .../messages/ServerStoreOpCodecTest.java | 87 +++-- .../clustered/TerminatedServerTest.java | 8 +- clustered/server/build.gradle | 1 + .../clustered/server/EhcacheActiveEntity.java | 5 +- .../server/EhcachePassiveEntity.java | 65 ++-- .../server/EhcacheActiveEntityTest.java | 5 +- .../server/EhcachePassiveEntityTest.java | 24 +- 30 files changed, 1778 insertions(+), 549 deletions(-) create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java create mode 100644 clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 939af60acd..335a90ebc2 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -24,6 +24,7 @@ dependencies { 
compile project(':clustered:common'), "org.slf4j:slf4j-api:$parent.slf4jVersion" provided "org.terracotta:entity-client-api:$parent.entityApiVersion" provided "org.terracotta.management:monitoring-service-api:$parent.managementVersion" // provided in management-server jar + provided "org.terracotta:runnel:$parent.terracottaPlatformVersion" testCompile project(':api') testCompile project(':xml') diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index 96ece606ba..fef0188288 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -33,6 +33,8 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Failure; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Type; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; import org.ehcache.clustered.common.internal.messages.ReconnectMessage; import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; @@ -304,17 +306,16 @@ public void destroyCache(String name) throws ClusteredTierDestructionException, */ public EhcacheEntityResponse invoke(EhcacheEntityMessage message, boolean replicate) throws ClusterException, TimeoutException { - TimeoutDuration timeLimit; - if (message.getType() == EhcacheEntityMessage.Type.SERVER_STORE_OP - && GET_STORE_OPS.contains(getServerStoreOp(message.getOpCode()))) { - timeLimit = timeouts.getReadOperationTimeout(); - } else { - timeLimit = timeouts.getMutativeOperationTimeout(); + TimeoutDuration timeLimit = timeouts.getMutativeOperationTimeout(); + if (message instanceof EhcacheOperationMessage) { + if (GET_STORE_OPS.contains(((EhcacheOperationMessage) message).getMessageType())) { + timeLimit = timeouts.getReadOperationTimeout(); + } } return invokeInternal(timeLimit, message, replicate); } - private static final Set GET_STORE_OPS = EnumSet.of(GET); + private static final Set GET_STORE_OPS = EnumSet.of(EhcacheMessageType.GET_STORE); private EhcacheEntityResponse invokeInternal(TimeoutDuration timeLimit, EhcacheEntityMessage message, boolean replicate) throws ClusterException, TimeoutException { diff --git a/clustered/clustered-dist/build.gradle b/clustered/clustered-dist/build.gradle index 8a175b9bb1..3b5f3ebb99 100644 --- a/clustered/clustered-dist/build.gradle +++ b/clustered/clustered-dist/build.gradle @@ -31,6 +31,8 @@ ext { dependencies { compile project(':clustered:client') compile project(':clustered:common') + // Needed because declared as provided in the different projects + compile "org.terracotta:runnel:$parent.terracottaPlatformVersion" } apply plugin: 'distribution' @@ -52,6 +54,9 @@ dependencies { exclude group: 'org.terracotta.internal', module: 'tc-config-parser' } + // Needed because declared as provided in the different projects + serverLibs "org.terracotta:runnel:$parent.terracottaPlatformVersion" + kit "org.terracotta.internal:terracotta-kit:$terracottaCoreVersion@zip" shadowCompile "org.slf4j:slf4j-api:$parent.slf4jVersion" diff --git 
a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java index 66f8815972..cb3bdb5c38 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java @@ -19,93 +19,73 @@ import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.Element; import org.ehcache.clustered.common.internal.store.SequencedElement; +import org.ehcache.clustered.common.internal.store.Util; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructArrayEncoder; +import org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; -import static org.ehcache.clustered.common.internal.store.Util.getElement; -import static org.ehcache.clustered.common.internal.store.Util.getChain; - public class ChainCodec { - private static final byte NON_SEQUENCED_CHAIN = 0; - private static final byte SEQUENCED_CHAIN = 1; - private static final byte SEQ_NUM_OFFSET = 8; - private static final byte ELEMENT_PAYLOAD_OFFSET = 4; + private static final Struct ELEMENT_STRUCT = StructBuilder.newStructBuilder() + .int64("sequence", 10) + .byteBuffer("payload", 20) + .build(); + + static final Struct CHAIN_STRUCT = StructBuilder.newStructBuilder() + .structs("elements", 10, ELEMENT_STRUCT) + .build(); - //TODO: optimize too many bytebuffer allocation public byte[] encode(Chain chain) { - ByteBuffer msg = null; - boolean firstIteration = true ; + StructEncoder encoder = CHAIN_STRUCT.encoder(); + + encode(encoder, chain); + + ByteBuffer byteBuffer = encoder.encode(); + return byteBuffer.array(); + } + + public void encode(StructEncoder encoder, Chain chain) { + StructArrayEncoder elementsEncoder = encoder.structs("elements"); for (Element element : chain) { - if (firstIteration) { - firstIteration = false; - ByteBuffer buffer = ByteBuffer.allocate(1); - if (element instanceof SequencedElement) { - buffer.put(SEQUENCED_CHAIN); - } else { - buffer.put(NON_SEQUENCED_CHAIN); - } - buffer.flip(); - msg = combine(buffer, encodeElement(element)); - continue; + if (element instanceof SequencedElement) { + elementsEncoder.int64("sequence", ((SequencedElement) element).getSequenceNumber()); } - if (msg == null) { - throw new IllegalArgumentException("Message cannot be null"); - } - msg = combine(msg, encodeElement(element)); + elementsEncoder.byteBuffer("payload", element.getPayload()); + elementsEncoder.next(); } - return msg != null ? 
msg.array() : new byte[0]; } public Chain decode(byte[] payload) { + StructDecoder decoder = CHAIN_STRUCT.decoder(ByteBuffer.wrap(payload)); + return decode(decoder); + } + + public Chain decode(StructDecoder decoder) { + StructArrayDecoder elementsDecoder = decoder.structs("elements"); + final List elements = new ArrayList(); - if (payload.length != 0) { - ByteBuffer buffer = ByteBuffer.wrap(payload); - boolean isSequenced = buffer.get() == 1; - if (isSequenced) { - while (buffer.hasRemaining()) { - long sequence = buffer.getLong(); - elements.add(getElement(sequence, getElementPayLoad(buffer))); - } + for (int i = 0; i < elementsDecoder.length(); i++) { + Long sequence = elementsDecoder.int64("sequence"); + ByteBuffer byteBuffer = elementsDecoder.byteBuffer("payload"); + Element element; + if (sequence != null) { + element = Util.getElement(sequence, byteBuffer); } else { - while (buffer.hasRemaining()) { - elements.add(getElement(getElementPayLoad(buffer))); - } + element = Util.getElement(byteBuffer); } + elements.add(element); + elementsDecoder.next(); } - return getChain(elements); - } - private static ByteBuffer combine(ByteBuffer buffer1, ByteBuffer buffer2) { - ByteBuffer byteBuffer = ByteBuffer.allocate(buffer1.remaining() + buffer2.remaining()); - byteBuffer.put(buffer1); - byteBuffer.put(buffer2); - byteBuffer.flip(); - return byteBuffer; - } - - private static ByteBuffer encodeElement(Element element) { - ByteBuffer buffer = null; - if (element instanceof SequencedElement) { - buffer = ByteBuffer.allocate(SEQ_NUM_OFFSET + ELEMENT_PAYLOAD_OFFSET + element.getPayload().remaining()); - buffer.putLong(((SequencedElement)element).getSequenceNumber()); - } else { - buffer = ByteBuffer.allocate(ELEMENT_PAYLOAD_OFFSET + element.getPayload().remaining()); - } - buffer.putInt(element.getPayload().remaining()); - buffer.put(element.getPayload()); - buffer.flip(); - return buffer; - } + elementsDecoder.end(); - private static ByteBuffer getElementPayLoad(ByteBuffer buffer) { - int payloadSize = buffer.getInt(); - buffer.limit(buffer.position() + payloadSize); - ByteBuffer elementPayload = buffer.slice(); - buffer.position(buffer.limit()); - buffer.limit(buffer.capacity()); - return elementPayload; + return Util.getChain(elements); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java index 98e5d20e65..b5c2ff065b 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java @@ -16,17 +16,31 @@ package org.ehcache.clustered.common.internal.messages; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.terracotta.entity.MessageCodec; import org.terracotta.entity.MessageCodecException; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.decoding.Enm; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.LIFECYCLE_OP; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.REPLICATION_OP; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.SERVER_STORE_OP; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.STATE_REPO_OP; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage.Type.SYNC_OP; +import 
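As a minimal sketch of the runnel pattern the rewritten ChainCodec relies on: a Struct is declared once with named, indexed fields, an encoder writes values against those names, and a decoder reads them back from the encoded bytes. The struct and field names below are made up for illustration only; the runnel types and calls are the ones imported in the ChainCodec diff above (org.terracotta.runnel.Struct, StructBuilder, encoding.StructEncoder, decoding.StructDecoder).

// Illustrative only: a made-up struct, not one of the Ehcache message structs.
Struct sketch = StructBuilder.newStructBuilder()
    .int64("sequence", 10)
    .string("cacheName", 20)
    .build();

StructEncoder encoder = sketch.encoder();
encoder.int64("sequence", 42L);
encoder.string("cacheName", "pets");
byte[] bytes = encoder.encode().array();

StructDecoder decoder = sketch.decoder(ByteBuffer.wrap(bytes));
Long sequence = decoder.int64("sequence");       // fields read back in declaration order, as ChainCodec.decode() does
String cacheName = decoder.string("cacheName");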
java.nio.ByteBuffer; + +import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveSynchroMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; public class EhcacheCodec implements MessageCodec { + private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheCodec.class); + + static final Struct OP_CODE_DECODER = newStructBuilder().enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING).build(); + private static final MessageCodec SERVER_INSTANCE = new EhcacheCodec(new ServerStoreOpCodec(), new LifeCycleMessageCodec(), new StateRepositoryOpCodec(), new ResponseCodec(), new PassiveReplicationMessageCodec()); @@ -51,33 +65,48 @@ public static MessageCodec messageC @Override public byte[] encodeMessage(EhcacheEntityMessage message) { - switch (message.getType()) { - case LIFECYCLE_OP: - return lifeCycleMessageCodec.encode((LifecycleMessage)message); - case SERVER_STORE_OP: - return serverStoreOpCodec.encode((ServerStoreOpMessage) message); - case STATE_REPO_OP: - return stateRepositoryOpCodec.encode((StateRepositoryOpMessage) message); - case REPLICATION_OP: - return passiveReplicationMessageCodec.encode((PassiveReplicationMessage)message); - default: - throw new IllegalArgumentException("Undefined message type: " + message.getType()); + if (!(message instanceof EhcacheOperationMessage)) { + throw new AssertionError("Unexpected message type " + message.getClass()); } + EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; + if (isLifecycleMessage(operationMessage.getMessageType())) { + return lifeCycleMessageCodec.encode((LifecycleMessage) operationMessage); + } else if (isStoreOperationMessage(operationMessage.getMessageType())) { + return serverStoreOpCodec.encode((ServerStoreOpMessage) operationMessage); + } else if (isStateRepoOperationMessage(operationMessage.getMessageType())) { + return stateRepositoryOpCodec.encode((StateRepositoryOpMessage) operationMessage); + } else if (isPassiveSynchroMessage(operationMessage.getMessageType())) { + return passiveReplicationMessageCodec.encode((PassiveReplicationMessage) operationMessage); + } + throw new AssertionError("Unknown message type: " + operationMessage.getMessageType()); } @Override public EhcacheEntityMessage decodeMessage(byte[] payload) throws MessageCodecException { - byte opCode = payload[0]; - if (opCode <= LIFECYCLE_OP.getCode()) { - return lifeCycleMessageCodec.decode(payload); - } else if (opCode <= SERVER_STORE_OP.getCode()) { - return serverStoreOpCodec.decode(payload); - } else if (opCode <= STATE_REPO_OP.getCode()) { - return stateRepositoryOpCodec.decode(payload); - } else if (opCode > SYNC_OP.getCode() && opCode <= REPLICATION_OP.getCode()) { - return passiveReplicationMessageCodec.decode(payload); + ByteBuffer byteBuffer = 
wrap(payload); + Enm opCodeEnm = OP_CODE_DECODER.decoder(byteBuffer).enm("opCode"); + + if (!opCodeEnm.isFound()) { + throw new AssertionError("Got a message without an opCode"); + } + if (!opCodeEnm.isValid()) { + LOGGER.warn("Received message with unknown operation code - more recent version at the other end?"); + return null; + } + + byteBuffer.rewind(); + + EhcacheMessageType opCode = opCodeEnm.get(); + if (isLifecycleMessage(opCode)) { + return lifeCycleMessageCodec.decode(opCode, byteBuffer); + } else if (isStoreOperationMessage(opCode)) { + return serverStoreOpCodec.decode(opCode, byteBuffer); + } else if (isStateRepoOperationMessage(opCode)) { + return stateRepositoryOpCodec.decode(opCode, byteBuffer); + } else if (isPassiveSynchroMessage(opCode)) { + return passiveReplicationMessageCodec.decode(opCode, byteBuffer); } else { - throw new UnsupportedOperationException("Undefined message code: " + opCode); + throw new UnsupportedOperationException("Undefined message code: " + opCodeEnm); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java index 04b278e4f0..1ebcdc69f3 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java @@ -25,6 +25,8 @@ /** * EhcacheMessageType + * + * Whenever you edit this, you must think about enum mapping and helper methods */ public enum EhcacheMessageType { // Lifecycle messages @@ -48,9 +50,14 @@ public enum EhcacheMessageType { PUT_IF_ABSENT, ENTRY_SET, + // TODO server to server only, should not exist in common // Passive synchronization messages CHAIN_REPLICATION_OP, - CLIENT_ID_TRACK_OP; + CLIENT_ID_TRACK_OP, + CLEAR_INVALIDATION_COMPLETE, + INVALIDATION_COMPLETE, + CREATE_SERVER_STORE_REPLICATION, + DESTROY_SERVER_STORE_REPLICATION; public static final String MESSAGE_TYPE_FIELD_NAME = "opCode"; public static final int MESSAGE_TYPE_FIELD_INDEX = 10; @@ -75,6 +82,10 @@ public enum EhcacheMessageType { .mapping(CHAIN_REPLICATION_OP, 61) .mapping(CLIENT_ID_TRACK_OP, 62) + .mapping(CLEAR_INVALIDATION_COMPLETE, 63) + .mapping(INVALIDATION_COMPLETE, 64) + .mapping(CREATE_SERVER_STORE_REPLICATION, 65) + .mapping(DESTROY_SERVER_STORE_REPLICATION, 66) .build(); public static final EnumSet LIFECYCLE_MESSAGES = of(CONFIGURE, VALIDATE, CREATE_SERVER_STORE, VALIDATE_SERVER_STORE, RELEASE_SERVER_STORE, DESTROY_SERVER_STORE); @@ -92,7 +103,7 @@ public static boolean isStateRepoOperationMessage(EhcacheMessageType value) { return STATE_REPO_OPERATION_MESSAGES.contains(value); } - public static final EnumSet PASSIVE_SYNC_MESSAGES = of(CHAIN_REPLICATION_OP, CLIENT_ID_TRACK_OP); + public static final EnumSet PASSIVE_SYNC_MESSAGES = of(CHAIN_REPLICATION_OP, CLIENT_ID_TRACK_OP, CLEAR_INVALIDATION_COMPLETE, INVALIDATION_COMPLETE, CREATE_SERVER_STORE_REPLICATION, DESTROY_SERVER_STORE_REPLICATION); public static boolean isPassiveSynchroMessage(EhcacheMessageType value) { return PASSIVE_SYNC_MESSAGES.contains(value); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java index 57031d1128..c2e2893912 100644 --- 
a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheOperationMessage.java @@ -22,4 +22,9 @@ public abstract class EhcacheOperationMessage extends EhcacheEntityMessage { public abstract EhcacheMessageType getMessageType(); + + @Override + public String toString() { + return getMessageType().toString(); + } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java index 9b3de64990..a480918e19 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java @@ -16,37 +16,319 @@ package org.ehcache.clustered.common.internal.messages; -import org.ehcache.clustered.common.internal.store.Util; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructArrayEncoder; +import org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; + +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.CONSISTENCY_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_RESOURCE_NAME_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_SIZE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_CONSISTENCY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_KEY_TYPE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_VALUE_TYPE_FIELD; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; class LifeCycleMessageCodec { - private static final byte OPCODE_SIZE = 1; + private static final String CONFIG_PRESENT_FIELD = "configPresent"; + private static final String DEFAULT_RESOURCE_FIELD = "defaultResource"; + private static final String POOLS_SUB_STRUCT = "pools"; + private static final 
String POOL_NAME_FIELD = "poolName"; + + private static final Struct POOLS_STRUCT = newStructBuilder() + .string(POOL_NAME_FIELD, 10) + .int64(POOL_SIZE_FIELD, 20) + .string(POOL_RESOURCE_NAME_FIELD, 30).build(); + + private static final Struct CONFIGURE_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .bool(CONFIG_PRESENT_FIELD, 30) + .string(DEFAULT_RESOURCE_FIELD, 40) + .structs(POOLS_SUB_STRUCT, 50, POOLS_STRUCT) + .build(); + + private static final Struct VALIDATE_MESSAGE_STRUCT = CONFIGURE_MESSAGE_STRUCT; + + private static final Struct CREATE_STORE_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .string(STORE_CONFIG_KEY_TYPE_FIELD, 40) + .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, 41) + .string(STORE_CONFIG_VALUE_TYPE_FIELD, 45) + .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, 46) + .enm(STORE_CONFIG_CONSISTENCY_FIELD, 50, CONSISTENCY_ENUM_MAPPING) + .int64(POOL_SIZE_FIELD, 60) + .string(POOL_RESOURCE_NAME_FIELD, 65) + .build(); + + private static final Struct VALIDATE_STORE_MESSAGE_STRUCT = CREATE_STORE_MESSAGE_STRUCT; + + private static final Struct DESTROY_STORE_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .build(); + + private static final Struct RELEASE_STORE_MESSAGE_STRUCTU = DESTROY_STORE_MESSAGE_STRUCT; + + private final MessageCodecUtils messageCodecUtils = new MessageCodecUtils(); public byte[] encode(LifecycleMessage message) { //For configure message id serves as message creation timestamp if (message instanceof LifecycleMessage.ConfigureStoreManager) { message.setId(System.nanoTime()); } - byte[] encodedMsg = Util.marshall(message); - ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + encodedMsg.length); - buffer.put(message.getOpCode()); - buffer.put(encodedMsg); - return buffer.array(); - } - - public EhcacheEntityMessage decode(byte[] payload) { - ByteBuffer message = ByteBuffer.wrap(payload); - byte[] encodedMsg = new byte[message.capacity() - OPCODE_SIZE]; - byte opCode = message.get(); - if (opCode == EhcacheEntityMessage.Type.LIFECYCLE_OP.getCode()) { - message.get(encodedMsg, 0, encodedMsg.length); - EhcacheEntityMessage entityMessage = (EhcacheEntityMessage) Util.unmarshall(encodedMsg); - return entityMessage; + + switch (message.getMessageType()) { + case CONFIGURE: + return encodeTierManagerConfigureMessage((LifecycleMessage.ConfigureStoreManager) message); + case VALIDATE: + return encodeTierManagerValidateMessage((LifecycleMessage.ValidateStoreManager) message); + case CREATE_SERVER_STORE: + return encodeCreateStoreMessage((LifecycleMessage.CreateServerStore) message); + case VALIDATE_SERVER_STORE: + return encodeValidateStoreMessage((LifecycleMessage.ValidateServerStore) message); + case DESTROY_SERVER_STORE: + return encodeDestroyStoreMessage((LifecycleMessage.DestroyServerStore) message); + case RELEASE_SERVER_STORE: + return encodeReleaseStoreMessage((LifecycleMessage.ReleaseServerStore) message); + default: + throw new IllegalArgumentException("Unknown lifecycle 
message: " + message.getClass()); + } + } + + private byte[] encodeReleaseStoreMessage(LifecycleMessage.ReleaseServerStore message) { + StructEncoder encoder = RELEASE_STORE_MESSAGE_STRUCTU.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); + return encoder.encode().array(); + } + + private byte[] encodeDestroyStoreMessage(LifecycleMessage.DestroyServerStore message) { + StructEncoder encoder = DESTROY_STORE_MESSAGE_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); + return encoder.encode().array(); + } + + private byte[] encodeCreateStoreMessage(LifecycleMessage.CreateServerStore message) { + StructEncoder encoder = CREATE_STORE_MESSAGE_STRUCT.encoder(); + return encodeBaseServerStoreMessage(message, encoder); + } + + private byte[] encodeValidateStoreMessage(LifecycleMessage.ValidateServerStore message) { + return encodeBaseServerStoreMessage(message, VALIDATE_STORE_MESSAGE_STRUCT.encoder()); + } + + private byte[] encodeBaseServerStoreMessage(LifecycleMessage.BaseServerStore message, StructEncoder encoder) { + messageCodecUtils.encodeMandatoryFields(encoder, message); + + encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); + messageCodecUtils.encodeServerStoreConfiguration(encoder, message.getStoreConfiguration()); + return encoder.encode().array(); + } + + private byte[] encodeTierManagerConfigureMessage(LifecycleMessage.ConfigureStoreManager message) { + return encodeTierManagerCreateOrValidate(message, message.getConfiguration(), CONFIGURE_MESSAGE_STRUCT.encoder()); + } + + private byte[] encodeTierManagerValidateMessage(LifecycleMessage.ValidateStoreManager message) { + return encodeTierManagerCreateOrValidate(message, message.getConfiguration(), VALIDATE_MESSAGE_STRUCT.encoder()); + } + + private byte[] encodeTierManagerCreateOrValidate(LifecycleMessage message, ServerSideConfiguration config, StructEncoder encoder) { + messageCodecUtils.encodeMandatoryFields(encoder, message); + encodeServerSideConfiguration(encoder, config); + return encoder.encode().array(); + } + + private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration) { + if (configuration == null) { + encoder.bool(CONFIG_PRESENT_FIELD, false); + } else { + encoder.bool(CONFIG_PRESENT_FIELD, true); + if (configuration.getDefaultServerResource() != null) { + encoder.string(DEFAULT_RESOURCE_FIELD, configuration.getDefaultServerResource()); + } + + if (!configuration.getResourcePools().isEmpty()) { + StructArrayEncoder poolsEncoder = encoder.structs(POOLS_SUB_STRUCT); + for (Map.Entry poolEntry : configuration.getResourcePools().entrySet()) { + poolsEncoder.string(POOL_NAME_FIELD, poolEntry.getKey()) + .int64(POOL_SIZE_FIELD, poolEntry.getValue().getSize()); + if (poolEntry.getValue().getServerResource() != null) { + poolsEncoder.string(POOL_RESOURCE_NAME_FIELD, poolEntry.getValue().getServerResource()); + } + poolsEncoder.next(); + } + poolsEncoder.end(); + } + } + } + + private ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder) { + boolean configPresent = decoder.bool(CONFIG_PRESENT_FIELD); + + if (configPresent) { + String defaultResource = decoder.string(DEFAULT_RESOURCE_FIELD); + + HashMap resourcePools = new HashMap(); + StructArrayDecoder poolStructs = decoder.structs(POOLS_SUB_STRUCT); + if (poolStructs != null) { + for (int i = 0; i < poolStructs.length(); i++) { 
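// Each pool entry is read positionally from the struct array: pool name, size, then the
// optional resource name, which is omitted when the pool uses the default server resource.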
+ String poolName = poolStructs.string(POOL_NAME_FIELD); + Long poolSize = poolStructs.int64(POOL_SIZE_FIELD); + String poolResourceName = poolStructs.string(POOL_RESOURCE_NAME_FIELD); + if (poolResourceName == null) { + resourcePools.put(poolName, new ServerSideConfiguration.Pool(poolSize)); + } else { + resourcePools.put(poolName, new ServerSideConfiguration.Pool(poolSize, poolResourceName)); + } + poolStructs.next(); + } + } + + ServerSideConfiguration serverSideConfiguration; + if (defaultResource == null) { + serverSideConfiguration = new ServerSideConfiguration(resourcePools); + } else { + serverSideConfiguration = new ServerSideConfiguration(defaultResource, resourcePools); + } + return serverSideConfiguration; } else { - throw new IllegalArgumentException("LifeCycleMessage operation not defined for : " + opCode); + return null; + } + } + + public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { + + switch (messageType) { + case CONFIGURE: + return decodeConfigureMessage(messageBuffer); + case VALIDATE: + return decodeValidateMessage(messageBuffer); + case CREATE_SERVER_STORE: + return decodeCreateServerStoreMessage(messageBuffer); + case VALIDATE_SERVER_STORE: + return decodeValidateServerStoreMessage(messageBuffer); + case DESTROY_SERVER_STORE: + return decodeDestroyServerStoreMessage(messageBuffer); + case RELEASE_SERVER_STORE: + return decodeReleaseServerStoreMessage(messageBuffer); + } + throw new IllegalArgumentException("LifeCycleMessage operation not defined for : " + messageType); + } + + private LifecycleMessage.ReleaseServerStore decodeReleaseServerStoreMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = RELEASE_STORE_MESSAGE_STRUCTU.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID cliendId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + + LifecycleMessage.ReleaseServerStore message = new LifecycleMessage.ReleaseServerStore(storeName, cliendId); + message.setId(msgId); + return message; + } + + private LifecycleMessage.DestroyServerStore decodeDestroyServerStoreMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = DESTROY_STORE_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID cliendId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + + LifecycleMessage.DestroyServerStore message = new LifecycleMessage.DestroyServerStore(storeName, cliendId); + message.setId(msgId); + return message; + } + + private LifecycleMessage.ValidateServerStore decodeValidateServerStoreMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = VALIDATE_STORE_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID cliendId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + ServerStoreConfiguration config = messageCodecUtils.decodeServerStoreConfiguration(decoder); + + LifecycleMessage.ValidateServerStore message = new LifecycleMessage.ValidateServerStore(storeName, config, cliendId); + message.setId(msgId); + return message; + } + + private LifecycleMessage.CreateServerStore decodeCreateServerStoreMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = CREATE_STORE_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID cliendId = messageCodecUtils.decodeUUID(decoder); + + String storeName = 
decoder.string(SERVER_STORE_NAME_FIELD); + ServerStoreConfiguration config = messageCodecUtils.decodeServerStoreConfiguration(decoder); + + LifecycleMessage.CreateServerStore message = new LifecycleMessage.CreateServerStore(storeName, config, cliendId); + message.setId(msgId); + return message; + } + + private LifecycleMessage.ValidateStoreManager decodeValidateMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = VALIDATE_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID cliendId = messageCodecUtils.decodeUUID(decoder); + + ServerSideConfiguration config = decodeServerSideConfiguration(decoder); + + LifecycleMessage.ValidateStoreManager message = new LifecycleMessage.ValidateStoreManager(config, cliendId); + if (msgId != null) { + message.setId(msgId); + } + return message; + } + + private LifecycleMessage.ConfigureStoreManager decodeConfigureMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = CONFIGURE_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + ServerSideConfiguration config = decodeServerSideConfiguration(decoder); + + LifecycleMessage.ConfigureStoreManager message = new LifecycleMessage.ConfigureStoreManager(config, clientId); + if (msgId != null) { + message.setId(msgId); } + return message; } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java index c099cdbfd7..fe4c79ec61 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageFactory.java @@ -25,27 +25,27 @@ public class LifeCycleMessageFactory { private UUID clientId; - public EhcacheEntityMessage validateStoreManager(ServerSideConfiguration configuration){ + public LifecycleMessage validateStoreManager(ServerSideConfiguration configuration){ return new LifecycleMessage.ValidateStoreManager(configuration, clientId); } - public EhcacheEntityMessage configureStoreManager(ServerSideConfiguration configuration) { + public LifecycleMessage configureStoreManager(ServerSideConfiguration configuration) { return new LifecycleMessage.ConfigureStoreManager(configuration, clientId); } - public EhcacheEntityMessage createServerStore(String name, ServerStoreConfiguration serverStoreConfiguration) { + public LifecycleMessage createServerStore(String name, ServerStoreConfiguration serverStoreConfiguration) { return new LifecycleMessage.CreateServerStore(name, serverStoreConfiguration, clientId); } - public EhcacheEntityMessage validateServerStore(String name, ServerStoreConfiguration serverStoreConfiguration) { + public LifecycleMessage validateServerStore(String name, ServerStoreConfiguration serverStoreConfiguration) { return new LifecycleMessage.ValidateServerStore(name, serverStoreConfiguration, clientId); } - public EhcacheEntityMessage releaseServerStore(String name) { + public LifecycleMessage releaseServerStore(String name) { return new LifecycleMessage.ReleaseServerStore(name, clientId); } - public EhcacheEntityMessage destroyServerStore(String name) { + public LifecycleMessage destroyServerStore(String name) { return new LifecycleMessage.DestroyServerStore(name, clientId); } diff --git 
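Every codec introduced in this patch follows the same round trip: define a Struct once with named, indexed fields; encode by writing those fields through a StructEncoder and returning encoder.encode().array(); decode by walking a StructDecoder over the incoming ByteBuffer in declaration order. Below is a minimal, self-contained sketch of that shape for a hypothetical message carrying a message id, a client UUID (sent as two longs, as MessageCodecUtils does) and a store name; the real structs additionally lead with the message-type enum field. The class, field names and indices are illustrative only.

import java.nio.ByteBuffer;
import java.util.UUID;

import org.terracotta.runnel.Struct;
import org.terracotta.runnel.decoding.StructDecoder;
import org.terracotta.runnel.encoding.StructEncoder;

import static org.terracotta.runnel.StructBuilder.newStructBuilder;

// Illustrative sketch of the encode/decode round trip used by the real codecs above.
class DemoStoreNameCodec {

  private static final Struct DEMO_STRUCT = newStructBuilder()
      .int64("msgId", 10)
      .int64("msbUUID", 20)        // the client id travels as its two long halves
      .int64("lsbUUID", 21)
      .string("storeName", 30)
      .build();

  byte[] encode(long msgId, UUID clientId, String storeName) {
    StructEncoder encoder = DEMO_STRUCT.encoder();
    encoder.int64("msgId", msgId)
        .int64("msbUUID", clientId.getMostSignificantBits())
        .int64("lsbUUID", clientId.getLeastSignificantBits())
        .string("storeName", storeName);
    return encoder.encode().array();
  }

  String decodeStoreName(ByteBuffer buffer) {
    StructDecoder decoder = DEMO_STRUCT.decoder(buffer);
    Long msgId = decoder.int64("msgId");                                          // read in declaration order
    UUID clientId = new UUID(decoder.int64("msbUUID"), decoder.int64("lsbUUID")); // rebuild the client id
    return decoder.string("storeName");
  }
}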
a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java index e3f783c114..27adffc26e 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java @@ -66,11 +66,6 @@ public Type getType() { public abstract LifeCycleOp operation(); - @Override - public String toString() { - return getType() + "#" + operation(); - } - public static class ValidateStoreManager extends LifecycleMessage { private static final long serialVersionUID = 5742152283115139745L; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java new file mode 100644 index 0000000000..29fa43c7ae --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java @@ -0,0 +1,106 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.terracotta.runnel.EnumMapping; +import org.terracotta.runnel.decoding.Enm; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.util.UUID; + +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; + +/** + * MessageCodecUtils + */ +class MessageCodecUtils { + + static final String MSG_ID_FIELD = "msgId"; + static final String LSB_UUID_FIELD = "lsbUUID"; + static final String MSB_UUID_FIELD = "msbUUID"; + static final String SERVER_STORE_NAME_FIELD = "serverStoreName"; + static final String KEY_FIELD = "key"; + static final String STORE_CONFIG_KEY_TYPE_FIELD = "keyType"; + static final String STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD = "keySerializerType"; + static final String STORE_CONFIG_VALUE_TYPE_FIELD = "valueType"; + static final String STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD = "valueSerializerType"; + static final String STORE_CONFIG_CONSISTENCY_FIELD = "consistency"; + static final String POOL_SIZE_FIELD = "poolSize"; + static final String POOL_RESOURCE_NAME_FIELD = "resourceName"; + + static final EnumMapping CONSISTENCY_ENUM_MAPPING = newEnumMappingBuilder(Consistency.class) + .mapping(Consistency.EVENTUAL, 1) + .mapping(Consistency.STRONG, 2) + .build(); + + void encodeMandatoryFields(StructEncoder encoder, EhcacheOperationMessage message) { + encoder.enm(EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .int64(MSG_ID_FIELD, message.getId()) + .int64(MSB_UUID_FIELD, 
message.getClientId().getMostSignificantBits()) + .int64(LSB_UUID_FIELD, message.getClientId().getLeastSignificantBits()); + } + + UUID decodeUUID(StructDecoder decoder) { + return new UUID(decoder.int64(MSB_UUID_FIELD), decoder.int64(LSB_UUID_FIELD)); + } + + void encodeServerStoreConfiguration(StructEncoder encoder, ServerStoreConfiguration configuration) { + encoder.string(STORE_CONFIG_KEY_TYPE_FIELD, configuration.getStoredKeyType()) + .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, configuration.getKeySerializerType()) + .string(STORE_CONFIG_VALUE_TYPE_FIELD, configuration.getStoredValueType()) + .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, configuration.getValueSerializerType()); + if (configuration.getConsistency() != null) { + encoder.enm(STORE_CONFIG_CONSISTENCY_FIELD, configuration.getConsistency()); + } + + PoolAllocation poolAllocation = configuration.getPoolAllocation(); + if (poolAllocation instanceof PoolAllocation.Dedicated) { + PoolAllocation.Dedicated dedicatedPool = (PoolAllocation.Dedicated) poolAllocation; + encoder.int64(POOL_SIZE_FIELD, dedicatedPool.getSize()); + if (dedicatedPool.getResourceName() != null) { + encoder.string(POOL_RESOURCE_NAME_FIELD, dedicatedPool.getResourceName()); + } + } else if (poolAllocation instanceof PoolAllocation.Shared) { + encoder.string(POOL_RESOURCE_NAME_FIELD, ((PoolAllocation.Shared) poolAllocation).getResourcePoolName()); + } + } + + ServerStoreConfiguration decodeServerStoreConfiguration(StructDecoder decoder) { + String keyType = decoder.string(STORE_CONFIG_KEY_TYPE_FIELD); + String keySerializer = decoder.string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD); + String valueType = decoder.string(STORE_CONFIG_VALUE_TYPE_FIELD); + String valueSerializer = decoder.string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD); + Enm consistencyEnm = decoder.enm(STORE_CONFIG_CONSISTENCY_FIELD); + Consistency consistency = Consistency.EVENTUAL; + if (consistencyEnm.isValid()) { + consistency = consistencyEnm.get(); + } + Long poolSize = decoder.int64(POOL_SIZE_FIELD); + String poolResource = decoder.string(POOL_RESOURCE_NAME_FIELD); + PoolAllocation poolAllocation = new PoolAllocation.Unknown(); + if (poolSize != null) { + poolAllocation = new PoolAllocation.Dedicated(poolResource, poolSize); + } else if (poolResource != null) { + poolAllocation = new PoolAllocation.Shared(poolResource); + } + return new ServerStoreConfiguration(poolAllocation, keyType, valueType, null, null, keySerializer, valueSerializer, consistency); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java index 42d83731e8..574c4919b3 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.common.internal.messages; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.store.Chain; import java.util.UUID; @@ -72,11 +73,6 @@ public byte getOpCode() { return operation().getReplicationOpCode(); } - @Override - public EhcacheMessageType getMessageType() { - return EhcacheMessageType.CLIENT_ID_TRACK_OP; - } - @Override public void setId(long id) { throw new UnsupportedOperationException("This method is not supported on 
replication message"); @@ -103,6 +99,11 @@ public long getId() { public UUID getClientId() { return clientId; } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CLIENT_ID_TRACK_OP; + } } public static class ChainReplicationMessage extends ClientIDTrackerMessage implements ConcurrentEntityMessage { @@ -172,6 +173,11 @@ public ReplicationOp operation() { return ReplicationOp.CLEAR_INVALIDATION_COMPLETE; } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CLEAR_INVALIDATION_COMPLETE; + } + public String getCacheId() { return cacheId; } @@ -195,27 +201,65 @@ public ReplicationOp operation() { return ReplicationOp.INVALIDATION_COMPLETE; } + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.INVALIDATION_COMPLETE; + } + public long getKey() { return key; } } - public static class ServerStoreLifeCycleReplicationMessage extends ClientIDTrackerMessage { + public static class CreateServerStoreReplicationMessage extends ClientIDTrackerMessage { + + private final String storeName; + private final ServerStoreConfiguration storeConfiguration; + + public CreateServerStoreReplicationMessage(LifecycleMessage.CreateServerStore createMessage) { + this(createMessage.getId(), createMessage.getClientId(), createMessage.getName(), createMessage.getStoreConfiguration()); + } + + public CreateServerStoreReplicationMessage(long msgId, UUID clientId, String storeName, ServerStoreConfiguration configuration) { + super(msgId, clientId); + this.storeName = storeName; + this.storeConfiguration = configuration; + } + + public String getStoreName() { + return storeName; + } + + public ServerStoreConfiguration getStoreConfiguration() { + return storeConfiguration; + } + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.CREATE_SERVER_STORE_REPLICATION; + } + } + + public static class DestroyServerStoreReplicationMessage extends ClientIDTrackerMessage { - private final LifecycleMessage message; + private final String storeName; - public ServerStoreLifeCycleReplicationMessage(LifecycleMessage message) { - super(message.getId(), message.getClientId()); - this.message = message; + public DestroyServerStoreReplicationMessage(LifecycleMessage.DestroyServerStore destroyMessage) { + this(destroyMessage.getId(), destroyMessage.getClientId(), destroyMessage.getName()); } - public LifecycleMessage getMessage() { - return message; + public DestroyServerStoreReplicationMessage(long msgId, UUID clientId, String storeName) { + super(msgId, clientId); + this.storeName = storeName; + } + + public String getStoreName() { + return storeName; } @Override - public ReplicationOp operation() { - return ReplicationOp.SERVER_STORE_LIFECYCLE_REPLICATION_OP; + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.DESTROY_SERVER_STORE_REPLICATION; } } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java index 6019107789..65f724e22e 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java @@ -16,111 +16,251 @@ package org.ehcache.clustered.common.internal.messages; -import 
org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Util; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; import java.util.UUID; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.CONSISTENCY_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_RESOURCE_NAME_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_SIZE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_CONSISTENCY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_KEY_TYPE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_VALUE_TYPE_FIELD; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +// TODO move all this to server side - no use in common class PassiveReplicationMessageCodec { - private static final byte OP_CODE_SIZE = 1; - private static final byte CACHE_ID_LEN_SIZE = 4; - private static final byte KEY_SIZE = 8; - private static final byte MESSAGE_ID_SIZE = 24; + private static final String CHAIN_FIELD = "chain"; + + private static final Struct CLIENT_ID_TRACK_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .build(); + + private static final Struct CHAIN_REPLICATION_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .int64(KEY_FIELD, 40) + .struct(CHAIN_FIELD, 45, ChainCodec.CHAIN_STRUCT) + .build(); + + private static final Struct CLEAR_INVALIDATION_COMPLETE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .build(); - private ChainCodec chainCodec = new ChainCodec(); + private static final Struct INVALIDATION_COMPLETE_STRUCT = 
newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int64(KEY_FIELD, 30) + .build(); + + private static final Struct CREATE_SERVER_STORE_REPLICATION_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .string(STORE_CONFIG_KEY_TYPE_FIELD, 40) + .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, 41) + .string(STORE_CONFIG_VALUE_TYPE_FIELD, 45) + .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, 46) + .enm(STORE_CONFIG_CONSISTENCY_FIELD, 50, CONSISTENCY_ENUM_MAPPING) + .int64(POOL_SIZE_FIELD, 60) + .string(POOL_RESOURCE_NAME_FIELD, 65) + .build(); + + private static final Struct DESTROY_SERVER_STORE_REPLICATION_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .build(); + + private final ChainCodec chainCodec = new ChainCodec(); + private final MessageCodecUtils messageCodecUtils = new MessageCodecUtils(); public byte[] encode(PassiveReplicationMessage message) { - ByteBuffer encodedMsg; - switch (message.operation()) { - case CLIENTID_TRACK_OP: - encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + MESSAGE_ID_SIZE); - encodedMsg.put(message.getOpCode()); - encodedMsg.put(ClusteredEhcacheIdentity.serialize(message.getClientId())); - encodedMsg.putLong(message.getId()); - return encodedMsg.array(); + switch (message.getMessageType()) { + case CLIENT_ID_TRACK_OP: + return encodeClientIdTrackMessage((PassiveReplicationMessage.ClientIDTrackerMessage) message); case CHAIN_REPLICATION_OP: - PassiveReplicationMessage.ChainReplicationMessage chainReplicationMessage = (PassiveReplicationMessage.ChainReplicationMessage)message; - byte[] encodedChain = chainCodec.encode(chainReplicationMessage.getChain()); - int cacheIdLen = chainReplicationMessage.getCacheId().length(); - encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + MESSAGE_ID_SIZE + encodedChain.length + 2 * cacheIdLen); - encodedMsg.put(chainReplicationMessage.getOpCode()); - encodedMsg.put(ClusteredEhcacheIdentity.serialize(chainReplicationMessage.getClientId())); - encodedMsg.putLong(chainReplicationMessage.getId()); - encodedMsg.putInt(cacheIdLen); - CodecUtil.putStringAsCharArray(encodedMsg, chainReplicationMessage.getCacheId()); - encodedMsg.putLong(chainReplicationMessage.getKey()); - encodedMsg.put(encodedChain); - return encodedMsg.array(); + return encodeChainReplicationMessage((PassiveReplicationMessage.ChainReplicationMessage) message); case CLEAR_INVALIDATION_COMPLETE: - PassiveReplicationMessage.ClearInvalidationCompleteMessage clearInvalidationCompleteMessage = (PassiveReplicationMessage.ClearInvalidationCompleteMessage)message; - encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + 2 * clearInvalidationCompleteMessage.getCacheId().length()); - encodedMsg.put(message.getOpCode()); - CodecUtil.putStringAsCharArray(encodedMsg, clearInvalidationCompleteMessage.getCacheId()); - return encodedMsg.array(); + return encodeClearInvalidationCompleteMessage((PassiveReplicationMessage.ClearInvalidationCompleteMessage) message); case INVALIDATION_COMPLETE: - PassiveReplicationMessage.InvalidationCompleteMessage invalidationCompleteMessage = 
(PassiveReplicationMessage.InvalidationCompleteMessage)message; - encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + KEY_SIZE + 2 * invalidationCompleteMessage.getCacheId().length()); - encodedMsg.put(message.getOpCode()); - encodedMsg.putLong(invalidationCompleteMessage.getKey()); - CodecUtil.putStringAsCharArray(encodedMsg, invalidationCompleteMessage.getCacheId()); - return encodedMsg.array(); - case SERVER_STORE_LIFECYCLE_REPLICATION_OP: - PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage storeLifeCycleReplicationMessage = (PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage)message; - byte[] encodedLifeCycleMsg = Util.marshall(storeLifeCycleReplicationMessage.getMessage()); - encodedMsg = ByteBuffer.allocate(OP_CODE_SIZE + encodedLifeCycleMsg.length); - encodedMsg.put(message.getOpCode()); - encodedMsg.put(encodedLifeCycleMsg); - return encodedMsg.array(); + return encodeInvalidationCompleteMessage((PassiveReplicationMessage.InvalidationCompleteMessage) message); + case CREATE_SERVER_STORE_REPLICATION: + return encodeCreateServerStoreReplicationMessage((PassiveReplicationMessage.CreateServerStoreReplicationMessage) message); + case DESTROY_SERVER_STORE_REPLICATION: + return encoreDestroyServerStoreReplicationMessage((PassiveReplicationMessage.DestroyServerStoreReplicationMessage) message); default: - throw new UnsupportedOperationException("This operation is not supported : " + message.operation()); + throw new UnsupportedOperationException("This operation is not supported : " + message.getMessageType()); } + } + + private byte[] encoreDestroyServerStoreReplicationMessage(PassiveReplicationMessage.DestroyServerStoreReplicationMessage message) { + StructEncoder encoder = DESTROY_SERVER_STORE_REPLICATION_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getStoreName()); + + return encoder.encode().array(); + } + + private byte[] encodeCreateServerStoreReplicationMessage(PassiveReplicationMessage.CreateServerStoreReplicationMessage message) { + StructEncoder encoder = CREATE_SERVER_STORE_REPLICATION_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getStoreName()); + messageCodecUtils.encodeServerStoreConfiguration(encoder, message.getStoreConfiguration()); + + return encoder.encode().array(); + } + + private byte[] encodeInvalidationCompleteMessage(PassiveReplicationMessage.InvalidationCompleteMessage message) { + StructEncoder encoder = INVALIDATION_COMPLETE_STRUCT.encoder(); + + encoder.enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .string(SERVER_STORE_NAME_FIELD, message.getCacheId()) + .int64(KEY_FIELD, message.getKey()); + + return encoder.encode().array(); + } + + private byte[] encodeClearInvalidationCompleteMessage(PassiveReplicationMessage.ClearInvalidationCompleteMessage message) { + StructEncoder encoder = CLEAR_INVALIDATION_COMPLETE_STRUCT.encoder(); + + encoder.enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .string(SERVER_STORE_NAME_FIELD, message.getCacheId()); + + return encoder.encode().array(); + } + + private byte[] encodeChainReplicationMessage(PassiveReplicationMessage.ChainReplicationMessage message) { + StructEncoder encoder = CHAIN_REPLICATION_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + + encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); + encoder.int64(KEY_FIELD, message.getKey()); + 
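// The chain is no longer marshalled to a separate byte[]: ChainCodec now writes it directly
// into the CHAIN_FIELD sub-struct declared in CHAIN_REPLICATION_STRUCT via encoder.struct().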
chainCodec.encode(encoder.struct(CHAIN_FIELD), message.getChain()); + + return encoder.encode().array(); + } + private byte[] encodeClientIdTrackMessage(PassiveReplicationMessage.ClientIDTrackerMessage message) { + StructEncoder encoder = CLIENT_ID_TRACK_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + + return encoder.encode().array(); } - public EhcacheEntityMessage decode(byte[] payload) { - ByteBuffer byteBuffer = ByteBuffer.wrap(payload); - PassiveReplicationMessage.ReplicationOp replicationOp = PassiveReplicationMessage.ReplicationOp.getReplicationOp(byteBuffer.get()); - UUID clientId; - long msgId; - String cacheId; - long key; - switch (replicationOp) { + public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { + + switch (messageType) { + case CLIENT_ID_TRACK_OP: + return decodeClientIdTrackMessage(messageBuffer); case CHAIN_REPLICATION_OP: - clientId = getClientId(byteBuffer); - msgId = byteBuffer.getLong(); - int length = byteBuffer.getInt(); - cacheId = CodecUtil.getStringFromBuffer(byteBuffer, length); - key = byteBuffer.getLong(); - byte[] encodedChain = new byte[byteBuffer.remaining()]; - byteBuffer.get(encodedChain); - Chain chain = chainCodec.decode(encodedChain); - return new PassiveReplicationMessage.ChainReplicationMessage(cacheId, key, chain, msgId, clientId); - case CLIENTID_TRACK_OP: - clientId = getClientId(byteBuffer); - msgId = byteBuffer.getLong(); - return new PassiveReplicationMessage.ClientIDTrackerMessage(msgId, clientId); + return decodeChainReplicationMessage(messageBuffer); case CLEAR_INVALIDATION_COMPLETE: - cacheId = CodecUtil.getStringFromBuffer(byteBuffer, byteBuffer.remaining()/2); - return new PassiveReplicationMessage.ClearInvalidationCompleteMessage(cacheId); + return decodeClearInvalidationCompleteMessage(messageBuffer); case INVALIDATION_COMPLETE: - key = byteBuffer.getLong(); - cacheId = CodecUtil.getStringFromBuffer(byteBuffer, byteBuffer.remaining()/2); - return new PassiveReplicationMessage.InvalidationCompleteMessage(cacheId, key); - case SERVER_STORE_LIFECYCLE_REPLICATION_OP: - byte[] encodedLifeCycle = new byte[byteBuffer.remaining()]; - byteBuffer.get(encodedLifeCycle); - LifecycleMessage lifecycleMessage = (LifecycleMessage)Util.unmarshall(encodedLifeCycle); - return new PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage(lifecycleMessage); + return decodeInvalidationCompleteMessage(messageBuffer); + case CREATE_SERVER_STORE_REPLICATION: + return decodeCreateServerStoreReplicationMessage(messageBuffer); + case DESTROY_SERVER_STORE_REPLICATION: + return decodeDestroyServerStoreReplicationMessage(messageBuffer); default: - throw new UnsupportedOperationException("This operation code is not supported : " + replicationOp); + throw new UnsupportedOperationException("Unknown message type: " + messageType); } } + private PassiveReplicationMessage.DestroyServerStoreReplicationMessage decodeDestroyServerStoreReplicationMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = DESTROY_SERVER_STORE_REPLICATION_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + + return new PassiveReplicationMessage.DestroyServerStoreReplicationMessage(msgId, clientId, storeName); + } + + private PassiveReplicationMessage.CreateServerStoreReplicationMessage decodeCreateServerStoreReplicationMessage(ByteBuffer messageBuffer) { + 
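// Fields are read back in the order CREATE_SERVER_STORE_REPLICATION_STRUCT declares them:
// message id, the two UUID halves, the store name, then the server store configuration.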
StructDecoder decoder = CREATE_SERVER_STORE_REPLICATION_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + ServerStoreConfiguration configuration = messageCodecUtils.decodeServerStoreConfiguration(decoder); + + return new PassiveReplicationMessage.CreateServerStoreReplicationMessage(msgId, clientId, storeName, configuration); + } + + private PassiveReplicationMessage.InvalidationCompleteMessage decodeInvalidationCompleteMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = INVALIDATION_COMPLETE_STRUCT.decoder(messageBuffer); + + String storeId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + + return new PassiveReplicationMessage.InvalidationCompleteMessage(storeId, key); + } + + private PassiveReplicationMessage.ClearInvalidationCompleteMessage decodeClearInvalidationCompleteMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = CLEAR_INVALIDATION_COMPLETE_STRUCT.decoder(messageBuffer); + return new PassiveReplicationMessage.ClearInvalidationCompleteMessage(decoder.string(SERVER_STORE_NAME_FIELD)); + } + + private PassiveReplicationMessage.ChainReplicationMessage decodeChainReplicationMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = CHAIN_REPLICATION_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + + Chain chain = chainCodec.decode(decoder.struct(CHAIN_FIELD)); + + return new PassiveReplicationMessage.ChainReplicationMessage(cacheId, key, chain, msgId, clientId); + } + + private PassiveReplicationMessage.ClientIDTrackerMessage decodeClientIdTrackMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = CLIENT_ID_TRACK_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + return new PassiveReplicationMessage.ClientIDTrackerMessage(msgId, clientId); + } + private static UUID getClientId(ByteBuffer payload) { long msb = payload.getLong(); long lsb = payload.getLong(); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java index edbd11ecef..710fd86dfa 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreMessageFactory.java @@ -31,27 +31,27 @@ public ServerStoreMessageFactory(String cacheId, UUID clientId) { this.clientId = clientId; } - public EhcacheEntityMessage getOperation(long key) { + public ServerStoreOpMessage.GetMessage getOperation(long key) { return new ServerStoreOpMessage.GetMessage(this.cacheId, key); } - public EhcacheEntityMessage getAndAppendOperation(long key, ByteBuffer payload) { + public ServerStoreOpMessage.GetAndAppendMessage getAndAppendOperation(long key, ByteBuffer payload) { return new ServerStoreOpMessage.GetAndAppendMessage(this.cacheId, key, payload, clientId); } - public EhcacheEntityMessage appendOperation(long key, ByteBuffer payload) { + public ServerStoreOpMessage.AppendMessage appendOperation(long key, ByteBuffer payload) { return new 
ServerStoreOpMessage.AppendMessage(this.cacheId, key, payload, clientId); } - public EhcacheEntityMessage replaceAtHeadOperation(long key, Chain expect, Chain update) { + public ServerStoreOpMessage.ReplaceAtHeadMessage replaceAtHeadOperation(long key, Chain expect, Chain update) { return new ServerStoreOpMessage.ReplaceAtHeadMessage(this.cacheId, key, expect, update, clientId); } - public EhcacheEntityMessage clientInvalidationAck(int invalidationId) { + public ServerStoreOpMessage.ClientInvalidationAck clientInvalidationAck(int invalidationId) { return new ServerStoreOpMessage.ClientInvalidationAck(this.cacheId, invalidationId); } - public EhcacheEntityMessage clearOperation() { + public ServerStoreOpMessage.ClearMessage clearOperation() { return new ServerStoreOpMessage.ClearMessage(this.cacheId, clientId); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java index bdb525484b..67a8157e2b 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java @@ -16,173 +16,241 @@ package org.ehcache.clustered.common.internal.messages; -import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.AppendMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClearMessage; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetAndAppendMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.GetMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ReplaceAtHeadMessage; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ClientInvalidationAck; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp; +import org.ehcache.clustered.common.internal.store.Chain; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; +import org.terracotta.runnel.encoding.StructEncoderFunction; import java.nio.ByteBuffer; import java.util.UUID; +import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; + class ServerStoreOpCodec { - private static final byte STORE_OP_CODE_SIZE = 1; - private static 
final byte CACHE_ID_LEN_SIZE = 4; - private static final byte KEY_SIZE = 8; - private static final byte CHAIN_LEN_SIZE = 4; - private static final byte INVALIDATION_ID_LEN_SIZE = 4; - private static final byte MESSAGE_ID_SIZE = 24; + private static final Struct GET_AND_APPEND_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .int64(KEY_FIELD, 40) + .byteBuffer("payload", 50) + .build(); + + private static final Struct APPEND_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .int64(KEY_FIELD, 40) + .byteBuffer("payload", 50) + .build(); + + private static final Struct REPLACE_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .int64(KEY_FIELD, 40) + .struct("expect", 50, CHAIN_STRUCT) + .struct("update", 60, CHAIN_STRUCT) + .build(); + + private static final Struct CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .string(SERVER_STORE_NAME_FIELD, 30) + .int32("invalidationId", 40) + .build(); + + private static final Struct CLEAR_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .build(); + + private static final Struct GET_MESSAGE_STRUCT = StructBuilder.newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .string(SERVER_STORE_NAME_FIELD, 30) + .int64(KEY_FIELD, 40) + .build(); private final ChainCodec chainCodec; + private final MessageCodecUtils messageCodecUtils = new MessageCodecUtils(); ServerStoreOpCodec() { this.chainCodec = new ChainCodec(); } public byte[] encode(ServerStoreOpMessage message) { - // TODO: improve data send over n/w by optimizing cache Id - ByteBuffer encodedMsg; - int cacheIdLen = message.getCacheId().length(); - switch (message.operation()) { - case GET: - GetMessage getMessage = (GetMessage)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + KEY_SIZE + 2 * cacheIdLen); - encodedMsg.put(getMessage.getOpCode()); - encodedMsg.putLong(getMessage.getKey()); - CodecUtil.putStringAsCharArray(encodedMsg, getMessage.getCacheId()); - return encodedMsg.array(); + StructEncoder encoder = null; + + switch (message.getMessageType()) { + case GET_STORE: + GetMessage getMessage = (GetMessage) message; + encoder = GET_MESSAGE_STRUCT.encoder(); + return encoder + .enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .int64(MSG_ID_FIELD, message.getId()) + .string(SERVER_STORE_NAME_FIELD, getMessage.getCacheId()) + .int64(KEY_FIELD, getMessage.getKey()) + .encode() + .array(); case APPEND: - AppendMessage appendMessage = (AppendMessage)message; - encodedMsg = 
ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + MESSAGE_ID_SIZE + 2 * cacheIdLen + appendMessage - .getPayload() - .remaining()); - putCacheIdKeyAndOpCode(encodedMsg, appendMessage, appendMessage.getKey()); - encodedMsg.put(appendMessage.getPayload()); - return encodedMsg.array(); + AppendMessage appendMessage = (AppendMessage) message; + encoder = APPEND_MESSAGE_STRUCT.encoder(); + messageCodecUtils.encodeMandatoryFields(encoder, message); + return encoder + .string(SERVER_STORE_NAME_FIELD, appendMessage.getCacheId()) + .int64(KEY_FIELD, appendMessage.getKey()) + .byteBuffer("payload", appendMessage.getPayload()) + .encode() + .array(); case GET_AND_APPEND: - GetAndAppendMessage getAndAppendMessage = (GetAndAppendMessage)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + MESSAGE_ID_SIZE + 2 * cacheIdLen + - getAndAppendMessage.getPayload().remaining()); - putCacheIdKeyAndOpCode(encodedMsg, getAndAppendMessage, getAndAppendMessage.getKey()); - encodedMsg.put(getAndAppendMessage.getPayload()); - return encodedMsg.array(); + GetAndAppendMessage getAndAppendMessage = (GetAndAppendMessage) message; + encoder = GET_AND_APPEND_MESSAGE_STRUCT.encoder(); + messageCodecUtils.encodeMandatoryFields(encoder, message); + return encoder + .string(SERVER_STORE_NAME_FIELD, getAndAppendMessage.getCacheId()) + .int64(KEY_FIELD, getAndAppendMessage.getKey()) + .byteBuffer("payload", getAndAppendMessage.getPayload()) + .encode() + .array(); case REPLACE: - ReplaceAtHeadMessage replaceAtHeadMessage = (ReplaceAtHeadMessage)message; - byte[] encodedExpectedChain = chainCodec.encode(replaceAtHeadMessage.getExpect()); - byte[] encodedUpdatedChain = chainCodec.encode(replaceAtHeadMessage.getUpdate()); - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + CACHE_ID_LEN_SIZE + KEY_SIZE + MESSAGE_ID_SIZE + 2 * cacheIdLen + - CHAIN_LEN_SIZE + encodedExpectedChain.length + encodedUpdatedChain.length); - putCacheIdKeyAndOpCode(encodedMsg, replaceAtHeadMessage, replaceAtHeadMessage.getKey()); - encodedMsg.putInt(encodedExpectedChain.length); - encodedMsg.put(encodedExpectedChain); - encodedMsg.put(encodedUpdatedChain); - return encodedMsg.array(); + final ReplaceAtHeadMessage replaceAtHeadMessage = (ReplaceAtHeadMessage) message; + encoder = REPLACE_MESSAGE_STRUCT.encoder(); + messageCodecUtils.encodeMandatoryFields(encoder, message); + return encoder + .string(SERVER_STORE_NAME_FIELD, replaceAtHeadMessage.getCacheId()) + .int64(KEY_FIELD, replaceAtHeadMessage.getKey()) + .struct("expect", new StructEncoderFunction() { + @Override + public void encode(StructEncoder encoder) { + Chain expect = replaceAtHeadMessage.getExpect(); + chainCodec.encode(encoder, expect); + } + }) + .struct("update", new StructEncoderFunction() { + @Override + public void encode(StructEncoder encoder) { + Chain update = replaceAtHeadMessage.getUpdate(); + chainCodec.encode(encoder, update); + } + }) + .encode() + .array(); case CLIENT_INVALIDATION_ACK: - ClientInvalidationAck clientInvalidationAck = (ClientInvalidationAck)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + INVALIDATION_ID_LEN_SIZE + 2 * cacheIdLen); - encodedMsg.put(clientInvalidationAck.getOpCode()); - encodedMsg.putInt(clientInvalidationAck.getInvalidationId()); - CodecUtil.putStringAsCharArray(encodedMsg, clientInvalidationAck.getCacheId()); - return encodedMsg.array(); + ClientInvalidationAck clientInvalidationAckMessage = (ClientInvalidationAck) message; + encoder = 
CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT.encoder(); + return encoder + .enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .int64(MSG_ID_FIELD, message.getId()) + .string(SERVER_STORE_NAME_FIELD, clientInvalidationAckMessage.getCacheId()) + .int32("invalidationId", clientInvalidationAckMessage.getInvalidationId()) + .encode() + .array(); case CLEAR: - ClearMessage clearMessage = (ClearMessage)message; - encodedMsg = ByteBuffer.allocate(STORE_OP_CODE_SIZE + MESSAGE_ID_SIZE + 2 * cacheIdLen); - encodedMsg.put(clearMessage.getOpCode()); - encodedMsg.put(ClusteredEhcacheIdentity.serialize(message.getClientId())); - encodedMsg.putLong(message.getId()); - CodecUtil.putStringAsCharArray(encodedMsg, clearMessage.getCacheId()); - return encodedMsg.array(); + ClearMessage clearMessage = (ClearMessage) message; + encoder = CLEAR_MESSAGE_STRUCT.encoder(); + messageCodecUtils.encodeMandatoryFields(encoder, message); + return encoder + .string(SERVER_STORE_NAME_FIELD, clearMessage.getCacheId()) + .encode() + .array(); default: - throw new UnsupportedOperationException("This operation is not supported : " + message.operation()); + throw new RuntimeException("Unhandled message operation : " + message.operation()); } } - // This assumes correct allocation and puts extracts common code - private static void putCacheIdKeyAndOpCode(ByteBuffer byteBuffer, ServerStoreOpMessage message, long key) { - byteBuffer.put(message.getOpCode()); - byteBuffer.put(ClusteredEhcacheIdentity.serialize(message.getClientId())); - byteBuffer.putLong(message.getId()); - byteBuffer.putInt(message.getCacheId().length()); - CodecUtil.putStringAsCharArray(byteBuffer, message.getCacheId()); - byteBuffer.putLong(key); - } - - public EhcacheEntityMessage decode(byte[] payload) { - ByteBuffer msg = ByteBuffer.wrap(payload); - byte opCode = msg.get(); - ServerStoreOp storeOp = ServerStoreOp.getServerStoreOp(opCode); - - long key; - String cacheId; - UUID clientId; - long msgId; - - EhcacheEntityMessage decodecMsg; - switch (storeOp) { - case GET: - key = msg.getLong(); - cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); - return new GetMessage(cacheId, key); - case GET_AND_APPEND: - clientId = getClientId(msg); - msgId = msg.getLong(); - cacheId = readStringFromBufferWithSize(msg); - key = msg.getLong(); - decodecMsg = new GetAndAppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer(), clientId); - decodecMsg.setId(msgId); - return decodecMsg; - case APPEND: - clientId = getClientId(msg); - msgId = msg.getLong(); - cacheId = readStringFromBufferWithSize(msg); - key = msg.getLong(); - decodecMsg = new AppendMessage(cacheId, key, msg.slice().asReadOnlyBuffer(), clientId); - decodecMsg.setId(msgId); - return decodecMsg; - case REPLACE: - clientId = getClientId(msg); - msgId = msg.getLong(); - cacheId = readStringFromBufferWithSize(msg); - key = msg.getLong(); - int expectChainLen = msg.getInt(); - byte[] encodedExpectChain = new byte[expectChainLen]; - msg.get(encodedExpectChain); - int updateChainLen = msg.remaining(); - byte[] encodedUpdateChain = new byte[updateChainLen]; - msg.get(encodedUpdateChain); - decodecMsg = new ReplaceAtHeadMessage(cacheId, key, chainCodec.decode(encodedExpectChain), - chainCodec.decode(encodedUpdateChain), clientId); - decodecMsg.setId(msgId); - return decodecMsg; - case CLIENT_INVALIDATION_ACK: - int invalidationId = msg.getInt(); - cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); - return new ClientInvalidationAck(cacheId, invalidationId); - case CLEAR: - clientId = 
getClientId(msg); - msgId = msg.getLong(); - cacheId = CodecUtil.getStringFromBuffer(msg, msg.remaining() / 2); - decodecMsg = new ClearMessage(cacheId, clientId); - decodecMsg.setId(msgId); - return decodecMsg; + public EhcacheEntityMessage decode(EhcacheMessageType opCode, ByteBuffer messageBuffer) { + StructDecoder decoder; + switch (opCode) { + case GET_STORE: { + decoder = GET_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + GetMessage message = new GetMessage(cacheId, key); + message.setId(msgId); + return message; + } + case GET_AND_APPEND: { + decoder = GET_AND_APPEND_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID uuid = messageCodecUtils.decodeUUID(decoder); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + ByteBuffer payload = decoder.byteBuffer("payload"); + GetAndAppendMessage message = new GetAndAppendMessage(cacheId, key, payload, uuid); + message.setId(msgId); + return message; + } + case APPEND: { + decoder = APPEND_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID uuid = messageCodecUtils.decodeUUID(decoder); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + ByteBuffer payload = decoder.byteBuffer("payload"); + AppendMessage message = new AppendMessage(cacheId, key, payload, uuid); + message.setId(msgId); + return message; + } + case REPLACE: { + decoder = REPLACE_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID uuid = messageCodecUtils.decodeUUID(decoder); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + Chain expect = chainCodec.decode(decoder.struct("expect")); + Chain update = chainCodec.decode(decoder.struct("update")); + ReplaceAtHeadMessage message = new ReplaceAtHeadMessage(cacheId, key, expect, update, uuid); + message.setId(msgId); + return message; + } + case CLIENT_INVALIDATION_ACK: { + decoder = CLIENT_INVALIDATION_ACK_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + Integer invalidationId = decoder.int32("invalidationId"); + ClientInvalidationAck message = new ClientInvalidationAck(cacheId, invalidationId); + message.setId(msgId); + return message; + } + case CLEAR: { + decoder = CLEAR_MESSAGE_STRUCT.decoder(messageBuffer); + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID uuid = messageCodecUtils.decodeUUID(decoder); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + ClearMessage message = new ClearMessage(cacheId, uuid); + message.setId(msgId); + return message; + } default: - throw new UnsupportedOperationException("This operation code is not supported : " + opCode); + throw new RuntimeException("Unhandled message operation : " + opCode); } } - private static String readStringFromBufferWithSize(ByteBuffer buffer) { - int length = buffer.getInt(); - return CodecUtil.getStringFromBuffer(buffer, length); - } - - private static UUID getClientId(ByteBuffer payload) { - long msb = payload.getLong(); - long lsb = payload.getLong(); - return new UUID(msb, lsb); - } - } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java 
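The append-style structs above pair an int64 key with a variable-length byteBuffer("payload", ...) field, replacing the hand-rolled ByteBuffer size arithmetic of the old codec. Here is a minimal sketch of that key-plus-payload round trip, using only runnel calls already visible in this file; DemoPayloadCodec, its struct and its field names/indices are hypothetical.

import java.nio.ByteBuffer;

import org.terracotta.runnel.Struct;
import org.terracotta.runnel.decoding.StructDecoder;
import org.terracotta.runnel.encoding.StructEncoder;

import static org.terracotta.runnel.StructBuilder.newStructBuilder;

// Illustrative sketch only; not the real APPEND wire format.
class DemoPayloadCodec {

  private static final Struct DEMO_APPEND_STRUCT = newStructBuilder()
      .int64("key", 10)
      .byteBuffer("payload", 20)   // variable-length binary payload
      .build();

  byte[] encode(long key, ByteBuffer payload) {
    StructEncoder encoder = DEMO_APPEND_STRUCT.encoder();
    return encoder
        .int64("key", key)
        .byteBuffer("payload", payload)
        .encode()
        .array();
  }

  ByteBuffer decodePayload(ByteBuffer message) {
    StructDecoder decoder = DEMO_APPEND_STRUCT.decoder(message);
    Long key = decoder.int64("key");        // fields are read in the order they were declared
    return decoder.byteBuffer("payload");
  }
}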
b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java index 0c6ba5aaee..22beaa36af 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java @@ -107,11 +107,6 @@ public byte getOpCode() { return operation().getStoreOpCode(); } - @Override - public String toString() { - return getType() + "#" + operation(); - } - public static abstract class KeyBasedServerStoreOpMessage extends ServerStoreOpMessage implements ConcurrentEntityMessage { private final long key; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java index 3a198fa3b3..649243b9f4 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java @@ -17,31 +17,171 @@ package org.ehcache.clustered.common.internal.messages; import org.ehcache.clustered.common.internal.store.Util; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; +import java.util.UUID; -public class StateRepositoryOpCodec { +import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; - private static final byte OPCODE_SIZE = 1; +class StateRepositoryOpCodec { + + private static final String MAP_ID_FIELD = "mapId"; + private static final String VALUE_FIELD = "value"; + + private static final Struct GET_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .string(MAP_ID_FIELD, 35) + .byteBuffer(KEY_FIELD, 40) + .build(); + + private static final Struct PUT_IF_ABSENT_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .string(MAP_ID_FIELD, 35) + .byteBuffer(KEY_FIELD, 40) + .byteBuffer(VALUE_FIELD, 45) + .build(); + + private static final Struct ENTRY_SET_MESSAGE_STRUCT = newStructBuilder() + .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, 
EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) + .int64(MSG_ID_FIELD, 15) + .int64(MSB_UUID_FIELD, 20) + .int64(LSB_UUID_FIELD, 21) + .string(SERVER_STORE_NAME_FIELD, 30) + .string(MAP_ID_FIELD, 35) + .build(); + + private final MessageCodecUtils messageCodecUtils = new MessageCodecUtils(); public byte[] encode(StateRepositoryOpMessage message) { - byte[] encodedMsg = Util.marshall(message); - ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + encodedMsg.length); - buffer.put(message.getOpCode()); - buffer.put(encodedMsg); - return buffer.array(); - } - - public StateRepositoryOpMessage decode(byte[] payload) { - ByteBuffer message = ByteBuffer.wrap(payload); - byte[] encodedMsg = new byte[message.capacity() - OPCODE_SIZE]; - byte opCode = message.get(); - if (opCode == EhcacheEntityMessage.Type.STATE_REPO_OP.getCode()) { - message.get(encodedMsg, 0, encodedMsg.length); - StateRepositoryOpMessage entityMessage = (StateRepositoryOpMessage) Util.unmarshall(encodedMsg); - return entityMessage; - } else { - throw new UnsupportedOperationException("State repository operation not defined for : " + opCode); + + switch (message.getMessageType()) { + case GET_STATE_REPO: + return encodeGetMessage((StateRepositoryOpMessage.GetMessage) message); + case PUT_IF_ABSENT: + return encodePutIfAbsentMessage((StateRepositoryOpMessage.PutIfAbsentMessage) message); + case ENTRY_SET: + return encodeEntrySetMessage((StateRepositoryOpMessage.EntrySetMessage) message); + default: + throw new IllegalArgumentException("Unsupported StateRepositoryOpMessage " + message.getClass()); } } + + private byte[] encodeEntrySetMessage(StateRepositoryOpMessage.EntrySetMessage message) { + StructEncoder encoder = ENTRY_SET_MESSAGE_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); + encoder.string(MAP_ID_FIELD, message.getCacheId()); + + return encoder.encode().array(); + } + + private byte[] encodePutIfAbsentMessage(StateRepositoryOpMessage.PutIfAbsentMessage message) { + StructEncoder encoder = PUT_IF_ABSENT_MESSAGE_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); + encoder.string(MAP_ID_FIELD, message.getCacheId()); + // TODO this needs to change - serialization needs to happen in the StateRepo not here, though we need the hashcode for server side comparison. + encoder.byteBuffer(KEY_FIELD, wrap(Util.marshall(message.getKey()))); + encoder.byteBuffer(VALUE_FIELD, wrap(Util.marshall(message.getValue()))); + + return encoder.encode().array(); + } + + private byte[] encodeGetMessage(StateRepositoryOpMessage.GetMessage message) { + StructEncoder encoder = GET_MESSAGE_STRUCT.encoder(); + + messageCodecUtils.encodeMandatoryFields(encoder, message); + encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); + encoder.string(MAP_ID_FIELD, message.getCacheId()); + // TODO this needs to change - serialization needs to happen in the StateRepo not here, though we need the hashcode for server side comparison. 
+ encoder.byteBuffer(KEY_FIELD, wrap(Util.marshall(message.getKey()))); + + return encoder.encode().array(); + } + + public StateRepositoryOpMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { + switch (messageType) { + case GET_STATE_REPO: + return decodeGetMessage(messageBuffer); + case PUT_IF_ABSENT: + return decodePutIfAbsentMessage(messageBuffer); + case ENTRY_SET: + return decodeEntrySetMessage(messageBuffer); + default: + throw new IllegalArgumentException("Unsupported StateRepositoryOpMessage " + messageType); + } + } + + private StateRepositoryOpMessage.EntrySetMessage decodeEntrySetMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = ENTRY_SET_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + String mapId = decoder.string(MAP_ID_FIELD); + + StateRepositoryOpMessage.EntrySetMessage message = new StateRepositoryOpMessage.EntrySetMessage(storeName, mapId, clientId); + message.setId(msgId); + return message; + } + + private StateRepositoryOpMessage.PutIfAbsentMessage decodePutIfAbsentMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = PUT_IF_ABSENT_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + String mapId = decoder.string(MAP_ID_FIELD); + + ByteBuffer keyBuffer = decoder.byteBuffer(KEY_FIELD); + Object key = Util.unmarshall(keyBuffer); + + ByteBuffer valueBuffer = decoder.byteBuffer(VALUE_FIELD); + Object value = Util.unmarshall(valueBuffer); + + StateRepositoryOpMessage.PutIfAbsentMessage message = new StateRepositoryOpMessage.PutIfAbsentMessage(storeName, mapId, key, value, clientId); + message.setId(msgId); + return message; + } + + private StateRepositoryOpMessage.GetMessage decodeGetMessage(ByteBuffer messageBuffer) { + StructDecoder decoder = GET_MESSAGE_STRUCT.decoder(messageBuffer); + + Long msgId = decoder.int64(MSG_ID_FIELD); + UUID clientId = messageCodecUtils.decodeUUID(decoder); + + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + String mapId = decoder.string(MAP_ID_FIELD); + + ByteBuffer keyBuffer = decoder.byteBuffer(KEY_FIELD); + Object key = Util.unmarshall(keyBuffer); + + StateRepositoryOpMessage.GetMessage getMessage = new StateRepositoryOpMessage.GetMessage(storeName, mapId, key, clientId); + getMessage.setId(msgId); + return getMessage; + } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java index c42bb09155..10800439bd 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java @@ -77,11 +77,6 @@ public byte getOpCode() { return getType().getCode(); } - @Override - public String toString() { - return getType() + "#" + operation(); - } - private static abstract class KeyBasedMessage extends StateRepositoryOpMessage { private final Object key; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java index 
1e8fc71029..3affaf8a0f 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/store/Util.java @@ -16,8 +16,11 @@ package org.ehcache.clustered.common.internal.store; +import org.ehcache.clustered.common.internal.util.ByteBufferInputStream; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.Closeable; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; @@ -147,12 +150,30 @@ public ByteBuffer getPayload() { } public static Object unmarshall(byte[] payload) { + ObjectInputStream objectInputStream = null; + try { + objectInputStream = new ObjectInputStream(new ByteArrayInputStream(payload)); + return objectInputStream.readObject(); + } catch (IOException ex) { + throw new IllegalArgumentException(ex); + } catch (ClassNotFoundException ex) { + throw new IllegalArgumentException(ex); + } finally { + closeSilently(objectInputStream); + } + } + + public static Object unmarshall(ByteBuffer payload) { + ObjectInputStream objectInputStream = null; try { - return new ObjectInputStream(new ByteArrayInputStream(payload)).readObject(); + objectInputStream = new ObjectInputStream(new ByteBufferInputStream(payload)); + return objectInputStream.readObject(); } catch (IOException ex) { throw new IllegalArgumentException(ex); } catch (ClassNotFoundException ex) { throw new IllegalArgumentException(ex); + } finally { + closeSilently(objectInputStream); } } @@ -163,11 +184,21 @@ public static byte[] marshall(Object message) { try { oout.writeObject(message); } finally { - oout.close(); + closeSilently(oout); } } catch (IOException e) { throw new IllegalArgumentException(e); } return out.toByteArray(); } + + private static void closeSilently(Closeable closeable) { + if (closeable != null) { + try { + closeable.close(); + } catch (IOException e) { + // Ignore + } + } + } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java new file mode 100644 index 0000000000..0b0c1e60b0 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/util/ByteBufferInputStream.java @@ -0,0 +1,62 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.util; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; + +import static java.lang.Math.max; +import static java.lang.Math.min; + +// TODO remove once it comes with Runnel +public class ByteBufferInputStream extends InputStream { + + private final ByteBuffer buffer; + + public ByteBufferInputStream(ByteBuffer buffer) { + this.buffer = buffer.slice(); + } + + @Override + public int read() throws IOException { + if (buffer.hasRemaining()) { + return 0xff & buffer.get(); + } else { + return -1; + } + } + + @Override + public int read(byte b[], int off, int len) { + len = min(len, buffer.remaining()); + buffer.get(b, off, len); + return len; + } + + @Override + public long skip(long n) { + n = min(buffer.remaining(), max(n, 0)); + buffer.position((int) (buffer.position() + n)); + return n; + } + + @Override + public synchronized int available() { + return buffer.remaining(); + } +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java index 7c7dc563a1..cdf082d00e 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java @@ -17,29 +17,48 @@ package org.ehcache.clustered.common.internal.messages; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import java.nio.ByteBuffer; import java.util.UUID; import static org.mockito.Matchers.any; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.only; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.MockitoAnnotations.initMocks; public class EhcacheCodecTest { private static final UUID CLIENT_ID = UUID.randomUUID(); + @Mock + private ServerStoreOpCodec serverStoreOpCodec; + + @Mock + private LifeCycleMessageCodec lifeCycleMessageCodec; + + @Mock + private StateRepositoryOpCodec stateRepositoryOpCodec; + + @Mock + private PassiveReplicationMessageCodec passiveReplicationMessageCodec; + + private EhcacheCodec codec; + + @Before + public void setUp() { + initMocks(this); + + codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null, passiveReplicationMessageCodec); + } + @Test public void encodeMessage() throws Exception { - ServerStoreOpCodec serverStoreOpCodec = mock(ServerStoreOpCodec.class); - LifeCycleMessageCodec lifeCycleMessageCodec = mock(LifeCycleMessageCodec.class); - StateRepositoryOpCodec stateRepositoryOpCodec = mock(StateRepositoryOpCodec.class); - PassiveReplicationMessageCodec passiveReplicationMessageCodec = mock(PassiveReplicationMessageCodec.class); - EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null, passiveReplicationMessageCodec); - LifecycleMessage.DestroyServerStore lifecycleMessage = new LifecycleMessage.DestroyServerStore("foo", CLIENT_ID); codec.encodeMessage(lifecycleMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); @@ -71,50 +90,42 @@ public void encodeMessage() throws Exception { } 
@Test - public void decodeMessage() throws Exception { - ServerStoreOpCodec serverStoreOpCodec = mock(ServerStoreOpCodec.class); - LifeCycleMessageCodec lifeCycleMessageCodec = mock(LifeCycleMessageCodec.class); - StateRepositoryOpCodec stateRepositoryOpCodec = mock(StateRepositoryOpCodec.class); - PassiveReplicationMessageCodec passiveReplicationMessageCodec = mock(PassiveReplicationMessageCodec.class); - EhcacheCodec codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null, passiveReplicationMessageCodec); - - byte[] payload = new byte[1]; - - for (byte i = 1; i <= EhcacheEntityMessage.Type.LIFECYCLE_OP.getCode(); i++) { - payload[0] = i; - codec.decodeMessage(payload); - } - verify(lifeCycleMessageCodec, times(10)).decode(payload); - verify(serverStoreOpCodec, never()).decode(payload); - verify(stateRepositoryOpCodec, never()).decode(payload); - verify(passiveReplicationMessageCodec, never()).decode(payload); - - for (byte i = 11; i <= EhcacheEntityMessage.Type.SERVER_STORE_OP.getCode(); i++) { - payload[0] = i; - codec.decodeMessage(payload); + public void decodeLifeCycleMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.LIFECYCLE_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + codec.decodeMessage(encodedBuffer.array()); } - verify(lifeCycleMessageCodec, times(10)).decode(payload); - verify(serverStoreOpCodec, times(10)).decode(payload); - verify(stateRepositoryOpCodec, never()).decode(payload); - verify(passiveReplicationMessageCodec, never()).decode(payload); - - for (byte i = 21; i <= EhcacheEntityMessage.Type.STATE_REPO_OP.getCode(); i++) { - payload[0] = i; - codec.decodeMessage(payload); + verify(lifeCycleMessageCodec, times(EhcacheMessageType.LIFECYCLE_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); + verifyZeroInteractions(serverStoreOpCodec, stateRepositoryOpCodec, passiveReplicationMessageCodec); + } + + @Test + public void decodeServerStoreMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.STORE_OPERATION_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + codec.decodeMessage(encodedBuffer.array()); } - verify(lifeCycleMessageCodec, times(10)).decode(payload); - verify(serverStoreOpCodec, times(10)).decode(payload); - verify(stateRepositoryOpCodec, times(10)).decode(payload); - verify(passiveReplicationMessageCodec, never()).decode(payload); - - for (byte i = 41; i <= EhcacheEntityMessage.Type.REPLICATION_OP.getCode(); i++) { - payload[0] = i; - codec.decodeMessage(payload); + verify(serverStoreOpCodec, times(EhcacheMessageType.STORE_OPERATION_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); + verifyZeroInteractions(lifeCycleMessageCodec, stateRepositoryOpCodec, passiveReplicationMessageCodec); + } + + @Test + public void decodeStateRepoMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.STATE_REPO_OPERATION_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + codec.decodeMessage(encodedBuffer.array()); } - verify(lifeCycleMessageCodec, times(10)).decode(payload); - verify(serverStoreOpCodec, times(10)).decode(payload); - verify(stateRepositoryOpCodec, times(10)).decode(payload); - verify(passiveReplicationMessageCodec, times(10)).decode(payload); + 
verify(stateRepositoryOpCodec, times(EhcacheMessageType.STATE_REPO_OPERATION_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); + verifyZeroInteractions(lifeCycleMessageCodec, serverStoreOpCodec, passiveReplicationMessageCodec); + } + @Test + public void decodeClientIDTrackerMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.PASSIVE_SYNC_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + codec.decodeMessage(encodedBuffer.array()); + } + verify(passiveReplicationMessageCodec, times(EhcacheMessageType.PASSIVE_SYNC_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); + verifyZeroInteractions(lifeCycleMessageCodec, serverStoreOpCodec, stateRepositoryOpCodec); } -} \ No newline at end of file +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java new file mode 100644 index 0000000000..582d9cdf15 --- /dev/null +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java @@ -0,0 +1,234 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.junit.Before; +import org.junit.Test; + +import java.util.Collections; +import java.util.UUID; + +import static java.nio.ByteBuffer.wrap; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.*; + +/** + * LifeCycleMessageCodecTest + */ +public class LifeCycleMessageCodecTest { + + private static final long MESSAGE_ID = 42L; + private static final UUID CLIENT_ID = UUID.randomUUID(); + + private final LifeCycleMessageFactory factory = new LifeCycleMessageFactory(); + private final LifeCycleMessageCodec codec = new LifeCycleMessageCodec(); + + @Before + public void setUp() { + factory.setClientId(CLIENT_ID); + } + + @Test + public void testConfigureStoreManager() throws Exception { + ServerSideConfiguration configuration = getServerSideConfiguration(); + LifecycleMessage message = factory.configureStoreManager(configuration); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ConfigureStoreManager decodedMessage = (LifecycleMessage.ConfigureStoreManager) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CONFIGURE)); + assertThat(decodedMessage.getConfiguration().getDefaultServerResource(), is(configuration.getDefaultServerResource())); + assertThat(decodedMessage.getConfiguration().getResourcePools(), is(configuration.getResourcePools())); + } + + @Test + public void testValidateStoreManager() throws Exception { + ServerSideConfiguration configuration = getServerSideConfiguration(); + LifecycleMessage message = factory.validateStoreManager(configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ValidateStoreManager decodedMessage = (LifecycleMessage.ValidateStoreManager) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.VALIDATE)); + assertThat(decodedMessage.getConfiguration().getDefaultServerResource(), is(configuration.getDefaultServerResource())); + assertThat(decodedMessage.getConfiguration().getResourcePools(), is(configuration.getResourcePools())); + } + + @Test + public void testCreateServerStoreDedicated() throws Exception { + PoolAllocation.Dedicated dedicated = new PoolAllocation.Dedicated("dedicate", 420000L); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(dedicated, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.createServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.CreateServerStore decodedMessage = (LifecycleMessage.CreateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CREATE_SERVER_STORE)); + 
validateCommonServerStoreConfig(decodedMessage, configuration); + PoolAllocation.Dedicated decodedPoolAllocation = (PoolAllocation.Dedicated) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourceName(), is(dedicated.getResourceName())); + assertThat(decodedPoolAllocation.getSize(), is(dedicated.getSize())); + } + + @Test + public void testCreateServerStoreShared() throws Exception { + PoolAllocation.Shared shared = new PoolAllocation.Shared("shared"); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(shared, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.createServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.CreateServerStore decodedMessage = (LifecycleMessage.CreateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CREATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + PoolAllocation.Shared decodedPoolAllocation = (PoolAllocation.Shared) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourcePoolName(), is(shared.getResourcePoolName())); + } + + @Test + public void testCreateServerStoreUnknown() throws Exception { + PoolAllocation.Unknown unknown = new PoolAllocation.Unknown(); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(unknown, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.createServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.CreateServerStore decodedMessage = (LifecycleMessage.CreateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CREATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + assertThat(decodedMessage.getStoreConfiguration().getPoolAllocation(), instanceOf(PoolAllocation.Unknown.class)); + } + + @Test + public void testValidateServerStoreDedicated() throws Exception { + PoolAllocation.Dedicated dedicated = new PoolAllocation.Dedicated("dedicate", 420000L); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(dedicated, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.validateServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ValidateServerStore decodedMessage = (LifecycleMessage.ValidateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.VALIDATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + PoolAllocation.Dedicated decodedPoolAllocation = (PoolAllocation.Dedicated) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourceName(), is(dedicated.getResourceName())); + 
assertThat(decodedPoolAllocation.getSize(), is(dedicated.getSize())); + } + + @Test + public void testValidateServerStoreShared() throws Exception { + PoolAllocation.Shared shared = new PoolAllocation.Shared("shared"); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(shared, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.validateServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ValidateServerStore decodedMessage = (LifecycleMessage.ValidateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.VALIDATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + PoolAllocation.Shared decodedPoolAllocation = (PoolAllocation.Shared) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourcePoolName(), is(shared.getResourcePoolName())); + } + + @Test + public void testValidateServerStoreUnknown() throws Exception { + PoolAllocation.Unknown unknown = new PoolAllocation.Unknown(); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(unknown, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + LifecycleMessage message = factory.validateServerStore("store1", configuration); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ValidateServerStore decodedMessage = (LifecycleMessage.ValidateServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.VALIDATE_SERVER_STORE)); + validateCommonServerStoreConfig(decodedMessage, configuration); + assertThat(decodedMessage.getStoreConfiguration().getPoolAllocation(), instanceOf(PoolAllocation.Unknown.class)); + } + + @Test + public void testReleaseServerStore() throws Exception { + LifecycleMessage message = factory.releaseServerStore("store1"); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.ReleaseServerStore decodedMessage = (LifecycleMessage.ReleaseServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.RELEASE_SERVER_STORE)); + assertThat(decodedMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getName(), is("store1")); + } + + @Test + public void testDestroyServerStore() throws Exception { + LifecycleMessage message = factory.destroyServerStore("store1"); + message.setId(MESSAGE_ID); + + byte[] encoded = codec.encode(message); + LifecycleMessage.DestroyServerStore decodedMessage = (LifecycleMessage.DestroyServerStore) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.DESTROY_SERVER_STORE)); + assertThat(decodedMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getName(), is("store1")); + } + + private void validateCommonServerStoreConfig(LifecycleMessage.BaseServerStore decodedMessage, ServerStoreConfiguration initialConfiguration) { + 
assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedMessage.getName(), is("store1")); + assertThat(decodedMessage.getStoreConfiguration().getStoredKeyType(), is(initialConfiguration.getStoredKeyType())); + assertThat(decodedMessage.getStoreConfiguration().getStoredValueType(), is(initialConfiguration.getStoredValueType())); + assertThat(decodedMessage.getStoreConfiguration().getActualKeyType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getActualValueType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getConsistency(), is(initialConfiguration.getConsistency())); + assertThat(decodedMessage.getStoreConfiguration().getKeySerializerType(), is(initialConfiguration.getKeySerializerType())); + assertThat(decodedMessage.getStoreConfiguration().getValueSerializerType(), is(initialConfiguration.getValueSerializerType())); + } + + private ServerSideConfiguration getServerSideConfiguration() { + return new ServerSideConfiguration("default", Collections.singletonMap("shared", new ServerSideConfiguration.Pool(100, "other"))); + } + +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodecTest.java index 900c3f7c89..6348cd3c37 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodecTest.java @@ -16,6 +16,9 @@ package org.ehcache.clustered.common.internal.messages; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; @@ -25,25 +28,28 @@ import java.util.UUID; +import static java.nio.ByteBuffer.wrap; import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; public class PassiveReplicationMessageCodecTest { + private static final long MESSAGE_ID = 42L; + private PassiveReplicationMessageCodec codec = new PassiveReplicationMessageCodec(); + @Test public void testClientIDTrackerMessageCodec() { ClientIDTrackerMessage clientIDTrackerMessage = new ClientIDTrackerMessage(200L, UUID.randomUUID()); - PassiveReplicationMessageCodec passiveReplicationMessageCodec = new PassiveReplicationMessageCodec(); - - PassiveReplicationMessage decodedMsg = (PassiveReplicationMessage)passiveReplicationMessageCodec.decode(passiveReplicationMessageCodec - .encode(clientIDTrackerMessage)); + byte[] encoded = codec.encode(clientIDTrackerMessage); + PassiveReplicationMessage decodedMsg = (PassiveReplicationMessage) 
codec.decode(EhcacheMessageType.CLIENT_ID_TRACK_OP, wrap(encoded)); assertThat(decodedMsg.getClientId(), is(clientIDTrackerMessage.getClientId())); assertThat(decodedMsg.getId(), is(clientIDTrackerMessage.getId())); @@ -55,10 +61,8 @@ public void testChainReplicationMessageCodec() { Chain chain = getChain(false, createPayload(2L), createPayload(20L)); ChainReplicationMessage chainReplicationMessage = new ChainReplicationMessage("test", 2L, chain, 200L, UUID.randomUUID()); - PassiveReplicationMessageCodec passiveReplicationMessageCodec = new PassiveReplicationMessageCodec(); - - ChainReplicationMessage decodedMsg = (ChainReplicationMessage)passiveReplicationMessageCodec.decode(passiveReplicationMessageCodec - .encode(chainReplicationMessage)); + byte[] encoded = codec.encode(chainReplicationMessage); + ChainReplicationMessage decodedMsg = (ChainReplicationMessage) codec.decode(EhcacheMessageType.CHAIN_REPLICATION_OP, wrap(encoded)); assertThat(decodedMsg.getCacheId(), is(chainReplicationMessage.getCacheId())); assertThat(decodedMsg.getClientId(), is(chainReplicationMessage.getClientId())); @@ -72,12 +76,11 @@ public void testChainReplicationMessageCodec() { public void testClearInvalidationCompleteMessage() { ClearInvalidationCompleteMessage clearInvalidationCompleteMessage = new ClearInvalidationCompleteMessage("test"); - PassiveReplicationMessageCodec messageCodec = new PassiveReplicationMessageCodec(); - - ClearInvalidationCompleteMessage decoded = (ClearInvalidationCompleteMessage)messageCodec.decode(messageCodec.encode(clearInvalidationCompleteMessage)); + byte[] encoded = codec.encode(clearInvalidationCompleteMessage); + ClearInvalidationCompleteMessage decoded = (ClearInvalidationCompleteMessage) codec.decode(EhcacheMessageType.CLEAR_INVALIDATION_COMPLETE, wrap(encoded)); - assertThat(decoded.getOpCode(), equalTo(clearInvalidationCompleteMessage.getOpCode())); - assertThat(decoded.getCacheId(), equalTo(clearInvalidationCompleteMessage.getCacheId())); + assertThat(decoded.getMessageType(), is(EhcacheMessageType.CLEAR_INVALIDATION_COMPLETE)); + assertThat(decoded.getCacheId(), is(clearInvalidationCompleteMessage.getCacheId())); } @@ -86,13 +89,81 @@ public void testInvalidationCompleteMessage() { InvalidationCompleteMessage invalidationCompleteMessage = new InvalidationCompleteMessage("test", 20L); - PassiveReplicationMessageCodec messageCodec = new PassiveReplicationMessageCodec(); + byte[] encoded = codec.encode(invalidationCompleteMessage); + InvalidationCompleteMessage decoded = (InvalidationCompleteMessage) codec.decode(EhcacheMessageType.INVALIDATION_COMPLETE, wrap(encoded)); - InvalidationCompleteMessage decoded = (InvalidationCompleteMessage)messageCodec.decode(messageCodec.encode(invalidationCompleteMessage)); - - assertThat(decoded.getOpCode(), equalTo(invalidationCompleteMessage.getOpCode())); + assertThat(decoded.getMessageType(), is(EhcacheMessageType.INVALIDATION_COMPLETE)); assertThat(decoded.getCacheId(), equalTo(invalidationCompleteMessage.getCacheId())); assertThat(decoded.getKey(), equalTo(invalidationCompleteMessage.getKey())); } + @Test + public void testCreateServerStoreReplicationDedicated() throws Exception { + UUID clientId = UUID.randomUUID(); + PoolAllocation.Dedicated dedicated = new PoolAllocation.Dedicated("dedicate", 420000L); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(dedicated, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", 
"org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + PassiveReplicationMessage.CreateServerStoreReplicationMessage message = new PassiveReplicationMessage.CreateServerStoreReplicationMessage(MESSAGE_ID, clientId, "storeId", configuration); + + byte[] encoded = codec.encode(message); + PassiveReplicationMessage.CreateServerStoreReplicationMessage decodedMessage = (PassiveReplicationMessage.CreateServerStoreReplicationMessage) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CREATE_SERVER_STORE_REPLICATION)); + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getClientId(), is(clientId)); + assertThat(decodedMessage.getStoreName(), is("storeId")); + assertThat(decodedMessage.getStoreConfiguration().getStoredKeyType(), is(configuration.getStoredKeyType())); + assertThat(decodedMessage.getStoreConfiguration().getStoredValueType(), is(configuration.getStoredValueType())); + assertThat(decodedMessage.getStoreConfiguration().getActualKeyType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getActualValueType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getConsistency(), is(configuration.getConsistency())); + assertThat(decodedMessage.getStoreConfiguration().getKeySerializerType(), is(configuration.getKeySerializerType())); + assertThat(decodedMessage.getStoreConfiguration().getValueSerializerType(), is(configuration.getValueSerializerType())); + PoolAllocation.Dedicated decodedPoolAllocation = (PoolAllocation.Dedicated) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourceName(), is(dedicated.getResourceName())); + assertThat(decodedPoolAllocation.getSize(), is(dedicated.getSize())); + } + + @Test + public void testCreateServerStoreReplicationShared() throws Exception { + UUID clientId = UUID.randomUUID(); + PoolAllocation.Shared shared = new PoolAllocation.Shared("shared"); + ServerStoreConfiguration configuration = new ServerStoreConfiguration(shared, "java.lang.Long", "java.lang.String", null, null, + "org.ehcache.impl.serialization.LongSerializer", "org.ehcache.impl.serialization.StringSerializer", + Consistency.STRONG); + PassiveReplicationMessage.CreateServerStoreReplicationMessage message = new PassiveReplicationMessage.CreateServerStoreReplicationMessage(MESSAGE_ID, clientId, "storeId", configuration); + + byte[] encoded = codec.encode(message); + PassiveReplicationMessage.CreateServerStoreReplicationMessage decodedMessage = (PassiveReplicationMessage.CreateServerStoreReplicationMessage) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.CREATE_SERVER_STORE_REPLICATION)); + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getClientId(), is(clientId)); + assertThat(decodedMessage.getStoreName(), is("storeId")); + assertThat(decodedMessage.getStoreConfiguration().getStoredKeyType(), is(configuration.getStoredKeyType())); + assertThat(decodedMessage.getStoreConfiguration().getStoredValueType(), is(configuration.getStoredValueType())); + assertThat(decodedMessage.getStoreConfiguration().getActualKeyType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getActualValueType(), nullValue()); + assertThat(decodedMessage.getStoreConfiguration().getConsistency(), is(configuration.getConsistency())); + 
assertThat(decodedMessage.getStoreConfiguration().getKeySerializerType(), is(configuration.getKeySerializerType())); + assertThat(decodedMessage.getStoreConfiguration().getValueSerializerType(), is(configuration.getValueSerializerType())); + PoolAllocation.Shared decodedPoolAllocation = (PoolAllocation.Shared) decodedMessage.getStoreConfiguration().getPoolAllocation(); + assertThat(decodedPoolAllocation.getResourcePoolName(), is(shared.getResourcePoolName())); + } + + @Test + public void testDestroyServerStoreReplication() throws Exception { + UUID clientId = UUID.randomUUID(); + PassiveReplicationMessage.DestroyServerStoreReplicationMessage message = new PassiveReplicationMessage.DestroyServerStoreReplicationMessage(MESSAGE_ID, clientId, "storeId"); + + byte[] encoded = codec.encode(message); + PassiveReplicationMessage.DestroyServerStoreReplicationMessage decodedMessage = (PassiveReplicationMessage.DestroyServerStoreReplicationMessage) codec.decode(message.getMessageType(), wrap(encoded)); + + assertThat(decodedMessage.getMessageType(), is(EhcacheMessageType.DESTROY_SERVER_STORE_REPLICATION)); + assertThat(decodedMessage.getId(), is(MESSAGE_ID)); + assertThat(decodedMessage.getClientId(), is(clientId)); + assertThat(decodedMessage.getStoreName(), is("storeId")); + } + } diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java index 12e58585c9..0c7161bc5e 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodecTest.java @@ -21,95 +21,130 @@ import java.util.UUID; +import static java.nio.ByteBuffer.wrap; import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; import static org.ehcache.clustered.common.internal.store.Util.readPayLoad; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; public class ServerStoreOpCodecTest { - private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test", UUID.randomUUID()); + private static final UUID CLIENT_ID = UUID.randomUUID(); + private static final ServerStoreMessageFactory MESSAGE_FACTORY = new ServerStoreMessageFactory("test", CLIENT_ID); private static final ServerStoreOpCodec STORE_OP_CODEC = new ServerStoreOpCodec(); @Test public void testAppendMessageCodec() { - EhcacheEntityMessage appendMessage = MESSAGE_FACTORY.appendOperation(1L, createPayload(1L)); + ServerStoreOpMessage.AppendMessage appendMessage = MESSAGE_FACTORY.appendOperation(1L, createPayload(1L)); + appendMessage.setId(42L); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(STORE_OP_CODEC.encode((ServerStoreOpMessage)appendMessage)); + byte[] encoded = STORE_OP_CODEC.encode(appendMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(appendMessage.getMessageType(), wrap(encoded)); ServerStoreOpMessage.AppendMessage decodedAppendMessage = (ServerStoreOpMessage.AppendMessage) decodedMsg; assertThat(decodedAppendMessage.getCacheId(), is("test")); assertThat(decodedAppendMessage.getKey(), 
is(1L)); assertThat(readPayLoad(decodedAppendMessage.getPayload()), is(1L)); - assertThat(decodedAppendMessage.getId(), is(-1L)); - assertEquals(appendMessage.getClientId(), decodedAppendMessage.getClientId()); + assertThat(decodedAppendMessage.getId(), is(42L)); + assertThat(decodedAppendMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedAppendMessage.getMessageType(), is(EhcacheMessageType.APPEND)); } @Test public void testGetMessageCodec() { - EhcacheEntityMessage getMessage = MESSAGE_FACTORY.getOperation(2L); + ServerStoreOpMessage getMessage = MESSAGE_FACTORY.getOperation(2L); + getMessage.setId(42L); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(STORE_OP_CODEC.encode((ServerStoreOpMessage)getMessage)); + byte[] encoded = STORE_OP_CODEC.encode(getMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(getMessage.getMessageType(), wrap(encoded)); ServerStoreOpMessage.GetMessage decodedGetMessage = (ServerStoreOpMessage.GetMessage) decodedMsg; assertThat(decodedGetMessage.getCacheId(), is("test")); assertThat(decodedGetMessage.getKey(), is(2L)); + assertThat(decodedGetMessage.getId(), is(42L)); + assertThat(decodedGetMessage.getMessageType(), is(EhcacheMessageType.GET_STORE)); + try { + decodedGetMessage.getClientId(); + fail("AssertionError expected"); + } catch (AssertionError error) { + assertThat(error.getMessage(), containsString("Client Id is not supported")); + } } @Test public void testGetAndAppendMessageCodec() { - EhcacheEntityMessage getAndAppendMessage = MESSAGE_FACTORY.getAndAppendOperation(10L, createPayload(10L)); + ServerStoreOpMessage getAndAppendMessage = MESSAGE_FACTORY.getAndAppendOperation(10L, createPayload(10L)); + getAndAppendMessage.setId(123L); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(STORE_OP_CODEC.encode((ServerStoreOpMessage)getAndAppendMessage)); + byte[] encoded = STORE_OP_CODEC.encode(getAndAppendMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(getAndAppendMessage.getMessageType(), wrap(encoded)); ServerStoreOpMessage.GetAndAppendMessage decodedGetAndAppendMessage = (ServerStoreOpMessage.GetAndAppendMessage) decodedMsg; assertThat(decodedGetAndAppendMessage.getCacheId(), is("test")); assertThat(decodedGetAndAppendMessage.getKey(), is(10L)); assertThat(readPayLoad(decodedGetAndAppendMessage.getPayload()), is(10L)); - assertThat(decodedGetAndAppendMessage.getId(), is(-1L)); - assertEquals(getAndAppendMessage.getClientId(), decodedGetAndAppendMessage.getClientId()); + assertThat(decodedGetAndAppendMessage.getId(), is(123L)); + assertThat(decodedGetAndAppendMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedGetAndAppendMessage.getMessageType(), is(EhcacheMessageType.GET_AND_APPEND)); } @Test public void testReplaceAtHeadMessageCodec() { - EhcacheEntityMessage replaceAtHeadMessage = MESSAGE_FACTORY.replaceAtHeadOperation(10L, + ServerStoreOpMessage replaceAtHeadMessage = MESSAGE_FACTORY.replaceAtHeadOperation(10L, getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L)), getChain(false, createPayload(2000L))); + replaceAtHeadMessage.setId(42L); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(STORE_OP_CODEC.encode((ServerStoreOpMessage)replaceAtHeadMessage)); + byte[] encoded = STORE_OP_CODEC.encode(replaceAtHeadMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(replaceAtHeadMessage.getMessageType(), wrap(encoded)); ServerStoreOpMessage.ReplaceAtHeadMessage decodedReplaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage) 
decodedMsg; assertThat(decodedReplaceAtHeadMessage.getCacheId(), is("test")); assertThat(decodedReplaceAtHeadMessage.getKey(), is(10L)); - assertThat(decodedReplaceAtHeadMessage.getId(), is(-1L)); + assertThat(decodedReplaceAtHeadMessage.getId(), is(42L)); Util.assertChainHas(decodedReplaceAtHeadMessage.getExpect(), 10L, 100L, 1000L); Util.assertChainHas(decodedReplaceAtHeadMessage.getUpdate(), 2000L); - assertEquals(replaceAtHeadMessage.getClientId(), decodedReplaceAtHeadMessage.getClientId()); + assertThat(decodedReplaceAtHeadMessage.getClientId(), is(CLIENT_ID)); + assertThat(decodedReplaceAtHeadMessage.getMessageType(), is(EhcacheMessageType.REPLACE)); } @Test public void testClearMessageCodec() throws Exception { - EhcacheEntityMessage clearMessage = MESSAGE_FACTORY.clearOperation(); - byte[] encodedBytes = STORE_OP_CODEC.encode((ServerStoreOpMessage)clearMessage); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(encodedBytes); - assertThat(((ServerStoreOpMessage)decodedMsg).getCacheId(), is("test")); - assertThat(decodedMsg.getId(), is(-1L)); - assertEquals(clearMessage.getClientId(), decodedMsg.getClientId()); + ServerStoreOpMessage clearMessage = MESSAGE_FACTORY.clearOperation(); + clearMessage.setId(42L); + + byte[] encoded = STORE_OP_CODEC.encode(clearMessage); + ServerStoreOpMessage decodedMsg = (ServerStoreOpMessage) STORE_OP_CODEC.decode(clearMessage.getMessageType(), wrap(encoded)); + + assertThat(decodedMsg.getCacheId(), is("test")); + assertThat(decodedMsg.getId(), is(42L)); + assertThat(decodedMsg.getClientId(), is(CLIENT_ID)); + assertThat(decodedMsg.getMessageType(), is(EhcacheMessageType.CLEAR)); } @Test public void testClientInvalidationAckMessageCodec() throws Exception { - EhcacheEntityMessage invalidationAckMessage = MESSAGE_FACTORY.clientInvalidationAck(123); - byte[] encodedBytes = STORE_OP_CODEC.encode((ServerStoreOpMessage)invalidationAckMessage); - EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(encodedBytes); + ServerStoreOpMessage invalidationAckMessage = MESSAGE_FACTORY.clientInvalidationAck(123); + invalidationAckMessage.setId(456L); + + byte[] encoded = STORE_OP_CODEC.encode(invalidationAckMessage); + EhcacheEntityMessage decodedMsg = STORE_OP_CODEC.decode(invalidationAckMessage.getMessageType(), wrap(encoded)); ServerStoreOpMessage.ClientInvalidationAck decodedInvalidationAckMessage = (ServerStoreOpMessage.ClientInvalidationAck)decodedMsg; assertThat(decodedInvalidationAckMessage.getCacheId(), is("test")); assertThat(decodedInvalidationAckMessage.getInvalidationId(), is(123)); + assertThat(decodedInvalidationAckMessage.getId(), is(456L)); + assertThat(decodedInvalidationAckMessage.getMessageType(), is(EhcacheMessageType.CLIENT_INVALIDATION_ACK)); + try { + decodedInvalidationAckMessage.getClientId(); + fail("AssertionError expected"); + } catch (AssertionError error) { + assertThat(error.getMessage(), containsString("Client Id is not supported")); + } } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java index e84392eabc..dd70332511 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java @@ -515,7 +515,7 @@ Void runTask() throws Exception { }.run(); fail("Expecting StoreAccessTimeoutException"); } catch (StoreAccessTimeoutException e) { - 
assertThat(e.getMessage(), containsString("Timeout exceeded for SERVER_STORE_OP#GET_AND_APPEND")); + assertThat(e.getMessage(), containsString("Timeout exceeded for GET_AND_APPEND")); } } @@ -551,7 +551,7 @@ String runTask() throws Exception { }.run(); fail("Expecting StoreAccessTimeoutException"); } catch (StoreAccessTimeoutException e) { - assertThat(e.getMessage(), containsString("Timeout exceeded for SERVER_STORE_OP#GET_AND_APPEND")); + assertThat(e.getMessage(), containsString("Timeout exceeded for GET_AND_APPEND")); } } @@ -588,7 +588,7 @@ Void runTask() throws Exception { }.run(); fail("Expecting StoreAccessTimeoutException"); } catch (StoreAccessTimeoutException e) { - assertThat(e.getMessage(), containsString("Timeout exceeded for SERVER_STORE_OP#GET_AND_APPEND")); + assertThat(e.getMessage(), containsString("Timeout exceeded for GET_AND_APPEND")); } } @@ -625,7 +625,7 @@ Void runTask() throws Exception { }.run(); fail("Expecting StoreAccessTimeoutException"); } catch (StoreAccessTimeoutException e) { - assertThat(e.getMessage(), containsString("Timeout exceeded for SERVER_STORE_OP#CLEAR")); + assertThat(e.getMessage(), containsString("Timeout exceeded for CLEAR")); } } diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index 2943818bd6..4ed58e062f 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -38,6 +38,7 @@ dependencies { compile"org.terracotta.management.dist:management-common:$parent.managementVersion" provided "org.terracotta:entity-server-api:$parent.entityApiVersion" provided "org.terracotta:standard-cluster-services:$parent.terracottaApisVersion" + provided "org.terracotta:runnel:$parent.terracottaPlatformVersion" } compileJava { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 99baea528c..5cd477aa6b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -47,6 +47,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponseFactory; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; @@ -770,7 +771,7 @@ private void createServerStore(ClientDescriptor clientDescriptor, CreateServerSt serverStore.setEvictionListener(key -> invalidateHashAfterEviction(name, key)); attachStore(clientDescriptor, name); try { - entityMessenger.messageSelfAndDeferRetirement(createServerStore, new ClientIDTrackerMessage.ServerStoreLifeCycleReplicationMessage(createServerStore)); + entityMessenger.messageSelfAndDeferRetirement(createServerStore, new PassiveReplicationMessage.CreateServerStoreReplicationMessage(createServerStore)); } catch (MessageCodecException e) { throw new AssertionError("Codec error", e); } @@ -861,7 +862,7 @@ private void destroyServerStore(ClientDescriptor clientDescriptor, DestroyServer storeClientMap.remove(name); try { - entityMessenger.messageSelfAndDeferRetirement(destroyServerStore, new 
ClientIDTrackerMessage.ServerStoreLifeCycleReplicationMessage(destroyServerStore)); + entityMessenger.messageSelfAndDeferRetirement(destroyServerStore, new PassiveReplicationMessage.DestroyServerStoreReplicationMessage(destroyServerStore)); } catch (MessageCodecException e) { throw new AssertionError("Codec error", e); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 21e696e32a..4fee4e7e7a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -34,7 +34,6 @@ import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; @@ -57,6 +56,9 @@ import java.util.Set; import java.util.UUID; +import static org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ReplicationOp.CLIENTID_TRACK_OP; +import static org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ReplicationOp.SERVER_STORE_LIFECYCLE_REPLICATION_OP; + class EhcachePassiveEntity implements PassiveServerEntity { private static final Logger LOGGER = LoggerFactory.getLogger(EhcachePassiveEntity.class); @@ -117,7 +119,7 @@ public void invoke(EhcacheEntityMessage message) { private void invokeRetirementMessages(PassiveReplicationMessage message) throws ClusterException { - switch (message.operation()) { + switch (message.getMessageType()) { case CHAIN_REPLICATION_OP: LOGGER.debug("Chain Replication message for msgId {} & client Id {}", message.getId(), message.getClientId()); ChainReplicationMessage retirementMessage = (ChainReplicationMessage)message; @@ -130,7 +132,7 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); trackHashInvalidationForEventualCache(retirementMessage); break; - case CLIENTID_TRACK_OP: + case CLIENT_ID_TRACK_OP: LOGGER.debug("PassiveReplicationMessage message for msgId {} & client Id {}", message.getId(), message.getClientId()); ehcacheStateService.getClientMessageTracker().add(message.getClientId()); break; @@ -140,8 +142,15 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws case CLEAR_INVALIDATION_COMPLETE: ehcacheStateService.getInvalidationTracker(((ClearInvalidationCompleteMessage)message).getCacheId()).setClearInProgress(false); break; - case SERVER_STORE_LIFECYCLE_REPLICATION_OP: - invokeRetiredServerStoreLifecycleMessage((ServerStoreLifeCycleReplicationMessage)message); + case CREATE_SERVER_STORE_REPLICATION: + ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); + PassiveReplicationMessage.CreateServerStoreReplicationMessage createMessage = 
(PassiveReplicationMessage.CreateServerStoreReplicationMessage) message; + createServerStore(createMessage.getStoreName(), createMessage.getStoreConfiguration()); + break; + case DESTROY_SERVER_STORE_REPLICATION: + ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); + PassiveReplicationMessage.DestroyServerStoreReplicationMessage destroyMessage = (PassiveReplicationMessage.DestroyServerStoreReplicationMessage) message; + destroyServerStore(destroyMessage.getStoreName()); break; default: throw new IllegalMessageException("Unknown Retirement Message : " + message); @@ -171,23 +180,6 @@ private void trackHashInvalidationForEventualCache(ChainReplicationMessage retir } } - private void invokeRetiredServerStoreLifecycleMessage(ServerStoreLifeCycleReplicationMessage storeLifeCycleReplicationMessage) throws ClusterException { - - LifecycleMessage message = storeLifeCycleReplicationMessage.getMessage(); - ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); - switch (message.operation()) { - case CREATE_SERVER_STORE: - createServerStore((CreateServerStore)message); - break; - case DESTROY_SERVER_STORE: - destroyServerStore((DestroyServerStore)message); - break; - default: - throw new IllegalMessageException("Unknown Replicated ServerStore operation : " + message); - } - } - - private void invokeServerStoreOperation(ServerStoreOpMessage message) throws ClusterException { ServerStoreImpl cacheStore = ehcacheStateService.getStore(message.getCacheId()); if (cacheStore == null) { @@ -278,37 +270,32 @@ private void trackAndApplyMessage(LifecycleMessage message) { clientMessageTracker.applied(message.getId(), message.getClientId()); } - private void createServerStore(CreateServerStore createServerStore) throws ClusterException { + private void createServerStore(String storeName, ServerStoreConfiguration configuration) throws ClusterException { if (!ehcacheStateService.isConfigured()) { throw new LifecycleException("Clustered Tier Manager is not configured"); } - if(createServerStore.getStoreConfiguration().getPoolAllocation() instanceof PoolAllocation.Unknown) { + if(configuration.getPoolAllocation() instanceof PoolAllocation.Unknown) { throw new LifecycleException("Clustered tier can't be created with an Unknown resource pool"); } - final String name = createServerStore.getName(); // client cache identifier/name + LOGGER.info("Creating new clustered tier '{}'", storeName); - LOGGER.info("Creating new clustered tier '{}'", name); - - ServerStoreConfiguration storeConfiguration = createServerStore.getStoreConfiguration(); - ehcacheStateService.createStore(name, storeConfiguration); - if(storeConfiguration.getConsistency() == Consistency.EVENTUAL) { - ehcacheStateService.addInvalidationtracker(name); + ehcacheStateService.createStore(storeName, configuration); + if(configuration.getConsistency() == Consistency.EVENTUAL) { + ehcacheStateService.addInvalidationtracker(storeName); } - management.serverStoreCreated(name); + management.serverStoreCreated(storeName); } - private void destroyServerStore(DestroyServerStore destroyServerStore) throws ClusterException { + private void destroyServerStore(String storeName) throws ClusterException { if (!ehcacheStateService.isConfigured()) { throw new LifecycleException("Clustered Tier Manager is not configured"); } - String name = destroyServerStore.getName(); - - LOGGER.info("Destroying clustered tier '{}'", name); - management.serverStoreDestroyed(name); - 
ehcacheStateService.destroyServerStore(name); - ehcacheStateService.removeInvalidationtracker(name); + LOGGER.info("Destroying clustered tier '{}'", storeName); + management.serverStoreDestroyed(storeName); + ehcacheStateService.destroyServerStore(storeName); + ehcacheStateService.removeInvalidationtracker(storeName); } @Override diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 813f97d41a..5033d8b9fc 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -42,7 +42,6 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.server.internal.messages.EhcacheStateSyncMessage; import org.ehcache.clustered.server.state.ClientMessageTracker; @@ -2762,7 +2761,7 @@ public void testCreateServerStoreSendsPassiveReplicationMessageIfSuccessful() th .build())); verify(entityMessenger, times(0)).messageSelf(any()); - verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(CreateServerStore.class), any(ServerStoreLifeCycleReplicationMessage.class)); + verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(CreateServerStore.class), any(PassiveReplicationMessage.class)); } @@ -2835,7 +2834,7 @@ public void testDestroyServerStoreSendsPassiveReplicationMessageIfSuccessful() t MESSAGE_FACTORY.destroyServerStore("test")); verify(entityMessenger, times(0)).messageSelf(any()); - verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(DestroyServerStore.class), any(ServerStoreLifeCycleReplicationMessage.class)); + verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(DestroyServerStore.class), any(PassiveReplicationMessage.class)); } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index 041af8fb6c..742bdc6478 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -23,9 +23,9 @@ import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; +import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ServerStoreLifeCycleReplicationMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; import org.junit.Before; @@ -256,7 +256,7 @@ public void 
testCreateDedicatedServerStore() throws Exception { .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) .build()); passiveEntity.invoke(createServerStore); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore)); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore)createServerStore)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); @@ -296,7 +296,7 @@ public void testCreateSharedServerStore() throws Exception { .shared("primary") .build()); passiveEntity.invoke(createServerStore); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore)); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore)createServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("cacheAlias")); assertThat(registry.getStoreManagerService() @@ -332,7 +332,7 @@ public void testDestroyServerStore() throws Exception { .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) .build()); passiveEntity.invoke(createServerStore); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore)); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) createServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); @@ -347,7 +347,7 @@ public void testDestroyServerStore() throws Exception { .shared("secondary") .build()); passiveEntity.invoke(sharedServerStore); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)sharedServerStore)); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) sharedServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); @@ -355,7 +355,7 @@ public void testDestroyServerStore() throws Exception { EhcacheEntityMessage destroySharedCache = MESSAGE_FACTORY.destroyServerStore("sharedCache"); passiveEntity.invoke(destroySharedCache); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)destroySharedCache)); + passiveEntity.invoke(new PassiveReplicationMessage.DestroyServerStoreReplicationMessage((LifecycleMessage.DestroyServerStore) destroySharedCache)); assertThat(registry.getResource("serverResource1").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(4L + 4L))); assertThat(registry.getResource("serverResource2").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(8L))); @@ -364,7 +364,7 @@ public void testDestroyServerStore() throws Exception { EhcacheEntityMessage destroyDedicatedCache = MESSAGE_FACTORY.destroyServerStore("dedicatedCache"); passiveEntity.invoke(destroyDedicatedCache); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)destroyDedicatedCache)); + passiveEntity.invoke(new PassiveReplicationMessage.DestroyServerStoreReplicationMessage((LifecycleMessage.DestroyServerStore) destroyDedicatedCache)); 
assertThat(registry.getStoreManagerService().getStores(), is(Matchers.empty())); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), is(Matchers.empty())); @@ -400,7 +400,7 @@ public void testSharedPoolCacheNameCollision() throws Exception { .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) .build()); passiveEntity.invoke(createServerStore); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore)); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) createServerStore)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); @@ -411,7 +411,7 @@ public void testSharedPoolCacheNameCollision() throws Exception { .shared("primary") .build()); passiveEntity.invoke(sharedServerStore); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)sharedServerStore)); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) sharedServerStore)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); assertThat(registry.getStoreManagerService().getDedicatedResourcePoolIds(), containsInAnyOrder("dedicatedCache")); @@ -422,7 +422,7 @@ public void testSharedPoolCacheNameCollision() throws Exception { .dedicated("serverResource2", 4, MemoryUnit.MEGABYTES) .build()); passiveEntity.invoke(createServerStore2); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore2)); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) createServerStore2)); assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); assertThat(registry.getStoreManagerService() @@ -471,7 +471,7 @@ public void testDestroyWithStores() throws Exception { .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) .build()); passiveEntity.invoke(createServerStore); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)createServerStore)); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) createServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache")); assertThat(registry.getResource("serverResource1").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(4L + 4L))); @@ -482,7 +482,7 @@ public void testDestroyWithStores() throws Exception { .shared("secondary") .build()); passiveEntity.invoke(sharedServerStore); - passiveEntity.invoke(new ServerStoreLifeCycleReplicationMessage((LifecycleMessage)sharedServerStore)); + passiveEntity.invoke(new PassiveReplicationMessage.CreateServerStoreReplicationMessage((LifecycleMessage.CreateServerStore) sharedServerStore)); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); assertThat(registry.getStoreManagerService().getStores(), containsInAnyOrder("dedicatedCache", "sharedCache")); From c26ba18b32fa9c4c29cc835fb0fa78e1c2ded847 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 17 Nov 2016 15:25:51 +0100 Subject: [PATCH 146/218] :construction: Move to 
latest Terracotta version * Clean up lifecycle of integration tests by no longer starting the servers in tearDown. --- build.gradle | 4 ++-- ...dCacheOpsReplicationMultiThreadedTest.java | 10 +++++----- ...BasicClusteredCacheOpsReplicationTest.java | 6 +++--- ...OpsReplicationWithMulitpleClientsTest.java | 10 +++++----- .../BasicLifeCyclePassiveReplicationTest.java | 19 ++++++++++++++----- .../clustered/sync/PassiveSyncTest.java | 11 +++-------- 6 files changed, 32 insertions(+), 28 deletions(-) diff --git a/build.gradle b/build.gradle index fac315fecc..fd198e098f 100644 --- a/build.gradle +++ b/build.gradle @@ -31,12 +31,12 @@ ext { terracottaPlatformVersion = '5.0.11.beta2' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.11.beta' - terracottaCoreVersion = '5.0.11-beta' + terracottaCoreVersion = '5.0.11-beta2' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion terracottaPassthroughTestingVersion = '1.0.11.beta' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.11-beta' + galvanVersion = '1.0.11-beta2' // Tools findbugsVersion = '3.0.1' diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java index 7626995512..7982ff03f2 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java @@ -79,8 +79,8 @@ public class BasicClusteredCacheOpsReplicationMultiThreadedTest { + "" + "\n"; - private static CacheManager CACHE_MANAGER1; - private static CacheManager CACHE_MANAGER2; + private static PersistentCacheManager CACHE_MANAGER1; + private static PersistentCacheManager CACHE_MANAGER2; private static Cache CACHE1; private static Cache CACHE2; @@ -98,6 +98,7 @@ public static Consistency[] data() { @Before public void startServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); CLUSTER.getClusterControl().waitForActive(); CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); final CacheManagerBuilder clusteredCacheManagerBuilder @@ -122,8 +123,7 @@ public void startServers() throws Exception { public void tearDown() throws Exception { CACHE_MANAGER1.close(); CACHE_MANAGER2.close(); - CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().startAllServers(); + CACHE_MANAGER2.destroy(); } @Test(timeout=180000) @@ -274,4 +274,4 @@ private static class BlobValue implements Serializable { private final byte[] data = new byte[10 * 1024]; } -} \ No newline at end of file +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java index 4498f3a272..0236774035 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java @@ -64,7 +64,7 @@ public class BasicClusteredCacheOpsReplicationTest { + "" + "\n"; - private static CacheManager CACHE_MANAGER; + 
private static PersistentCacheManager CACHE_MANAGER; private static Cache CACHE1; private static Cache CACHE2; @@ -82,6 +82,7 @@ public static Consistency[] data() { @Before public void startServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); CLUSTER.getClusterControl().waitForActive(); CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); final CacheManagerBuilder clusteredCacheManagerBuilder @@ -103,8 +104,7 @@ public void startServers() throws Exception { @After public void tearDown() throws Exception { CACHE_MANAGER.close(); - CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().startAllServers(); + CACHE_MANAGER.destroy(); } @Test diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java index b29e662465..8f36b0d507 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java @@ -71,8 +71,8 @@ public class BasicClusteredCacheOpsReplicationWithMulitpleClientsTest { + "" + "\n"; - private static CacheManager CACHE_MANAGER1; - private static CacheManager CACHE_MANAGER2; + private static PersistentCacheManager CACHE_MANAGER1; + private static PersistentCacheManager CACHE_MANAGER2; private static Cache CACHE1; private static Cache CACHE2; @@ -90,6 +90,7 @@ public static Consistency[] data() { @Before public void startServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); CLUSTER.getClusterControl().waitForActive(); CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); final CacheManagerBuilder clusteredCacheManagerBuilder @@ -113,8 +114,7 @@ public void startServers() throws Exception { public void tearDown() throws Exception { CACHE_MANAGER1.close(); CACHE_MANAGER2.close(); - CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().startAllServers(); + CACHE_MANAGER2.destroy(); } @Test(timeout=180000) @@ -221,4 +221,4 @@ public void testClear() throws Exception { private static class BlobValue implements Serializable { private final byte[] data = new byte[10 * 1024]; } -} \ No newline at end of file +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java index d8a807b471..7a2fc2b8e9 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java @@ -16,6 +16,7 @@ package org.ehcache.clustered.replication; +import org.ehcache.CachePersistenceException; import org.ehcache.clustered.client.config.ClusteredResourcePool; import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; @@ -33,6 +34,7 @@ import org.ehcache.clustered.common.internal.exceptions.InvalidStoreManagerException; import org.ehcache.clustered.common.internal.exceptions.LifecycleException; 
import org.ehcache.impl.serialization.CompactJavaSerializer; +import org.ehcache.spi.service.MaintainableService; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; @@ -66,6 +68,7 @@ public class BasicLifeCyclePassiveReplicationTest { @Before public void startServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); CLUSTER.getClusterControl().waitForActive(); CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); } @@ -73,7 +76,6 @@ public void startServers() throws Exception { @After public void tearDown() throws Exception { CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().startAllServers(); } @Test @@ -103,7 +105,7 @@ public void testCreateCacheReplication() throws Exception { } service.stop(); - + cleanUpCluster(service); } @Test @@ -136,7 +138,7 @@ public void testDestroyCacheReplication() throws Exception { } service.stop(); - + cleanUpCluster(service); } @Test @@ -163,6 +165,7 @@ public void testConfigureReplication() throws Exception { } service.stop(); + cleanUpCluster(service); } @Test @@ -189,6 +192,7 @@ public void testValidateReplication() throws Exception { } service.stop(); + cleanUpCluster(service); } @Test @@ -226,16 +230,21 @@ public void testDestroyServerStoreIsNotReplicatedIfFailsOnActive() throws Except service1.stop(); service2.stop(); - + cleanUpCluster(service1); } - private static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { Field entity = clusteringService.getClass().getDeclaredField("entity"); entity.setAccessible(true); return (EhcacheClientEntity)entity.get(clusteringService); } + private void cleanUpCluster(ClusteringService service) throws CachePersistenceException { + service.startForMaintenance(null, MaintainableService.MaintenanceScope.CACHE_MANAGER); + service.destroyAll(); + service.stop(); + } + private static ServerStoreConfiguration getServerStoreConfiguration(String resourceName) { ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(resourceName, 4, MB); return new ServerStoreConfiguration(resourcePool.getPoolAllocation(), diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java index e19e989418..c1db4c03d2 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java @@ -55,23 +55,18 @@ public class PassiveSyncTest { @Before public void startServers() throws Exception { + CLUSTER.getClusterControl().startAllServers(); CLUSTER.getClusterControl().waitForActive(); CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); } - @After - public void tearDown() throws Exception { - CLUSTER.getClusterControl().terminateActive(); - CLUSTER.getClusterControl().startAllServers(); - } - @Test public void testSync() throws Exception { CLUSTER.getClusterControl().terminateOnePassive(); final CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/op-sync")) .autoCreate() .defaultServerResource("primary-server-resource")); final PersistentCacheManager cacheManager = 
clusteredCacheManagerBuilder.build(false); @@ -108,7 +103,7 @@ public void testLifeCycleOperationsOnSync() throws Exception { final CacheManagerBuilder clusteredCacheManagerBuilder = CacheManagerBuilder.newCacheManagerBuilder() - .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm")) + .with(ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/lifecycle-sync")) .autoCreate() .defaultServerResource("primary-server-resource")); final PersistentCacheManager cacheManager = clusteredCacheManagerBuilder.build(true);
From fb5bbfae609b9dcfedfd25e202cd134764eea83e Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 18 Nov 2016 10:34:15 +0100 Subject: [PATCH 147/218] :arrow_up: Move to gradle 3.2
--- gradle/wrapper/gradle-wrapper.jar | Bin 52818 -> 52928 bytes gradle/wrapper/gradle-wrapper.properties | 4 ++-- 2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index deedc7fa5e6310eac3148a7dd0b1f069b07364cb..6ffa237849ef3607e39c3b334a92a65367962071 100644 GIT binary patch delta 3904 [binary delta data omitted] delta 3589 [binary delta data omitted]
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 4a444e5b24..cc1e0a8410 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ -#Mon Sep 19 15:49:17 PDT 2016 +#Fri Nov 18 10:33:07 CET 2016 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-3.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-3.2-all.zip
From 31511fba67d3eefbb250eb4a93615c40f6f5ba5e Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Tue, 22 Nov 2016 13:57:59 +0100 Subject: [PATCH 148/218] Ignore all currently broken stats tests
--- .../ehcache/management/providers/statistics/EvictionTest.java | 2 ++ .../ehcache/management/providers/statistics/HitCountTest.java | 2 ++ .../ehcache/management/providers/statistics/HitRatioTest.java | 2 ++ .../ehcache/management/providers/statistics/MissCountTest.java | 2 ++ .../ehcache/management/providers/statistics/MissRatioTest.java | 2 ++ .../providers/statistics/StandardEhcacheStatisticsTest.java | 2 ++ .../registry/DefaultManagementRegistryServiceTest.java | 3 +++ .../registry/DefaultSharedManagementServiceTest.java | 2 ++ 8 files changed, 17 insertions(+)
diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java
b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java index bc8cb222ae..c95bd8276e 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java @@ -46,6 +46,7 @@ import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -56,6 +57,7 @@ import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.history.CounterHistory; +@Ignore("Test currently broken - needs platform fix") @RunWith(Parameterized.class) public class EvictionTest { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java index eff9a7d146..946a16f08c 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java @@ -41,6 +41,7 @@ import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -49,6 +50,7 @@ import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; +@Ignore("Test currently broken - needs platform fix") @RunWith(Parameterized.class) public class HitCountTest { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java index b816952db6..eaa6524047 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java @@ -28,6 +28,7 @@ import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -48,6 +49,7 @@ import static org.ehcache.config.units.EntryUnit.ENTRIES; import static org.ehcache.config.units.MemoryUnit.MB; +@Ignore("Test currently broken - needs platform fix") @RunWith(Parameterized.class) public class HitRatioTest { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java index 9606d08840..a5c7561497 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java @@ -39,6 +39,7 @@ import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -47,6 +48,7 @@ import org.junit.runners.Parameterized; import 
org.terracotta.management.model.context.Context; +@Ignore("Test currently broken - needs platform fix") @RunWith(Parameterized.class) public class MissCountTest { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java index c321acdc3d..23b671419e 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java @@ -40,6 +40,7 @@ import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -48,6 +49,7 @@ import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; +@Ignore("Test currently broken - needs platform fix") @RunWith(Parameterized.class) public class MissRatioTest { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java index 784d9a71d3..ce6532ea72 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java @@ -31,6 +31,7 @@ import org.ehcache.management.registry.DefaultManagementRegistryService; import org.hamcrest.Matchers; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; @@ -40,6 +41,7 @@ import static org.junit.Assert.assertThat; +@Ignore("Test currently broken - needs platform fix") public class StandardEhcacheStatisticsTest { private final EhcacheStatisticsProviderConfiguration EHCACHE_STATS_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index a2d6c75e63..ec209eda71 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -34,6 +34,7 @@ import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.management.ManagementRegistryService; +import org.junit.Ignore; import org.junit.rules.Timeout; import org.terracotta.management.registry.ResultSet; import org.terracotta.management.registry.StatisticQuery; @@ -246,6 +247,7 @@ public void testCanGetCapabilities() { cacheManager1.close(); } + @Ignore("Test currently broken - needs platform fix") @Test public void testCanGetStats() { String queryStatisticName = "Cache:HitCount"; @@ -355,6 +357,7 @@ private static ResultSet getResultSet(Builder builder, Con return counters; } + @Ignore("Test currently broken - needs platform fix") @Test public void testCanGetStatsSinceTime() throws InterruptedException { diff --git 
a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java index d32fc46720..ee213c7c42 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java @@ -25,6 +25,7 @@ import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; @@ -156,6 +157,7 @@ public void testSharedCapabilities() { assertThat(new ArrayList(capabilities2).get(3).getName(), equalTo("SettingsCapability")); } + @Ignore("Test currently broken - needs platform fix") @Test public void testStats() { String statisticName = "Cache:MissCount"; From 014c45958a9263243c915a265389b9631c7c5ecb Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Tue, 22 Nov 2016 08:18:42 -0500 Subject: [PATCH 149/218] :bug: Close #1641 (https://github.com/Terracotta-OSS/statistics/pull/21) --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index fd198e098f..ba24225559 100644 --- a/build.gradle +++ b/build.gradle @@ -22,7 +22,7 @@ ext { // Third parties offheapVersion = '2.3.1' - statisticVersion = '1.4.0' + statisticVersion = '1.4.1' jcacheVersion = '1.0.0' slf4jVersion = '1.7.7' sizeofVersion = '0.3.0' From 4dbfced04110136bff482372a14fa687344f1ef8 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Tue, 22 Nov 2016 09:24:00 -0500 Subject: [PATCH 150/218] Revert: Ignore all currently broken stats tests --- .../ehcache/management/providers/statistics/EvictionTest.java | 2 -- .../ehcache/management/providers/statistics/HitCountTest.java | 2 -- .../ehcache/management/providers/statistics/HitRatioTest.java | 2 -- .../ehcache/management/providers/statistics/MissCountTest.java | 2 -- .../ehcache/management/providers/statistics/MissRatioTest.java | 2 -- .../providers/statistics/StandardEhcacheStatisticsTest.java | 2 -- .../registry/DefaultManagementRegistryServiceTest.java | 3 --- .../registry/DefaultSharedManagementServiceTest.java | 2 -- 8 files changed, 17 deletions(-) diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java index c95bd8276e..bc8cb222ae 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/EvictionTest.java @@ -46,7 +46,6 @@ import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -57,7 +56,6 @@ import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.history.CounterHistory; -@Ignore("Test currently broken - needs platform fix") @RunWith(Parameterized.class) public class EvictionTest { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java index 946a16f08c..eff9a7d146 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java +++ 
b/management/src/test/java/org/ehcache/management/providers/statistics/HitCountTest.java @@ -41,7 +41,6 @@ import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -50,7 +49,6 @@ import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; -@Ignore("Test currently broken - needs platform fix") @RunWith(Parameterized.class) public class HitCountTest { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java index eaa6524047..b816952db6 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitRatioTest.java @@ -28,7 +28,6 @@ import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -49,7 +48,6 @@ import static org.ehcache.config.units.EntryUnit.ENTRIES; import static org.ehcache.config.units.MemoryUnit.MB; -@Ignore("Test currently broken - needs platform fix") @RunWith(Parameterized.class) public class HitRatioTest { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java index a5c7561497..9606d08840 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissCountTest.java @@ -39,7 +39,6 @@ import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -48,7 +47,6 @@ import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; -@Ignore("Test currently broken - needs platform fix") @RunWith(Parameterized.class) public class MissCountTest { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java index 23b671419e..c321acdc3d 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissRatioTest.java @@ -40,7 +40,6 @@ import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; import org.ehcache.management.registry.DefaultManagementRegistryService; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -49,7 +48,6 @@ import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; -@Ignore("Test currently broken - needs platform fix") @RunWith(Parameterized.class) public class MissRatioTest { diff --git 
a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java index ce6532ea72..784d9a71d3 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StandardEhcacheStatisticsTest.java @@ -31,7 +31,6 @@ import org.ehcache.management.registry.DefaultManagementRegistryService; import org.hamcrest.Matchers; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; @@ -41,7 +40,6 @@ import static org.junit.Assert.assertThat; -@Ignore("Test currently broken - needs platform fix") public class StandardEhcacheStatisticsTest { private final EhcacheStatisticsProviderConfiguration EHCACHE_STATS_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.MILLISECONDS,10,TimeUnit.MINUTES); diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index ec209eda71..a2d6c75e63 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -34,7 +34,6 @@ import org.ehcache.config.builders.CacheConfigurationBuilder; import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.management.ManagementRegistryService; -import org.junit.Ignore; import org.junit.rules.Timeout; import org.terracotta.management.registry.ResultSet; import org.terracotta.management.registry.StatisticQuery; @@ -247,7 +246,6 @@ public void testCanGetCapabilities() { cacheManager1.close(); } - @Ignore("Test currently broken - needs platform fix") @Test public void testCanGetStats() { String queryStatisticName = "Cache:HitCount"; @@ -357,7 +355,6 @@ private static ResultSet getResultSet(Builder builder, Con return counters; } - @Ignore("Test currently broken - needs platform fix") @Test public void testCanGetStatsSinceTime() throws InterruptedException { diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java index ee213c7c42..d32fc46720 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java @@ -25,7 +25,6 @@ import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; @@ -157,7 +156,6 @@ public void testSharedCapabilities() { assertThat(new ArrayList(capabilities2).get(3).getName(), equalTo("SettingsCapability")); } - @Ignore("Test currently broken - needs platform fix") @Test public void testStats() { String statisticName = "Cache:MissCount"; From 95a0c689f7d6d6431c8382ba2487a0bc1d1bdf98 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Wed, 16 Nov 2016 16:36:51 -0500 Subject: [PATCH 151/218] :heavy_plus_sign: Close #1609: Support new voltron statistic service --- build.gradle | 2 +- clustered/client/build.gradle | 
1 - clustered/integration-test/build.gradle | 2 - .../AbstractClusteringManagementTest.java | 193 ++++++++++-------- .../ClusteringManagementServiceTest.java | 90 +++++--- clustered/server/build.gradle | 10 +- .../clustered/server/EhcacheActiveEntity.java | 2 +- .../server/EhcachePassiveEntity.java | 2 +- .../management/AbstractExposedStatistics.java | 166 --------------- .../AbstractStatisticsManagementProvider.java | 89 -------- ...ClientStateSettingsManagementProvider.java | 9 +- .../server/management/Management.java | 62 ++---- .../management/OffHeapResourceBinding.java | 32 --- ...eapResourceSettingsManagementProvider.java | 70 ------- .../PoolSettingsManagementProvider.java | 10 +- .../PoolStatisticsManagementProvider.java | 46 +++-- ...ServerStoreSettingsManagementProvider.java | 8 +- ...rverStoreStatisticsManagementProvider.java | 18 +- .../StatisticCollectorManagementProvider.java | 73 ------- .../management/StatisticConfiguration.java | 148 -------------- management/build.gradle | 6 +- .../DefaultClusteringManagementService.java | 36 +++- ...efaultStatisticsProviderConfiguration.java | 123 ++--------- .../statistics/StandardEhcacheStatistics.java | 105 +--------- .../registry/DefaultCollectorService.java | 18 +- ...entRegistryServiceConfigurationParser.java | 15 +- 26 files changed, 321 insertions(+), 1015 deletions(-) delete mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java delete mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java delete mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java delete mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java delete mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java delete mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticConfiguration.java diff --git a/build.gradle b/build.gradle index ba24225559..f475a2c893 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.11.beta2' + terracottaPlatformVersion = '5.0.11.beta3' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.11.beta' terracottaCoreVersion = '5.0.11-beta2' diff --git a/clustered/client/build.gradle b/clustered/client/build.gradle index 335a90ebc2..fd4dc8a556 100644 --- a/clustered/client/build.gradle +++ b/clustered/client/build.gradle @@ -23,7 +23,6 @@ dependencies { compileOnly project(':xml') compile project(':clustered:common'), "org.slf4j:slf4j-api:$parent.slf4jVersion" provided "org.terracotta:entity-client-api:$parent.entityApiVersion" - provided "org.terracotta.management:monitoring-service-api:$parent.managementVersion" // provided in management-server jar provided "org.terracotta:runnel:$parent.terracottaPlatformVersion" testCompile project(':api') diff --git a/clustered/integration-test/build.gradle b/clustered/integration-test/build.gradle index 20a21dc623..7827be1864 100644 --- a/clustered/integration-test/build.gradle +++ b/clustered/integration-test/build.gradle @@ -28,7 +28,6 @@ dependencies { testCompile project(':clustered:clustered-dist') testCompile project(':management') testCompile "org.terracotta.management.dist:management-client:$parent.managementVersion" - testCompile 
"org.terracotta.management:monitoring-service-entity:$parent.managementVersion" testCompile "com.fasterxml.jackson.core:jackson-databind:2.8.0" testCompile group:'org.terracotta', name:'galvan-support', version: galvanVersion @@ -41,7 +40,6 @@ dependencies { serverLibs ("org.terracotta.management.dist:management-server:$parent.managementVersion") { exclude group:'org.terracotta.management.dist', module:'management-common' } - serverLibs "org.terracotta.management:monitoring-service-entity:$parent.managementVersion" // test entity for monitoring service } task unzipKit(type: Copy) { diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 5987b3dfcb..8361ea04b2 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -30,12 +30,9 @@ import org.junit.Rule; import org.junit.rules.Timeout; import org.terracotta.connection.Connection; -import org.terracotta.connection.ConnectionFactory; import org.terracotta.management.entity.management.ManagementAgentConfig; import org.terracotta.management.entity.management.client.ManagementAgentEntityFactory; import org.terracotta.management.entity.management.client.ManagementAgentService; -import org.terracotta.management.entity.monitoring.client.MonitoringServiceEntityFactory; -import org.terracotta.management.entity.monitoring.client.MonitoringServiceProxyEntity; import org.terracotta.management.entity.tms.TmsAgentConfig; import org.terracotta.management.entity.tms.client.TmsAgentEntity; import org.terracotta.management.entity.tms.client.TmsAgentEntityFactory; @@ -53,11 +50,10 @@ import java.io.File; import java.io.FileNotFoundException; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Properties; +import java.util.Map; import java.util.Scanner; import java.util.concurrent.Exchanger; import java.util.concurrent.TimeUnit; @@ -74,7 +70,7 @@ import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @@ -89,15 +85,19 @@ public abstract class AbstractClusteringManagementTest { + "" + "\n"; - protected static MonitoringServiceProxyEntity consumer; protected static CacheManager cacheManager; - protected static ClientIdentifier clientIdentifier; - protected static ServerEntityIdentifier serverEntityIdentifier; + protected static ClientIdentifier ehcacheClientIdentifier; + protected static ServerEntityIdentifier ehcacheServerEntityIdentifier; protected static ObjectMapper mapper = new ObjectMapper(); - private static final List MANAGEMENT_PLUGINS = Stream.of(System.getProperty("managementPlugins", "").split(File.pathSeparator)) - .map(File::new) - .collect(Collectors.toList()); + protected static TmsAgentEntity tmsAgentEntity; + protected static ServerEntityIdentifier tmsServerEntityIdentifier; + + private static final List MANAGEMENT_PLUGINS = 
System.getProperty("managementPlugins") == null ? + Collections.emptyList() : + Stream.of(System.getProperty("managementPlugins").split(File.pathSeparator)) + .map(File::new) + .collect(Collectors.toList()); @ClassRule public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, MANAGEMENT_PLUGINS, "", RESOURCE_CONFIG, ""); @@ -108,8 +108,16 @@ public static void beforeClass() throws Exception { CLUSTER.getClusterControl().waitForActive(); - consumer = new MonitoringServiceEntityFactory(ConnectionFactory.connect(CLUSTER.getConnectionURI(), new Properties())).retrieveOrCreate("MonitoringConsumerEntity"); - consumer.createMessageBuffer(1024); + // simulate a TMS client + Connection managementConnection = CLUSTER.newConnection(); + TmsAgentEntityFactory entityFactory = new TmsAgentEntityFactory(managementConnection, AbstractClusteringManagementTest.class.getName()); + tmsAgentEntity = entityFactory.retrieveOrCreate(new TmsAgentConfig()); + tmsServerEntityIdentifier = readTopology() + .activeServerEntityStream() + .filter(serverEntity -> serverEntity.getType().equals(TmsAgentConfig.ENTITY_TYPE)) + .findFirst() + .get() // throws if not found + .getServerEntityIdentifier(); cacheManager = newCacheManagerBuilder() // cluster config @@ -152,14 +160,14 @@ public static void beforeClass() throws Exception { // ensure the CM is running and get its client id assertThat(cacheManager.getStatus(), equalTo(Status.AVAILABLE)); - clientIdentifier = consumer.readTopology().getClients().values() + ehcacheClientIdentifier = readTopology().getClients().values() .stream() .filter(client -> client.getName().equals("Ehcache:my-server-entity-1")) .findFirst() .map(Client::getClientIdentifier) .get(); - serverEntityIdentifier = consumer.readTopology() + ehcacheServerEntityIdentifier = readTopology() .activeServerEntityStream() .filter(serverEntity -> serverEntity.getName().equals("my-server-entity-1")) .findFirst() @@ -167,14 +175,27 @@ public static void beforeClass() throws Exception { .getServerEntityIdentifier(); // test_notifs_sent_at_CM_init - List messages = consumer.drainMessageBuffer(); + List messages = readMessages(); List notificationTypes = notificationTypes(messages); - assertThat(notificationTypes.get(0), equalTo("CLIENT_CONNECTED")); - assertThat(notificationTypes.containsAll(Arrays.asList( - "SERVER_ENTITY_CREATED", "SERVER_ENTITY_FETCHED", - "ENTITY_REGISTRY_AVAILABLE", "ENTITY_REGISTRY_UPDATED", "EHCACHE_RESOURCE_POOLS_CONFIGURED", "EHCACHE_CLIENT_VALIDATED", "EHCACHE_SERVER_STORE_CREATED", - "CLIENT_REGISTRY_AVAILABLE", "CLIENT_TAGS_UPDATED")), is(true)); - assertThat(consumer.readMessageBuffer(), is(nullValue())); + + Map> counts = notificationTypes.stream().collect(Collectors.groupingBy(o -> o)); + assertThat(counts.keySet(), hasSize(12)); + assertThat(counts.get("CLIENT_CONNECTED"), hasSize(1)); + assertThat(counts.get("CLIENT_REGISTRY_AVAILABLE"), hasSize(1)); + assertThat(counts.get("CLIENT_TAGS_UPDATED"), hasSize(1)); + assertThat(counts.get("EHCACHE_CLIENT_VALIDATED"), hasSize(1)); + assertThat(counts.get("EHCACHE_RESOURCE_POOLS_CONFIGURED"), hasSize(1)); + assertThat(counts.get("EHCACHE_SERVER_STORE_CREATED"), hasSize(3)); + assertThat(counts.get("ENTITY_REGISTRY_AVAILABLE"), hasSize(2)); + assertThat(counts.get("ENTITY_REGISTRY_UPDATED"), hasSize(11)); + assertThat(counts.get("SERVER_ENTITY_CREATED"), hasSize(5)); + assertThat(counts.get("SERVER_ENTITY_DESTROYED"), hasSize(1)); + assertThat(counts.get("SERVER_ENTITY_FETCHED"), hasSize(7)); + 
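// Not part of the patch: a minimal sketch of the TMS-agent pattern that replaces the old
// monitoring consumer entity in this test. It assumes the imports already present in this
// test class; the entity name passed to the factory is arbitrary.
private static void dumpTopologyAndMessages(Connection connection) throws Exception {
  TmsAgentEntityFactory factory = new TmsAgentEntityFactory(connection, "my-tms-client");
  TmsAgentEntity tmsAgent = factory.retrieveOrCreate(new TmsAgentConfig());

  // One-shot snapshot of the whole cluster: clients, server entities and their management registries.
  org.terracotta.management.model.cluster.Cluster topology = tmsAgent.readTopology().get();
  System.out.println(topology.toMap());

  // Drains the notifications and statistics buffered since the previous read, which is why
  // the init() method further down simply calls readMessages() to clear the buffer between tests.
  List<?> messages = tmsAgent.readMessages().get();
  System.out.println(messages.size() + " buffered message(s)");
}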
assertThat(counts.get("SERVER_ENTITY_UNFETCHED"), hasSize(3)); + + assertThat(readMessages(), hasSize(0)); + + sendManagementCallOnEntityToCollectStats(); } @AfterClass @@ -189,11 +210,19 @@ public static void afterClass() throws Exception { @Before public void init() throws Exception { - if (consumer != null) { - consumer.clearMessageBuffer(); + if (tmsAgentEntity != null) { + readMessages(); } } + protected static org.terracotta.management.model.cluster.Cluster readTopology() throws Exception { + return tmsAgentEntity.readTopology().get(); + } + + protected static List readMessages() throws Exception { + return tmsAgentEntity.readMessages().get(); + } + protected static ContextualReturn sendManagementCallOnClientToCollectStats(String... statNames) throws Exception { Connection managementConnection = CLUSTER.newConnection(); try { @@ -204,7 +233,7 @@ protected static ContextualReturn sendManagementCallOnClientToCollectStats(St agent.setContextualReturnListener((from, id, aReturn) -> { try { - assertEquals(clientIdentifier, from); + assertEquals(ehcacheClientIdentifier, from); assertEquals(managementCallId.get(), id); exchanger.exchange(aReturn); } catch (InterruptedException e) { @@ -213,7 +242,7 @@ protected static ContextualReturn sendManagementCallOnClientToCollectStats(St }); managementCallId.set(agent.call( - clientIdentifier, + ehcacheClientIdentifier, Context.create("cacheManagerName", "my-super-cache-manager"), "StatisticCollectorCapability", "updateCollectedStatistics", @@ -230,61 +259,10 @@ protected static ContextualReturn sendManagementCallOnClientToCollectStats(St } } - protected static void sendManagementCallOnEntityToCollectStats() throws Exception { - Connection managementConnection = CLUSTER.newConnection(); - try { - TmsAgentEntityFactory entityFactory = new TmsAgentEntityFactory(managementConnection, AbstractClusteringManagementTest.class.getName()); - TmsAgentEntity tmsAgentEntity = entityFactory.retrieveOrCreate(new TmsAgentConfig()); - - // get the context from the topology for the ehcache server entity - Context context = tmsAgentEntity.readTopology().get().getSingleStripe().getActiveServerEntity(serverEntityIdentifier).get().getContext(); - - ContextualReturn result = tmsAgentEntity.call( - context, - "StatisticCollectorCapability", - "updateCollectedStatistics", - Void.TYPE, - new Parameter("PoolStatistics"), - new Parameter(asList( - "Pool:AllocatedSize" - ), Collection.class.getName()) - ).get(); - - assertThat(result.hasExecuted(), is(true)); - - result = tmsAgentEntity.call( - context, - "StatisticCollectorCapability", - "updateCollectedStatistics", - Void.TYPE, - new Parameter("ServerStoreStatistics"), - new Parameter(asList( - "Store:AllocatedMemory", - "Store:DataAllocatedMemory", - "Store:OccupiedMemory", - "Store:DataOccupiedMemory", - "Store:Entries", - "Store:UsedSlotCount", - "Store:DataVitalMemory", - "Store:VitalMemory", - "Store:ReprobeLength", - "Store:RemovedSlotCount", - "Store:DataSize", - "Store:TableCapacity" - ), Collection.class.getName()) - ).get(); - - assertThat(result.hasExecuted(), is(true)); - - } finally { - managementConnection.close(); - } - } - - protected static List waitForNextStats() { + protected static List waitForNextStats() throws Exception { // uses the monitoring consumre entity to get the content of the stat buffer when some stats are collected while (!Thread.currentThread().isInterrupted()) { - List messages = consumer.drainMessageBuffer() + List messages = readMessages() .stream() .filter(message -> 
message.getType().equals("STATISTICS")) .flatMap(message -> message.unwrap(ContextualStatistics.class).stream()) @@ -324,4 +302,59 @@ protected static String normalizeForLineEndings(String stringToNormalize) { return stringToNormalize.replace("\r\n", "\n").replace("\r", "\n"); } + private static void sendManagementCallOnEntityToCollectStats() throws Exception { + + Context context = readTopology().getSingleStripe().getActiveServerEntity(tmsServerEntityIdentifier).get().getContext(); + + ContextualReturn result = tmsAgentEntity.call( + context, + "StatisticCollectorCapability", + "updateCollectedStatistics", + Void.TYPE, + new Parameter("PoolStatistics"), + new Parameter(asList( + "Pool:AllocatedSize" + ), Collection.class.getName()) + ).get(); + + assertThat(result.hasExecuted(), is(true)); + + result = tmsAgentEntity.call( + context, + "StatisticCollectorCapability", + "updateCollectedStatistics", + Void.TYPE, + new Parameter("ServerStoreStatistics"), + new Parameter(asList( + "Store:AllocatedMemory", + "Store:DataAllocatedMemory", + "Store:OccupiedMemory", + "Store:DataOccupiedMemory", + "Store:Entries", + "Store:UsedSlotCount", + "Store:DataVitalMemory", + "Store:VitalMemory", + "Store:ReprobeLength", + "Store:RemovedSlotCount", + "Store:DataSize", + "Store:TableCapacity" + ), Collection.class.getName()) + ).get(); + + assertThat(result.hasExecuted(), is(true)); + + result = tmsAgentEntity.call( + context, + "StatisticCollectorCapability", + "updateCollectedStatistics", + Void.TYPE, + new Parameter("OffHeapResourceStatistics"), + new Parameter(asList( + "OffHeapResource:AllocatedMemory" + ), Collection.class.getName()) + ).get(); + + assertThat(result.hasExecuted(), is(true)); + } + } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index e55b4e7ca0..e6361eeba6 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -47,8 +47,6 @@ import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; import static org.junit.Assert.assertThat; @@ -63,24 +61,25 @@ public class ClusteringManagementServiceTest extends AbstractClusteringManagemen private static final Collection CACHE_DESCRIPTORS = new ArrayList<>(); private static final Collection POOL_DESCRIPTORS = new ArrayList<>(); private static final Collection SERVER_STORE_DESCRIPTORS = new ArrayList<>(); + private static final Collection OFFHEAP_RES_DESCRIPTORS = new ArrayList<>(); @Test @Ignore("This is not a test, but something useful to show a json print of a cluster topology with all management metadata inside") public void test_A_topology() throws Exception { - Cluster cluster = consumer.readTopology(); + Cluster cluster = tmsAgentEntity.readTopology().get(); String json = mapper.writeValueAsString(cluster.toMap()); System.out.println(json); } @Test public void 
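// Not part of the patch: a condensed sketch of the entity-side management-call pattern used by
// sendManagementCallOnEntityToCollectStats() in the abstract test above, assuming the imports
// of AbstractClusteringManagementTest. The capability and statistic names passed in are whatever
// the target server entity actually exposes.
private static void triggerStatisticCollection(String statisticsCapability, Collection<String> statisticNames) throws Exception {
  // Management calls are addressed by Context; here we target the TMS agent server entity,
  // whose context is looked up from a fresh topology snapshot.
  Context context = readTopology()
      .getSingleStripe()
      .getActiveServerEntity(tmsServerEntityIdentifier)
      .get()
      .getContext();

  ContextualReturn<?> result = tmsAgentEntity.call(
      context,
      "StatisticCollectorCapability",  // provider that handles the call on the server
      "updateCollectedStatistics",     // exposed method name
      Void.TYPE,
      new Parameter(statisticsCapability),
      new Parameter(statisticNames, Collection.class.getName())
  ).get();

  // hasExecuted() reports whether the target entity actually ran the call.
  if (!result.hasExecuted()) {
    throw new AssertionError("Management call was not executed for " + statisticsCapability);
  }
}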
test_A_client_tags_exposed() throws Exception { - String[] tags = consumer.readTopology().getClient(clientIdentifier).get().getTags().toArray(new String[0]); + String[] tags = readTopology().getClient(ehcacheClientIdentifier).get().getTags().toArray(new String[0]); assertThat(tags, equalTo(new String[]{"server-node-1", "webapp-1"})); } @Test public void test_B_client_contextContainer_exposed() throws Exception { - ContextContainer contextContainer = consumer.readTopology().getClient(clientIdentifier).get().getManagementRegistry().get().getContextContainer(); + ContextContainer contextContainer = readTopology().getClient(ehcacheClientIdentifier).get().getManagementRegistry().get().getContextContainer(); assertThat(contextContainer.getValue(), equalTo("my-super-cache-manager")); Collection subContexts = contextContainer.getSubContexts(); TreeSet cacheNames = subContexts.stream().map(ContextContainer::getValue).collect(Collectors.toCollection(TreeSet::new)); @@ -91,7 +90,7 @@ public void test_B_client_contextContainer_exposed() throws Exception { @Test public void test_C_client_capabilities_exposed() throws Exception { - Capability[] capabilities = consumer.readTopology().getClient(clientIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); + Capability[] capabilities = readTopology().getClient(ehcacheClientIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); assertThat(capabilities.length, equalTo(5)); assertThat(capabilities[0].getName(), equalTo("ActionsCapability")); assertThat(capabilities[1].getName(), equalTo("StatisticsCapability")); @@ -113,27 +112,25 @@ public void test_C_client_capabilities_exposed() throws Exception { @Test public void test_D_server_capabilities_exposed() throws Exception { - Capability[] capabilities = consumer.readTopology().getSingleStripe().getActiveServerEntity(serverEntityIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); + Capability[] capabilities = readTopology().getSingleStripe().getActiveServerEntity(ehcacheServerEntityIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); - assertThat(capabilities.length, equalTo(7)); + assertThat(capabilities.length, equalTo(5)); assertThat(capabilities[0].getName(), equalTo("ClientStateSettings")); - assertThat(capabilities[1].getName(), equalTo("OffHeapResourceSettings")); - assertThat(capabilities[2].getName(), equalTo("ServerStoreSettings")); - assertThat(capabilities[3].getName(), equalTo("PoolSettings")); - assertThat(capabilities[4].getName(), equalTo("ServerStoreStatistics")); - assertThat(capabilities[5].getName(), equalTo("PoolStatistics")); - assertThat(capabilities[6].getName(), equalTo("StatisticCollectorCapability")); + assertThat(capabilities[1].getName(), equalTo("ServerStoreSettings")); + assertThat(capabilities[2].getName(), equalTo("PoolSettings")); + assertThat(capabilities[3].getName(), equalTo("ServerStoreStatistics")); + assertThat(capabilities[4].getName(), equalTo("PoolStatistics")); + - assertThat(capabilities[1].getDescriptors(), hasSize(3)); // time + 2 resources - assertThat(capabilities[2].getDescriptors(), hasSize(4)); // time descriptor + 3 dedicated store + assertThat(capabilities[1].getDescriptors(), hasSize(4)); // time descriptor + 3 dedicated store // stats - assertThat(capabilities[4].getDescriptors(), containsInAnyOrder(SERVER_STORE_DESCRIPTORS.toArray())); - assertThat(capabilities[4].getDescriptors(), 
hasSize(SERVER_STORE_DESCRIPTORS.size())); - assertThat(capabilities[5].getDescriptors(), containsInAnyOrder(POOL_DESCRIPTORS.toArray())); - assertThat(capabilities[5].getDescriptors(), hasSize(POOL_DESCRIPTORS.size())); + assertThat(capabilities[3].getDescriptors(), containsInAnyOrder(SERVER_STORE_DESCRIPTORS.toArray())); + assertThat(capabilities[3].getDescriptors(), hasSize(SERVER_STORE_DESCRIPTORS.size())); + assertThat(capabilities[4].getDescriptors(), containsInAnyOrder(POOL_DESCRIPTORS.toArray())); + assertThat(capabilities[4].getDescriptors(), hasSize(POOL_DESCRIPTORS.size())); // ClientStateSettings @@ -143,26 +140,26 @@ public void test_D_server_capabilities_exposed() throws Exception { // EhcacheStateServiceSettings - List descriptors = new ArrayList<>(capabilities[3].getDescriptors()); + List descriptors = new ArrayList<>(capabilities[2].getDescriptors()); assertThat(descriptors, hasSize(4)); settings = (Settings) descriptors.get(0); assertThat(settings.get("alias"), equalTo("resource-pool-b")); - assertThat(settings.get("type"), equalTo("PoolBinding")); + assertThat(settings.get("type"), equalTo("Pool")); assertThat(settings.get("serverResource"), equalTo("primary-server-resource")); assertThat(settings.get("size"), equalTo(16 * 1024 * 1024L)); assertThat(settings.get("allocationType"), equalTo("shared")); settings = (Settings) descriptors.get(1); assertThat(settings.get("alias"), equalTo("resource-pool-a")); - assertThat(settings.get("type"), equalTo("PoolBinding")); + assertThat(settings.get("type"), equalTo("Pool")); assertThat(settings.get("serverResource"), equalTo("secondary-server-resource")); assertThat(settings.get("size"), equalTo(28 * 1024 * 1024L)); assertThat(settings.get("allocationType"), equalTo("shared")); settings = (Settings) descriptors.get(2); assertThat(settings.get("alias"), equalTo("dedicated-cache-1")); - assertThat(settings.get("type"), equalTo("PoolBinding")); + assertThat(settings.get("type"), equalTo("Pool")); assertThat(settings.get("serverResource"), equalTo("primary-server-resource")); assertThat(settings.get("size"), equalTo(4 * 1024 * 1024L)); assertThat(settings.get("allocationType"), equalTo("dedicated")); @@ -170,6 +167,20 @@ public void test_D_server_capabilities_exposed() throws Exception { settings = (Settings) descriptors.get(3); assertThat(settings.get("type"), equalTo("PoolSettingsManagementProvider")); assertThat(settings.get("defaultServerResource"), equalTo("primary-server-resource")); + + // tms entity + + capabilities = readTopology().activeServerEntityStream().filter(serverEntity -> serverEntity.is(tmsServerEntityIdentifier)).findFirst().get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); + assertThat(capabilities.length, equalTo(3)); + + assertThat(capabilities[0].getName(), equalTo("OffHeapResourceSettings")); + assertThat(capabilities[1].getName(), equalTo("OffHeapResourceStatistics")); + assertThat(capabilities[2].getName(), equalTo("StatisticCollectorCapability")); + + assertThat(capabilities[0].getDescriptors(), hasSize(3)); // time + 2 resources + + assertThat(capabilities[1].getDescriptors(), containsInAnyOrder(OFFHEAP_RES_DESCRIPTORS.toArray())); + assertThat(capabilities[1].getDescriptors(), hasSize(OFFHEAP_RES_DESCRIPTORS.size())); } @Test @@ -182,32 +193,31 @@ public void test_E_notifs_on_add_cache() throws Exception { .with(clusteredDedicated("primary-server-resource", 2, MemoryUnit.MB))) .build()); - ContextContainer contextContainer = 
consumer.readTopology().getClient(clientIdentifier).get().getManagementRegistry().get().getContextContainer(); + ContextContainer contextContainer = readTopology().getClient(ehcacheClientIdentifier).get().getManagementRegistry().get().getContextContainer(); assertThat(contextContainer.getSubContexts(), hasSize(4)); TreeSet cNames = contextContainer.getSubContexts().stream().map(ContextContainer::getValue).collect(Collectors.toCollection(TreeSet::new)); assertThat(cNames, equalTo(new TreeSet<>(Arrays.asList("cache-2", "dedicated-cache-1", "shared-cache-2", "shared-cache-3")))); - List messages = consumer.drainMessageBuffer(); + List messages = readMessages(); assertThat(notificationTypes(messages), equalTo(Arrays.asList( "ENTITY_REGISTRY_UPDATED", "EHCACHE_SERVER_STORE_CREATED", "ENTITY_REGISTRY_UPDATED", "CLIENT_REGISTRY_UPDATED", "CACHE_ADDED"))); - assertThat(consumer.readMessageBuffer(), is(nullValue())); + assertThat(readMessages(), hasSize(0)); } @Test public void test_F_notifs_on_remove_cache() throws Exception { cacheManager.removeCache("cache-2"); - List messages = consumer.drainMessageBuffer(); + List messages = readMessages(); assertThat(notificationTypes(messages), equalTo(Arrays.asList("CLIENT_REGISTRY_UPDATED", "CACHE_REMOVED", "ENTITY_REGISTRY_UPDATED"))); - assertThat(consumer.readMessageBuffer(), is(nullValue())); + assertThat(readMessages(), hasSize(0)); } @Test public void test_G_stats_collection() throws Exception { - sendManagementCallOnEntityToCollectStats(); sendManagementCallOnClientToCollectStats("Cache:HitCount"); Cache cache1 = cacheManager.getCache("dedicated-cache-1", String.class, String.class); @@ -274,8 +284,8 @@ public void test_G_stats_collection() throws Exception { assertThat( serverStats.stream() .map(ContextualStatistics::getCapability) - .collect(Collectors.toSet()), - equalTo(new HashSet<>(Arrays.asList("PoolStatistics", "ServerStoreStatistics")))); + .collect(Collectors.toCollection(TreeSet::new)), + equalTo(new TreeSet<>(Arrays.asList("PoolStatistics", "ServerStoreStatistics", "OffHeapResourceStatistics")))); // ensure we collect stats from all registered objects (pools and stores) @@ -293,6 +303,13 @@ public void test_G_stats_collection() throws Exception { .collect(Collectors.toSet()), equalTo(new HashSet<>(Arrays.asList("shared-cache-3", "shared-cache-2", "dedicated-cache-1", "cache-2")))); + assertThat( + serverStats.stream() + .filter(statistics -> statistics.getCapability().equals("OffHeapResourceStatistics")) + .map(statistics -> statistics.getContext().get("alias")) + .collect(Collectors.toSet()), + equalTo(new HashSet<>(Arrays.asList("primary-server-resource", "secondary-server-resource")))); + // ensure we collect all the stat names assertThat( @@ -308,6 +325,13 @@ public void test_G_stats_collection() throws Exception { .flatMap(statistics -> statistics.getStatistics().keySet().stream()) .collect(Collectors.toSet()), equalTo(SERVER_STORE_DESCRIPTORS.stream().map(StatisticDescriptor::getName).collect(Collectors.toSet()))); + + assertThat( + serverStats.stream() + .filter(statistics -> statistics.getCapability().equals("OffHeapResourceStatistics")) + .flatMap(statistics -> statistics.getStatistics().keySet().stream()) + .collect(Collectors.toSet()), + equalTo(OFFHEAP_RES_DESCRIPTORS.stream().map(StatisticDescriptor::getName).collect(Collectors.toSet()))); } @BeforeClass @@ -430,6 +454,8 @@ public static void initDescriptors() throws ClassNotFoundException { SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:RemovedSlotCount", 
StatisticType.COUNTER_HISTORY)); SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:DataSize", StatisticType.SIZE_HISTORY)); SERVER_STORE_DESCRIPTORS.add(new StatisticDescriptor("Store:TableCapacity", StatisticType.SIZE_HISTORY)); + + OFFHEAP_RES_DESCRIPTORS.add(new StatisticDescriptor("OffHeapResource:AllocatedMemory", StatisticType.SIZE_HISTORY)); } } diff --git a/clustered/server/build.gradle b/clustered/server/build.gradle index 4ed58e062f..3bbba7ed2c 100644 --- a/clustered/server/build.gradle +++ b/clustered/server/build.gradle @@ -22,19 +22,15 @@ sourceCompatibility = 1.8 targetCompatibility = 1.8 dependencies { - compile ("org.terracotta:statistics:$parent.statisticVersion") { - exclude group:'org.slf4j', module:'slf4j-api' - } compile project(':clustered:common'), "org.slf4j:slf4j-api:$parent.slf4jVersion" - compile group: 'org.terracotta', name: 'offheap-resource', version: parent.offheapResourceVersion + compile("org.terracotta:offheap-resource:$parent.offheapResourceVersion") { + transitive = false + } compile group: 'org.terracotta', name: 'offheap-store', version: parent.offheapVersion compile group: 'org.slf4j', name: 'slf4j-api', version: parent.slf4jVersion compile("org.terracotta.management:monitoring-service-api:$parent.managementVersion") { transitive = false } - compile ("org.terracotta:statistics:$parent.statisticVersion") { - exclude group:'org.slf4j', module:'slf4j-api' - } compile"org.terracotta.management.dist:management-common:$parent.managementVersion" provided "org.terracotta:entity-server-api:$parent.entityApiVersion" provided "org.terracotta:standard-cluster-services:$parent.terracottaApisVersion" diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 5cd477aa6b..11781ae232 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -181,7 +181,7 @@ public Class getServiceType() { if (entityMessenger == null) { throw new AssertionError("Server failed to retrieve IEntityMessenger service."); } - this.management = new Management(services, ehcacheStateService, this.offHeapResourceIdentifiers); + this.management = new Management(services, ehcacheStateService); } /** diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 4fee4e7e7a..3894c254ca 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -114,7 +114,7 @@ public void invoke(EhcacheEntityMessage message) { if (ehcacheStateService == null) { throw new AssertionError("Server failed to retrieve EhcacheStateService."); } - management = new Management(services, ehcacheStateService, offHeapResourceIdentifiers); + management = new Management(services, ehcacheStateService); } private void invokeRetirementMessages(PassiveReplicationMessage message) throws ClusterException { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java deleted file mode 100644 index 8ab4bf4488..0000000000 --- 
a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractExposedStatistics.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.management; - -import org.terracotta.context.extended.RegisteredStatistic; -import org.terracotta.context.extended.StatisticsRegistry; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; -import org.terracotta.management.model.stats.MemoryUnit; -import org.terracotta.management.model.stats.NumberUnit; -import org.terracotta.management.model.stats.Sample; -import org.terracotta.management.model.stats.Statistic; -import org.terracotta.management.model.stats.StatisticType; -import org.terracotta.management.model.stats.history.AverageHistory; -import org.terracotta.management.model.stats.history.CounterHistory; -import org.terracotta.management.model.stats.history.DurationHistory; -import org.terracotta.management.model.stats.history.RateHistory; -import org.terracotta.management.model.stats.history.RatioHistory; -import org.terracotta.management.model.stats.history.SizeHistory; -import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; -import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; -import org.terracotta.offheapstore.util.FindbugsSuppressWarnings; -import org.terracotta.statistics.extended.SampleType; -import org.terracotta.statistics.extended.SampledStatistic; - -import java.io.Closeable; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -@FindbugsSuppressWarnings("EQ_DOESNT_OVERRIDE_EQUALS") -class AbstractExposedStatistics extends AliasBindingManagementProvider.ExposedAliasBinding implements Closeable { - - private static final Map COMPOUND_SUFFIXES = new HashMap<>(); - - static { - COMPOUND_SUFFIXES.put("Count", SampleType.COUNTER); - COMPOUND_SUFFIXES.put("Rate", SampleType.RATE); - COMPOUND_SUFFIXES.put("LatencyMinimum", SampleType.LATENCY_MIN); - COMPOUND_SUFFIXES.put("LatencyMaximum", SampleType.LATENCY_MAX); - COMPOUND_SUFFIXES.put("LatencyAverage", SampleType.LATENCY_AVG); - } - - protected final StatisticsRegistry statisticsRegistry; - - AbstractExposedStatistics(long consumerId, T binding, StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor, Object statisticContextObject) { - super(binding, consumerId); - if (statisticContextObject == null) { - this.statisticsRegistry = null; - - } else { - this.statisticsRegistry = new StatisticsRegistry( - statisticContextObject, - executor, - 
statisticConfiguration.averageWindowDuration(), - statisticConfiguration.averageWindowUnit(), - statisticConfiguration.historySize(), - statisticConfiguration.historyInterval(), - statisticConfiguration.historyIntervalUnit(), - statisticConfiguration.timeToDisable(), - statisticConfiguration.timeToDisableUnit()); - } - } - - void init() { - } - - @Override - public void close() { - if (statisticsRegistry != null) { - statisticsRegistry.clearRegistrations(); - } - } - - @SuppressWarnings("unchecked") - public Statistic queryStatistic(String statisticName, long since) { - // first search for a non-compound stat - SampledStatistic statistic = statisticsRegistry.findSampledStatistic(statisticName); - - // if not found, it can be a compound stat, so search for it - if (statistic == null) { - for (Iterator> it = COMPOUND_SUFFIXES.entrySet().iterator(); it.hasNext() && statistic == null; ) { - Entry entry = it.next(); - statistic = statisticsRegistry.findSampledCompoundStatistic(statisticName.substring(0, Math.max(0, statisticName.length() - entry.getKey().length())), entry.getValue()); - } - } - - if (statistic != null) { - List> samples = statistic - .history(since) - .stream() - .map(t -> new Sample<>(t.getTimestamp(), t.getSample())) - .collect(Collectors.toList()); - - switch (statistic.type()) { - case COUNTER: return new CounterHistory((List>) samples, NumberUnit.COUNT); - case RATE: return new RateHistory((List>) samples, TimeUnit.SECONDS); - case LATENCY_MIN: return new DurationHistory((List>) samples, TimeUnit.NANOSECONDS); - case LATENCY_MAX: return new DurationHistory((List>) samples, TimeUnit.NANOSECONDS); - case LATENCY_AVG: return new AverageHistory((List>) samples, TimeUnit.NANOSECONDS); - case RATIO: return new RatioHistory((List>) samples, NumberUnit.RATIO); - case SIZE: return new SizeHistory((List>) samples, MemoryUnit.B); - default: throw new UnsupportedOperationException(statistic.type().name()); - } - } - - throw new IllegalArgumentException("No registered statistic named '" + statisticName + "'"); - } - - @Override - public Collection getDescriptors() { - Set capabilities = new HashSet<>(); - - if (statisticsRegistry != null) { - Map registrations = statisticsRegistry.getRegistrations(); - for (Entry entry : registrations.entrySet()) { - String statisticName = entry.getKey(); - RegisteredStatistic registeredStatistic = registrations.get(statisticName); - switch (registeredStatistic.getType()) { - case COUNTER: - capabilities.add(new StatisticDescriptor(statisticName, StatisticType.COUNTER_HISTORY)); - break; - case RATIO: - capabilities.add(new StatisticDescriptor(entry.getKey() + "Ratio", StatisticType.RATIO_HISTORY)); - break; - case SIZE: - capabilities.add(new StatisticDescriptor(statisticName, StatisticType.SIZE_HISTORY)); - break; - case COMPOUND: - capabilities.add(new StatisticDescriptor(entry.getKey() + "Count", StatisticType.COUNTER_HISTORY)); - capabilities.add(new StatisticDescriptor(entry.getKey() + "Rate", StatisticType.RATE_HISTORY)); - capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyMinimum", StatisticType.DURATION_HISTORY)); - capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyMaximum", StatisticType.DURATION_HISTORY)); - capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyAverage", StatisticType.AVERAGE_HISTORY)); - break; - default: - throw new UnsupportedOperationException(registeredStatistic.getType().name()); - } - } - } - - return capabilities; - } - -} diff --git 
a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java deleted file mode 100644 index bdfeacbb9b..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/AbstractStatisticsManagementProvider.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.management; - -import org.terracotta.management.model.capabilities.Capability; -import org.terracotta.management.model.capabilities.StatisticsCapability; -import org.terracotta.management.model.context.Context; -import org.terracotta.management.model.stats.Statistic; -import org.terracotta.management.registry.action.ExposedObject; -import org.terracotta.management.registry.action.Named; -import org.terracotta.management.registry.action.RequiredContext; -import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; -import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; - -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; - -@RequiredContext({@Named("consumerId")}) -abstract class AbstractStatisticsManagementProvider extends AliasBindingManagementProvider { - - private final StatisticConfiguration statisticConfiguration; - - public AbstractStatisticsManagementProvider(Class type, StatisticConfiguration statisticConfiguration) { - super(type); - this.statisticConfiguration = statisticConfiguration; - } - - public StatisticConfiguration getStatisticConfiguration() { - return statisticConfiguration; - } - - @Override - protected void dispose(ExposedObject exposedObject) { - ((AbstractExposedStatistics) exposedObject).close(); - } - - @Override - public Capability getCapability() { - StatisticsCapability.Properties properties = new StatisticsCapability.Properties( - statisticConfiguration.averageWindowDuration(), - statisticConfiguration.averageWindowUnit(), - statisticConfiguration.historySize(), - statisticConfiguration.historyInterval(), - statisticConfiguration.historyIntervalUnit(), - statisticConfiguration.timeToDisable(), - statisticConfiguration.timeToDisableUnit()); - return new StatisticsCapability(getCapabilityName(), properties, getDescriptors(), getCapabilityContext()); - } - - @Override - public Map> collectStatistics(Context context, Collection statisticNames, long since) { - Map> statistics = new HashMap>(statisticNames.size()); - AbstractExposedStatistics ehcacheStatistics = (AbstractExposedStatistics) findExposedObject(context); - if (ehcacheStatistics != null) { - for (String statisticName : statisticNames) { - try { - statistics.put(statisticName, ehcacheStatistics.queryStatistic(statisticName, since)); - } catch (IllegalArgumentException ignored) { - // ignore when statisticName does not exist and throws an exception - } 
- } - } - return statistics; - } - - @Override - protected AbstractExposedStatistics wrap(T managedObject) { - AbstractExposedStatistics exposed = internalWrap(managedObject); - exposed.init(); - return exposed; - } - - protected abstract AbstractExposedStatistics internalWrap(T managedObject); - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java index 9ad74992c2..ba2d9e33e3 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ClientStateSettingsManagementProvider.java @@ -18,7 +18,6 @@ import org.ehcache.clustered.server.ClientState; import org.terracotta.management.model.capabilities.descriptors.Descriptor; import org.terracotta.management.model.capabilities.descriptors.Settings; -import org.terracotta.management.model.cluster.ClientIdentifier; import org.terracotta.management.model.context.Context; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; @@ -38,14 +37,14 @@ class ClientStateSettingsManagementProvider extends ClientBindingManagementProvi } @Override - protected ExposedClientStateBinding internalWrap(ClientStateBinding managedObject, long consumerId, ClientIdentifier clientIdentifier) { - return new ExposedClientStateBinding(managedObject, consumerId, clientIdentifier); + protected ExposedClientStateBinding internalWrap(Context context, ClientStateBinding managedObject) { + return new ExposedClientStateBinding(context, managedObject); } private static class ExposedClientStateBinding extends ExposedClientBinding { - ExposedClientStateBinding(ClientStateBinding clientBinding, long consumerId, ClientIdentifier clientIdentifier) { - super(clientBinding, consumerId, clientIdentifier); + ExposedClientStateBinding(Context context, ClientStateBinding clientBinding) { + super(context, clientBinding); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java index aad30b7fda..fd0d242621 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -24,60 +24,43 @@ import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.ServiceRegistry; import org.terracotta.management.model.context.Context; +import org.terracotta.management.registry.collect.StatisticConfiguration; import org.terracotta.management.service.monitoring.ConsumerManagementRegistry; import org.terracotta.management.service.monitoring.ConsumerManagementRegistryConfiguration; import org.terracotta.management.service.monitoring.registry.provider.ClientBinding; -import org.terracotta.offheapresource.OffHeapResource; -import org.terracotta.offheapresource.OffHeapResourceIdentifier; -import java.util.Set; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.atomic.AtomicLong; +import static java.util.concurrent.TimeUnit.SECONDS; public class Management { private static final Logger LOGGER = LoggerFactory.getLogger(Management.class); - // TODO FIXME: the following 
things are just temporary and should be removed/changed asap - // - scheduling should be done by using a voltron service (not yet available: see https://github.com/Terracotta-OSS/terracotta-apis/issues/158) - // - stats config should be given when configuring the entities (https://github.com/ehcache/ehcache3/issues/1567) - private static final AtomicLong managementSchedulerCount = new AtomicLong(); - private ScheduledExecutorService managementScheduler; - private final StatisticConfiguration statisticConfiguration = new StatisticConfiguration(); + // TODO: if a day we want to make that configurable, we can, and per provider, or globally as it is now + private final StatisticConfiguration statisticConfiguration = new StatisticConfiguration( + 60, SECONDS, + 100, 1, SECONDS, + 30, SECONDS + ); private final ConsumerManagementRegistry managementRegistry; - private final ServiceRegistry services; private final EhcacheStateService ehcacheStateService; - private final Set offHeapResourceIdentifiers; - public Management(ServiceRegistry services, EhcacheStateService ehcacheStateService, Set offHeapResourceIdentifiers) { + public Management(ServiceRegistry services, EhcacheStateService ehcacheStateService) { managementRegistry = services.getService(new ConsumerManagementRegistryConfiguration(services)); - this.services = services; this.ehcacheStateService = ehcacheStateService; - this.offHeapResourceIdentifiers = offHeapResourceIdentifiers; if (managementRegistry != null) { // expose settings about attached stores managementRegistry.addManagementProvider(new ClientStateSettingsManagementProvider()); - // expose settings about off-heap server service - managementRegistry.addManagementProvider(new OffHeapResourceSettingsManagementProvider()); + // expose settings about server stores managementRegistry.addManagementProvider(new ServerStoreSettingsManagementProvider()); // expose settings about pools managementRegistry.addManagementProvider(new PoolSettingsManagementProvider(ehcacheStateService)); - managementScheduler = Executors.unconfigurableScheduledExecutorService(Executors.newSingleThreadScheduledExecutor( - r -> { - Thread t = Executors.defaultThreadFactory().newThread(r); - t.setDaemon(true); - t.setName("ManagementScheduler-" + managementSchedulerCount.incrementAndGet()); - return t; - })); - // expose stats about server stores - managementRegistry.addManagementProvider(new ServerStoreStatisticsManagementProvider(statisticConfiguration, managementScheduler)); + managementRegistry.addManagementProvider(new ServerStoreStatisticsManagementProvider(statisticConfiguration)); // expose stats about pools - managementRegistry.addManagementProvider(new PoolStatisticsManagementProvider(ehcacheStateService, statisticConfiguration, managementScheduler)); + managementRegistry.addManagementProvider(new PoolStatisticsManagementProvider(ehcacheStateService, statisticConfiguration)); } } @@ -86,30 +69,10 @@ public void init() { if (managementRegistry != null) { LOGGER.trace("init()"); - managementRegistry.register(ehcacheStateService); - // PoolBinding.ALL_SHARED is a marker so that we can send events not specifically related to 1 pool // this object is ignored from the stats and descriptors managementRegistry.register(PoolBinding.ALL_SHARED); - // exposes available offheap service resources - for (String identifier : offHeapResourceIdentifiers) { - OffHeapResource offHeapResource = services.getService(OffHeapResourceIdentifier.identifier(identifier)); - managementRegistry.register(new 
OffHeapResourceBinding(identifier, offHeapResource)); - } - - // expose management calls on statistic collector - StatisticCollectorManagementProvider collectorManagementProvider = new StatisticCollectorManagementProvider( - managementRegistry, - statisticConfiguration, - managementScheduler, - new String[]{"PoolStatistics", "ServerStoreStatistics"}); - - managementRegistry.addManagementProvider(collectorManagementProvider); - - // start the stat collector (it won't collect any stats though, because they need to be configured through a management call) - collectorManagementProvider.init(); - // expose the management registry inside voltorn managementRegistry.refresh(); } @@ -119,7 +82,6 @@ public void close() { if (managementRegistry != null) { LOGGER.trace("close()"); managementRegistry.close(); - managementScheduler.shutdown(); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java deleted file mode 100644 index 7db37d37de..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceBinding.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.management; - -import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; -import org.terracotta.offheapresource.OffHeapResource; - -class OffHeapResourceBinding extends AliasBinding { - - OffHeapResourceBinding(String identifier, OffHeapResource offHeapResource) { - super(identifier, offHeapResource); - } - - @Override - public OffHeapResource getValue() { - return (OffHeapResource) super.getValue(); - } - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java deleted file mode 100644 index cd25fd2d49..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/OffHeapResourceSettingsManagementProvider.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
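// Not part of the patch: the statistic configuration that Management (above) now passes to the
// shared org.terracotta.management.registry.collect.StatisticConfiguration, annotated with the
// parameter names of the local StatisticConfiguration class this patch deletes further down
// (descriptions are inferred from those names, not from the library's documentation).
StatisticConfiguration statisticConfiguration = new StatisticConfiguration(
    60, SECONDS,   // averageWindowDuration, averageWindowUnit: window used for averaged/rate samples
    100,           // historySize: number of samples kept in each statistic history
    1, SECONDS,    // historyInterval, historyIntervalUnit: sampling period of the history
    30, SECONDS    // timeToDisable, timeToDisableUnit: statistics stop being computed after this idle time
);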
- */ -package org.ehcache.clustered.server.management; - -import org.terracotta.management.model.capabilities.descriptors.Descriptor; -import org.terracotta.management.model.capabilities.descriptors.Settings; -import org.terracotta.management.model.context.Context; -import org.terracotta.management.registry.action.Named; -import org.terracotta.management.registry.action.RequiredContext; -import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; - -import java.util.Collection; -import java.util.Collections; - -@Named("OffHeapResourceSettings") -@RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) -class OffHeapResourceSettingsManagementProvider extends AliasBindingManagementProvider { - - OffHeapResourceSettingsManagementProvider() { - super(OffHeapResourceBinding.class); - } - - @Override - public Collection getDescriptors() { - Collection descriptors = super.getDescriptors(); - descriptors.add(new Settings() - .set("type", "OffHeapResourceSettingsManagementProvider") - .set("time", System.currentTimeMillis())); - return descriptors; - } - - @Override - protected ExposedOffHeapResourceBinding wrap(OffHeapResourceBinding managedObject) { - return new ExposedOffHeapResourceBinding(managedObject, getMonitoringService().getConsumerId()); - } - - private static class ExposedOffHeapResourceBinding extends ExposedAliasBinding { - - ExposedOffHeapResourceBinding(OffHeapResourceBinding binding, long consumerId) { - super(binding, consumerId); - } - - @Override - public Context getContext() { - return super.getContext().with("type", "OffHeapResource"); - } - - @Override - public Collection getDescriptors() { - return Collections.singleton(new Settings(getContext()) - .set("capacity", getBinding().getValue().capacity()) - .set("availableAtTime", getBinding().getValue().available()) - ); - } - } - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java index f5ca9d8bba..b10cbc1ed2 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java @@ -54,19 +54,19 @@ public Collection> getExposedObjects() { } @Override - protected ExposedPoolBinding wrap(PoolBinding managedObject) { - return new ExposedPoolBinding(managedObject, getMonitoringService().getConsumerId()); + protected ExposedPoolBinding internalWrap(Context context, PoolBinding managedObject) { + return new ExposedPoolBinding(context, managedObject); } private static class ExposedPoolBinding extends ExposedAliasBinding { - ExposedPoolBinding(PoolBinding binding, long consumerId) { - super(binding, consumerId); + ExposedPoolBinding(Context context, PoolBinding binding) { + super(context, binding); } @Override public Context getContext() { - return super.getContext().with("type", "PoolBinding"); + return super.getContext().with("type", "Pool"); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java index 9b54fd5b2a..724ec913bf 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java +++ 
b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java @@ -17,16 +17,19 @@ import org.ehcache.clustered.server.state.EhcacheStateService; import org.ehcache.clustered.server.state.ResourcePageSource; +import org.terracotta.context.extended.StatisticsRegistry; import org.terracotta.management.model.context.Context; import org.terracotta.management.registry.action.ExposedObject; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.registry.collect.StatisticConfiguration; +import org.terracotta.management.service.monitoring.registry.provider.AbstractExposedStatistics; +import org.terracotta.management.service.monitoring.registry.provider.AbstractStatisticsManagementProvider; import java.util.Collection; import java.util.HashSet; import java.util.Objects; import java.util.Set; -import java.util.concurrent.ScheduledExecutorService; import java.util.stream.Collectors; import static java.util.Arrays.asList; @@ -37,12 +40,10 @@ class PoolStatisticsManagementProvider extends AbstractStatisticsManagementProvider { private final EhcacheStateService ehcacheStateService; - private final ScheduledExecutorService executor; - PoolStatisticsManagementProvider(EhcacheStateService ehcacheStateService, StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor) { + PoolStatisticsManagementProvider(EhcacheStateService ehcacheStateService, StatisticConfiguration statisticConfiguration) { super(PoolBinding.class, statisticConfiguration); this.ehcacheStateService = ehcacheStateService; - this.executor = executor; } @Override @@ -51,33 +52,42 @@ public Collection> getExposedObjects() { } @Override - protected AbstractExposedStatistics internalWrap(PoolBinding managedObject) { - ResourcePageSource resourcePageSource = null; - - if (managedObject != PoolBinding.ALL_SHARED) { - String poolName = managedObject.getAlias(); - resourcePageSource = managedObject.getAllocationType() == PoolBinding.AllocationType.DEDICATED ? 
- ehcacheStateService.getDedicatedResourcePageSource(poolName) : - ehcacheStateService.getSharedResourcePageSource(poolName); - Objects.requireNonNull(resourcePageSource, "Unable to locale pool " + poolName); + protected StatisticsRegistry createStatisticsRegistry(PoolBinding managedObject) { + if (managedObject == PoolBinding.ALL_SHARED) { + return null; } - return new PoolExposedStatistics(getMonitoringService().getConsumerId(), managedObject, getStatisticConfiguration(), executor, resourcePageSource); + String poolName = managedObject.getAlias(); + PoolBinding.AllocationType allocationType = managedObject.getAllocationType(); + + if (allocationType == PoolBinding.AllocationType.DEDICATED) { + ResourcePageSource resourcePageSource = Objects.requireNonNull(ehcacheStateService.getDedicatedResourcePageSource(poolName)); + return getStatisticsService().createStatisticsRegistry(getStatisticConfiguration(), resourcePageSource); + + } else { + ResourcePageSource resourcePageSource = Objects.requireNonNull(ehcacheStateService.getSharedResourcePageSource(poolName)); + return getStatisticsService().createStatisticsRegistry(getStatisticConfiguration(), resourcePageSource); + } + } + + @Override + protected AbstractExposedStatistics internalWrap(Context context, PoolBinding managedObject, StatisticsRegistry statisticsRegistry) { + return new PoolExposedStatistics(context, managedObject, statisticsRegistry); } private static class PoolExposedStatistics extends AbstractExposedStatistics { - PoolExposedStatistics(long consumerId, PoolBinding binding, StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor, ResourcePageSource resourcePageSource) { - super(consumerId, binding, statisticConfiguration, executor, resourcePageSource); + PoolExposedStatistics(Context context, PoolBinding binding, StatisticsRegistry statisticsRegistry) { + super(context, binding, statisticsRegistry); - if (resourcePageSource != null) { + if (statisticsRegistry != null) { statisticsRegistry.registerSize("AllocatedSize", descriptor("allocatedSize", tags("tier", "Pool"))); } } @Override public Context getContext() { - return super.getContext().with("type", "PoolBinding"); + return super.getContext().with("type", "Pool"); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java index edd9e9b3d1..0b4c66be94 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java @@ -44,14 +44,14 @@ public Collection getDescriptors() { } @Override - protected ExposedServerStoreBinding wrap(ServerStoreBinding managedObject) { - return new ExposedServerStoreBinding(managedObject, getMonitoringService().getConsumerId()); + protected ExposedServerStoreBinding internalWrap(Context context, ServerStoreBinding managedObject) { + return new ExposedServerStoreBinding(context, managedObject); } private static class ExposedServerStoreBinding extends ExposedAliasBinding { - ExposedServerStoreBinding(ServerStoreBinding binding, long consumerId) { - super(binding, consumerId); + ExposedServerStoreBinding(Context context, ServerStoreBinding binding) { + super(context, binding); } @Override diff --git 
a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java index 54d8d95184..ec5faeac57 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java @@ -15,13 +15,16 @@ */ package org.ehcache.clustered.server.management; +import org.terracotta.context.extended.StatisticsRegistry; import org.terracotta.management.model.context.Context; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; +import org.terracotta.management.registry.collect.StatisticConfiguration; +import org.terracotta.management.service.monitoring.registry.provider.AbstractExposedStatistics; +import org.terracotta.management.service.monitoring.registry.provider.AbstractStatisticsManagementProvider; import java.util.HashSet; import java.util.Set; -import java.util.concurrent.ScheduledExecutorService; import static java.util.Arrays.asList; import static org.terracotta.context.extended.ValueStatisticDescriptor.descriptor; @@ -30,22 +33,19 @@ @RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) class ServerStoreStatisticsManagementProvider extends AbstractStatisticsManagementProvider { - private final ScheduledExecutorService executor; - - ServerStoreStatisticsManagementProvider(StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor) { + ServerStoreStatisticsManagementProvider(StatisticConfiguration statisticConfiguration) { super(ServerStoreBinding.class, statisticConfiguration); - this.executor = executor; } @Override - protected AbstractExposedStatistics internalWrap(ServerStoreBinding managedObject) { - return new ServerStoreExposedStatistics(getMonitoringService().getConsumerId(), managedObject, getStatisticConfiguration(), executor); + protected AbstractExposedStatistics internalWrap(Context context, ServerStoreBinding managedObject, StatisticsRegistry statisticsRegistry) { + return new ServerStoreExposedStatistics(context, managedObject, statisticsRegistry); } private static class ServerStoreExposedStatistics extends AbstractExposedStatistics { - ServerStoreExposedStatistics(long consumerId, ServerStoreBinding binding, StatisticConfiguration statisticConfiguration, ScheduledExecutorService executor) { - super(consumerId, binding, statisticConfiguration, executor, binding.getValue()); + ServerStoreExposedStatistics(Context context, ServerStoreBinding binding, StatisticsRegistry statisticsRegistry) { + super(context, binding, statisticsRegistry); statisticsRegistry.registerSize("AllocatedMemory", descriptor("allocatedMemory", tags("tier", "Store"))); statisticsRegistry.registerSize("DataAllocatedMemory", descriptor("dataAllocatedMemory", tags("tier", "Store"))); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java deleted file mode 100644 index 64a7ad37ff..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticCollectorManagementProvider.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Terracotta, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.management; - -import org.terracotta.management.model.context.Context; -import org.terracotta.management.model.stats.ContextualStatistics; -import org.terracotta.management.registry.ManagementRegistry; -import org.terracotta.management.registry.action.ExposedObject; -import org.terracotta.management.registry.action.Named; -import org.terracotta.management.registry.action.RequiredContext; -import org.terracotta.management.registry.collect.DefaultStatisticCollector; -import org.terracotta.management.registry.collect.StatisticCollector; -import org.terracotta.management.registry.collect.StatisticCollectorProvider; -import org.terracotta.management.service.monitoring.MonitoringService; -import org.terracotta.management.service.monitoring.registry.provider.MonitoringServiceAware; - -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -@Named("StatisticCollectorCapability") -@RequiredContext({@Named("consumerId")}) -class StatisticCollectorManagementProvider extends StatisticCollectorProvider implements MonitoringServiceAware { - - private volatile MonitoringService monitoringService; - private final DefaultStatisticCollector statisticCollector; - - StatisticCollectorManagementProvider(ManagementRegistry managementRegistry, StatisticConfiguration statisticConfiguration, ScheduledExecutorService scheduledExecutorService, String[] statsCapabilitynames) { - super(StatisticCollector.class, Context.create(managementRegistry.getContextContainer().getName(), managementRegistry.getContextContainer().getValue())); - - long timeToDisableMs = TimeUnit.MILLISECONDS.convert(statisticConfiguration.timeToDisable(), statisticConfiguration.timeToDisableUnit()); - long pollingIntervalMs = Math.round(timeToDisableMs * 0.75); // we poll at 75% of the time to disable (before the time to disable happens) - - statisticCollector = new DefaultStatisticCollector( - managementRegistry, - scheduledExecutorService, - statistics -> monitoringService.pushServerEntityStatistics(statistics.toArray(new ContextualStatistics[statistics.size()])), - // TODO FIXME: there is no timesource service in voltron: https://github.com/Terracotta-OSS/terracotta-apis/issues/167 - System::currentTimeMillis, - pollingIntervalMs, - TimeUnit.MILLISECONDS, - statsCapabilitynames - ); - } - - @Override - public void setMonitoringService(MonitoringService monitoringService) { - this.monitoringService = monitoringService; - } - - @Override - protected void dispose(ExposedObject exposedObject) { - exposedObject.getTarget().stopStatisticCollector(); - } - - void init() { - register(statisticCollector); - statisticCollector.startStatisticCollector(); - } - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticConfiguration.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticConfiguration.java deleted file mode 100644 index 
cad3ebcf20..0000000000 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/StatisticConfiguration.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.ehcache.clustered.server.management; - -import org.terracotta.management.model.Objects; -import org.terracotta.management.registry.ManagementProvider; - -import java.util.concurrent.TimeUnit; - -class StatisticConfiguration { - - private long averageWindowDuration = 60; - private TimeUnit averageWindowUnit = TimeUnit.SECONDS; - private int historySize = 100; - private long historyInterval = 1; - private TimeUnit historyIntervalUnit = TimeUnit.SECONDS; - private long timeToDisable = 30; - private TimeUnit timeToDisableUnit = TimeUnit.SECONDS; - - StatisticConfiguration() { - } - - StatisticConfiguration(long averageWindowDuration, TimeUnit averageWindowUnit, int historySize, long historyInterval, TimeUnit historyIntervalUnit, long timeToDisable, TimeUnit timeToDisableUnit) { - this.averageWindowDuration = averageWindowDuration; - this.averageWindowUnit = Objects.requireNonNull(averageWindowUnit); - this.historySize = historySize; - this.historyInterval = historyInterval; - this.historyIntervalUnit = Objects.requireNonNull(historyIntervalUnit); - this.timeToDisable = timeToDisable; - this.timeToDisableUnit = Objects.requireNonNull(timeToDisableUnit); - } - - public long averageWindowDuration() { - return averageWindowDuration; - } - - public TimeUnit averageWindowUnit() { - return averageWindowUnit; - } - - public int historySize() { - return historySize; - } - - public long historyInterval() { - return historyInterval; - } - - public TimeUnit historyIntervalUnit() { - return historyIntervalUnit; - } - - public long timeToDisable() { - return timeToDisable; - } - - public TimeUnit timeToDisableUnit() { - return timeToDisableUnit; - } - - - @Override - public String toString() { - return "{averageWindowDuration=" + averageWindowDuration() + - ", averageWindowUnit=" + averageWindowUnit() + - ", historyInterval=" + historyInterval() + - ", historyIntervalUnit=" + historyIntervalUnit() + - ", historySize=" + historySize() + - ", timeToDisable=" + timeToDisable() + - ", timeToDisableUnit=" + timeToDisableUnit() + - '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - StatisticConfiguration that = (StatisticConfiguration) o; - if (averageWindowDuration != that.averageWindowDuration) return false; - if (historySize != that.historySize) return false; - if (historyInterval != that.historyInterval) return false; - if (timeToDisable != that.timeToDisable) return false; - if (averageWindowUnit != that.averageWindowUnit) return false; - if (historyIntervalUnit != that.historyIntervalUnit) return false; - return timeToDisableUnit == that.timeToDisableUnit; - } - - @Override - public int hashCode() { - int result = (int) (averageWindowDuration ^ 
(averageWindowDuration >>> 32)); - result = 31 * result + averageWindowUnit.hashCode(); - result = 31 * result + historySize; - result = 31 * result + (int) (historyInterval ^ (historyInterval >>> 32)); - result = 31 * result + historyIntervalUnit.hashCode(); - result = 31 * result + (int) (timeToDisable ^ (timeToDisable >>> 32)); - result = 31 * result + timeToDisableUnit.hashCode(); - return result; - } - - public StatisticConfiguration setAverageWindowDuration(long averageWindowDuration) { - this.averageWindowDuration = averageWindowDuration; - return this; - } - - public StatisticConfiguration setAverageWindowUnit(TimeUnit averageWindowUnit) { - this.averageWindowUnit = averageWindowUnit; - return this; - } - - public StatisticConfiguration setHistoryInterval(long historyInterval) { - this.historyInterval = historyInterval; - return this; - } - - public StatisticConfiguration setHistoryIntervalUnit(TimeUnit historyIntervalUnit) { - this.historyIntervalUnit = historyIntervalUnit; - return this; - } - - public StatisticConfiguration setHistorySize(int historySize) { - this.historySize = historySize; - return this; - } - - public StatisticConfiguration setTimeToDisable(long timeToDisable) { - this.timeToDisable = timeToDisable; - return this; - } - - public StatisticConfiguration setTimeToDisableUnit(TimeUnit timeToDisableUnit) { - this.timeToDisableUnit = timeToDisableUnit; - return this; - } - -} diff --git a/management/build.gradle b/management/build.gradle index 3b6f1cd6b6..1f3ae8fc08 100644 --- a/management/build.gradle +++ b/management/build.gradle @@ -23,12 +23,14 @@ dependencies { // optional: if we want to use the clustered management layer compileOnly project(':clustered:client') compileOnly "org.terracotta:entity-client-api:$parent.entityApiVersion" - compileOnly "org.terracotta.management:management-entity-client:$parent.managementVersion" + compileOnly "org.terracotta.management.dist:management-client:$parent.managementVersion" compile project(':api') compile project(':core') compile project(':impl') - compile "org.terracotta.management:management-registry:$parent.managementVersion" + compile ("org.terracotta.management:management-registry:$parent.managementVersion") { + exclude group: 'org.terracotta', module: 'statistics' + } testCompile project(':xml') testCompile "com.fasterxml.jackson.core:jackson-databind:2.7.5" diff --git a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java b/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java index 3bd311cbce..cde8cd0c1d 100644 --- a/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java +++ b/management/src/main/java/org/ehcache/management/cluster/DefaultClusteringManagementService.java @@ -16,6 +16,7 @@ package org.ehcache.management.cluster; import org.ehcache.Cache; +import org.ehcache.StateTransitionException; import org.ehcache.Status; import org.ehcache.clustered.client.service.ClientEntityFactory; import org.ehcache.clustered.client.service.ClusteringService; @@ -31,6 +32,7 @@ import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; import org.ehcache.spi.service.ServiceProvider; +import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.exception.EntityAlreadyExistsException; import org.terracotta.exception.EntityNotFoundException; @@ -52,6 +54,8 @@ @ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class, 
TimeSourceService.class, ManagementRegistryService.class, EntityService.class, ClusteringService.class}) public class DefaultClusteringManagementService implements ClusteringManagementService, CacheManagerListener, CollectorService.Collector { + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultClusteringManagementService.class); + private final ClusteringManagementServiceConfiguration configuration; private volatile ManagementRegistryService managementRegistryService; @@ -140,14 +144,22 @@ public void stateTransition(Status from, Status to) { } managementAgentService = new ManagementAgentService(managementAgentEntity); managementAgentService.setOperationTimeout(configuration.getManagementCallTimeoutSec(), TimeUnit.SECONDS); + managementAgentService.setManagementRegistry(managementRegistryService); // setup the executor that will handle the management call requests received from the server. We log failures. managementAgentService.setManagementCallExecutor(new LoggingExecutor( managementCallExecutor, LoggerFactory.getLogger(getClass().getName() + ".managementCallExecutor"))); - managementAgentService.bridge(managementRegistryService); - // expose tags - managementAgentService.setTags(managementRegistryService.getConfiguration().getTags()); + try { + managementAgentService.init(); + // expose tags + managementAgentService.setTags(managementRegistryService.getConfiguration().getTags()); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new StateTransitionException(e); + } catch (Exception e) { + e.printStackTrace(); + } break; } @@ -171,7 +183,14 @@ public void stateTransition(Status from, Status to) { public void onNotification(ContextualNotification notification) { ManagementAgentService service = managementAgentService; if (service != null && clusteringService.isConnected()) { - service.pushNotification(notification); + try { + service.pushNotification(notification); + } catch (InterruptedException e) { + LOGGER.error("Failed to push notification " + notification + ": " + e.getMessage(), e); + Thread.currentThread().interrupt(); + } catch (Exception e) { + LOGGER.error("Failed to push notification " + notification + ": " + e.getMessage(), e); + } } } @@ -179,7 +198,14 @@ public void onNotification(ContextualNotification notification) { public void onStatistics(Collection statistics) { ManagementAgentService service = managementAgentService; if (service != null && clusteringService.isConnected()) { - service.pushStatistics(statistics); + try { + service.pushStatistics(statistics); + } catch (InterruptedException e) { + LOGGER.error("Failed to push statistics " + statistics + ": " + e.getMessage(), e); + Thread.currentThread().interrupt(); + } catch (Exception e) { + LOGGER.error("Failed to push statistics " + statistics + ": " + e.getMessage(), e); + } } } diff --git a/management/src/main/java/org/ehcache/management/config/DefaultStatisticsProviderConfiguration.java b/management/src/main/java/org/ehcache/management/config/DefaultStatisticsProviderConfiguration.java index bbba956b4e..081ec14697 100644 --- a/management/src/main/java/org/ehcache/management/config/DefaultStatisticsProviderConfiguration.java +++ b/management/src/main/java/org/ehcache/management/config/DefaultStatisticsProviderConfiguration.java @@ -17,152 +17,67 @@ import org.terracotta.management.model.Objects; import org.terracotta.management.registry.ManagementProvider; +import org.terracotta.management.registry.collect.StatisticConfiguration; import 
java.util.concurrent.TimeUnit; -public class DefaultStatisticsProviderConfiguration implements StatisticsProviderConfiguration { +public class DefaultStatisticsProviderConfiguration extends StatisticConfiguration implements StatisticsProviderConfiguration { - private final Class provider; + private static final long serialVersionUID = 1L; - private long averageWindowDuration = 60; - private TimeUnit averageWindowUnit = TimeUnit.SECONDS; - private int historySize = 100; - private long historyInterval = 1; - private TimeUnit historyIntervalUnit = TimeUnit.SECONDS; - private long timeToDisable = 30; - private TimeUnit timeToDisableUnit = TimeUnit.SECONDS; + private final Class provider; public DefaultStatisticsProviderConfiguration(Class provider, long averageWindowDuration, TimeUnit averageWindowUnit, int historySize, long historyInterval, TimeUnit historyIntervalUnit, long timeToDisable, TimeUnit timeToDisableUnit) { + super(averageWindowDuration, averageWindowUnit, historySize, historyInterval, historyIntervalUnit, timeToDisable, timeToDisableUnit); this.provider = Objects.requireNonNull(provider); - this.averageWindowDuration = averageWindowDuration; - this.averageWindowUnit = Objects.requireNonNull(averageWindowUnit); - this.historySize = historySize; - this.historyInterval = historyInterval; - this.historyIntervalUnit = Objects.requireNonNull(historyIntervalUnit); - this.timeToDisable = timeToDisable; - this.timeToDisableUnit = Objects.requireNonNull(timeToDisableUnit); } public DefaultStatisticsProviderConfiguration(Class provider) { this.provider = Objects.requireNonNull(provider); } - @Override - public long averageWindowDuration() { - return averageWindowDuration; - } - - @Override - public TimeUnit averageWindowUnit() { - return averageWindowUnit; - } - - @Override - public int historySize() { - return historySize; - } - - @Override - public long historyInterval() { - return historyInterval; - } - - @Override - public TimeUnit historyIntervalUnit() { - return historyIntervalUnit; - } - - @Override - public long timeToDisable() { - return timeToDisable; - } - - @Override - public TimeUnit timeToDisableUnit() { - return timeToDisableUnit; - } - @Override public Class getStatisticsProviderType() { return provider; } - @Override - public String toString() { - return "{statisticsProviderType=" + getStatisticsProviderType() + - ", averageWindowDuration=" + averageWindowDuration() + - ", averageWindowUnit=" + averageWindowUnit() + - ", historyInterval=" + historyInterval() + - ", historyIntervalUnit=" + historyIntervalUnit() + - ", historySize=" + historySize() + - ", timeToDisable=" + timeToDisable() + - ", timeToDisableUnit=" + timeToDisableUnit() + - '}'; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - + if (!super.equals(o)) return false; DefaultStatisticsProviderConfiguration that = (DefaultStatisticsProviderConfiguration) o; - - if (!provider.equals(that.provider)) return false; - if (averageWindowDuration != that.averageWindowDuration) return false; - if (historySize != that.historySize) return false; - if (historyInterval != that.historyInterval) return false; - if (timeToDisable != that.timeToDisable) return false; - if (averageWindowUnit != that.averageWindowUnit) return false; - if (historyIntervalUnit != that.historyIntervalUnit) return false; - return timeToDisableUnit == that.timeToDisableUnit; - + return provider.equals(that.provider); } @Override public int hashCode() { - int result 
= (int) (averageWindowDuration ^ (averageWindowDuration >>> 32)); + int result = super.hashCode(); result = 31 * result + provider.hashCode(); - result = 31 * result + averageWindowUnit.hashCode(); - result = 31 * result + historySize; - result = 31 * result + (int) (historyInterval ^ (historyInterval >>> 32)); - result = 31 * result + historyIntervalUnit.hashCode(); - result = 31 * result + (int) (timeToDisable ^ (timeToDisable >>> 32)); - result = 31 * result + timeToDisableUnit.hashCode(); return result; } - public DefaultStatisticsProviderConfiguration setAverageWindowDuration(long averageWindowDuration) { - this.averageWindowDuration = averageWindowDuration; - return this; - } - - public DefaultStatisticsProviderConfiguration setAverageWindowUnit(TimeUnit averageWindowUnit) { - this.averageWindowUnit = averageWindowUnit; - return this; - } - - public DefaultStatisticsProviderConfiguration setHistoryInterval(long historyInterval) { - this.historyInterval = historyInterval; + @Override + public DefaultStatisticsProviderConfiguration setAverageWindowDuration(long averageWindowDuration, TimeUnit averageWindowUnit) { + super.setAverageWindowDuration(averageWindowDuration, averageWindowUnit); return this; } - public DefaultStatisticsProviderConfiguration setHistoryIntervalUnit(TimeUnit historyIntervalUnit) { - this.historyIntervalUnit = historyIntervalUnit; + @Override + public DefaultStatisticsProviderConfiguration setHistoryInterval(long historyInterval, TimeUnit historyIntervalUnit) { + super.setHistoryInterval(historyInterval, historyIntervalUnit); return this; } + @Override public DefaultStatisticsProviderConfiguration setHistorySize(int historySize) { - this.historySize = historySize; + super.setHistorySize(historySize); return this; } - public DefaultStatisticsProviderConfiguration setTimeToDisable(long timeToDisable) { - this.timeToDisable = timeToDisable; - return this; - } - - public DefaultStatisticsProviderConfiguration setTimeToDisableUnit(TimeUnit timeToDisableUnit) { - this.timeToDisableUnit = timeToDisableUnit; + @Override + public DefaultStatisticsProviderConfiguration setTimeToDisable(long timeToDisable, TimeUnit timeToDisableUnit) { + super.setTimeToDisable(timeToDisable, timeToDisableUnit); return this; } diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java index 6d2e5beba0..162ed2d8fe 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -22,37 +22,14 @@ import org.ehcache.management.providers.CacheBinding; import org.ehcache.management.providers.ExposedCacheBinding; import org.terracotta.context.extended.OperationStatisticDescriptor; -import org.terracotta.context.extended.RegisteredStatistic; import org.terracotta.context.extended.StatisticsRegistry; import org.terracotta.management.model.capabilities.descriptors.Descriptor; -import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; -import org.terracotta.management.model.stats.MemoryUnit; -import org.terracotta.management.model.stats.NumberUnit; -import org.terracotta.management.model.stats.Sample; import org.terracotta.management.model.stats.Statistic; -import org.terracotta.management.model.stats.StatisticType; -import 
org.terracotta.management.model.stats.history.AverageHistory; -import org.terracotta.management.model.stats.history.CounterHistory; -import org.terracotta.management.model.stats.history.DurationHistory; -import org.terracotta.management.model.stats.history.RateHistory; -import org.terracotta.management.model.stats.history.RatioHistory; -import org.terracotta.management.model.stats.history.SizeHistory; -import org.terracotta.statistics.archive.Timestamped; -import org.terracotta.statistics.extended.SampleType; -import org.terracotta.statistics.extended.SampledStatistic; - -import java.util.ArrayList; +import org.terracotta.management.registry.collect.StatisticsRegistryMetadata; + import java.util.Collection; import java.util.EnumSet; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; import static java.util.Collections.singleton; import static java.util.EnumSet.allOf; @@ -61,17 +38,8 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { - private static final Map COMPOUND_SUFFIXES = new HashMap(); - - static { - COMPOUND_SUFFIXES.put("Count", SampleType.COUNTER); - COMPOUND_SUFFIXES.put("Rate", SampleType.RATE); - COMPOUND_SUFFIXES.put("LatencyMinimum", SampleType.LATENCY_MIN); - COMPOUND_SUFFIXES.put("LatencyMaximum", SampleType.LATENCY_MAX); - COMPOUND_SUFFIXES.put("LatencyAverage", SampleType.LATENCY_AVG); - } - private final StatisticsRegistry statisticsRegistry; + private final StatisticsRegistryMetadata statisticsRegistryMetadata; StandardEhcacheStatistics(ManagementRegistryServiceConfiguration registryConfiguration, CacheBinding cacheBinding, StatisticsProviderConfiguration statisticsProviderConfiguration, ScheduledExecutorService executor) { super(registryConfiguration, cacheBinding); @@ -79,6 +47,8 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { statisticsProviderConfiguration.averageWindowUnit(), statisticsProviderConfiguration.historySize(), statisticsProviderConfiguration.historyInterval(), statisticsProviderConfiguration.historyIntervalUnit(), statisticsProviderConfiguration.timeToDisable(), statisticsProviderConfiguration.timeToDisableUnit()); + this.statisticsRegistryMetadata = new StatisticsRegistryMetadata(statisticsRegistry); + EnumSet hit = of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER); EnumSet miss = of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER); OperationStatisticDescriptor getCacheStatisticDescriptor = OperationStatisticDescriptor.descriptor("get", singleton("cache"), CacheOperationOutcomes.GetOutcome.class); @@ -106,72 +76,13 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { statisticsRegistry.registerSize("OccupiedByteSize", descriptor("occupiedMemory", singleton("tier"))); } - @SuppressWarnings("unchecked") - Statistic queryStatistic(String statisticName, long since) { - // first search for a non-compound stat - SampledStatistic statistic = statisticsRegistry.findSampledStatistic(statisticName); - - // if not found, it can be a compound stat, so search for it - if (statistic == null) { - for (Iterator> it = COMPOUND_SUFFIXES.entrySet().iterator(); it.hasNext() && statistic == null; ) { - Entry entry = it.next(); - statistic = 
statisticsRegistry.findSampledCompoundStatistic(statisticName.substring(0, Math.max(0, statisticName.length() - entry.getKey().length())), entry.getValue()); - } - } - - if (statistic != null) { - List> history = statistic.history(since); - List samples = new ArrayList>(history.size()); - for (Timestamped timestamped : history) { - Sample sample = new Sample(timestamped.getTimestamp(), timestamped.getSample()); - samples.add(sample); - } - - switch (statistic.type()) { - case COUNTER: return new CounterHistory((List>) samples, NumberUnit.COUNT); - case RATE: return new RateHistory((List>) samples, TimeUnit.SECONDS); - case LATENCY_MIN: return new DurationHistory((List>) samples, TimeUnit.NANOSECONDS); - case LATENCY_MAX: return new DurationHistory((List>) samples, TimeUnit.NANOSECONDS); - case LATENCY_AVG: return new AverageHistory((List>) samples, TimeUnit.NANOSECONDS); - case RATIO: return new RatioHistory((List>) samples, NumberUnit.RATIO); - case SIZE: return new SizeHistory((List>) samples, MemoryUnit.B); - default: throw new UnsupportedOperationException(statistic.type().name()); - } - } - - throw new IllegalArgumentException("No registered statistic named '" + statisticName + "'"); + public Statistic queryStatistic(String fullStatisticName, long since) { + return statisticsRegistryMetadata.queryStatistic(fullStatisticName, since); } @Override public Collection getDescriptors() { - Set capabilities = new HashSet(); - Map registrations = statisticsRegistry.getRegistrations(); - for (Entry entry : registrations.entrySet()) { - String statisticName = entry.getKey(); - RegisteredStatistic registeredStatistic = registrations.get(statisticName); - switch (registeredStatistic.getType()) { - case COUNTER: - capabilities.add(new StatisticDescriptor(statisticName, StatisticType.COUNTER_HISTORY)); - break; - case RATIO: - capabilities.add(new StatisticDescriptor(statisticName, StatisticType.RATIO_HISTORY)); - break; - case SIZE: - capabilities.add(new StatisticDescriptor(statisticName, StatisticType.SIZE_HISTORY)); - break; - case COMPOUND: - capabilities.add(new StatisticDescriptor(entry.getKey() + "Count", StatisticType.COUNTER_HISTORY)); - capabilities.add(new StatisticDescriptor(entry.getKey() + "Rate", StatisticType.RATE_HISTORY)); - capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyMinimum", StatisticType.DURATION_HISTORY)); - capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyMaximum", StatisticType.DURATION_HISTORY)); - capabilities.add(new StatisticDescriptor(entry.getKey() + "LatencyAverage", StatisticType.AVERAGE_HISTORY)); - break; - default: - throw new UnsupportedOperationException(registeredStatistic.getType().name()); - } - } - - return capabilities; + return statisticsRegistryMetadata.getDescriptors(); } void dispose() { diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java b/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java index 37a3c82e2d..f23d7f6ba8 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java +++ b/management/src/main/java/org/ehcache/management/registry/DefaultCollectorService.java @@ -35,10 +35,10 @@ import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.registry.collect.DefaultStatisticCollector; import org.terracotta.management.registry.collect.StatisticCollector; +import org.terracotta.management.registry.collect.StatisticConfiguration; import 
java.util.Collection; import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; @@ -80,8 +80,6 @@ public synchronized void start(ServiceProvider serviceProvider) { scheduledExecutorService = serviceProvider.getService(ExecutionService.class).getScheduledExecutor(configuration.getCollectorExecutorAlias()); StatisticsProviderConfiguration providerConfiguration = configuration.getConfigurationFor(EhcacheStatisticsProvider.class); - long timeToDisableMs = TimeUnit.MILLISECONDS.convert(providerConfiguration.timeToDisable(), providerConfiguration.timeToDisableUnit()); - long pollingIntervalMs = Math.round(timeToDisableMs * 0.75); // we poll at 75% of the time to disable (before the time to disable happens) statisticCollector = new DefaultStatisticCollector( managementRegistry, @@ -98,10 +96,16 @@ public long getTimeMillis() { return timeSource.getTimeMillis(); } }, - pollingIntervalMs, - TimeUnit.MILLISECONDS, - new String[]{"StatisticsCapability"} // the only stats capability available at the moment - ); + providerConfiguration instanceof StatisticConfiguration ? + (StatisticConfiguration) providerConfiguration : + new StatisticConfiguration( + providerConfiguration.averageWindowDuration(), + providerConfiguration.averageWindowUnit(), + providerConfiguration.historySize(), + providerConfiguration.historyInterval(), + providerConfiguration.historyIntervalUnit(), + providerConfiguration.timeToDisable(), + providerConfiguration.timeToDisableUnit())); cacheManager.registerListener(this); } diff --git a/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java b/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java index c66fd57c80..f093257933 100644 --- a/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java +++ b/management/src/main/java/org/ehcache/management/registry/ManagementRegistryServiceConfigurationParser.java @@ -100,14 +100,16 @@ public ServiceCreationConfiguration parseServiceCreat // average-window for (Element averageWindow : NodeListIterable.elements(statisticConfiguration, NAMESPACE, "average-window")) { - providerConfiguration.setAverageWindowDuration(Long.parseLong(val(averageWindow, String.valueOf(providerConfiguration.averageWindowDuration())))); - providerConfiguration.setAverageWindowUnit(unit(averageWindow, providerConfiguration.averageWindowUnit())); + providerConfiguration.setAverageWindowDuration( + Long.parseLong(val(averageWindow, String.valueOf(providerConfiguration.averageWindowDuration()))), + unit(averageWindow, providerConfiguration.averageWindowUnit())); } // history-interval for (Element historyInterval : NodeListIterable.elements(statisticConfiguration, NAMESPACE, "history-interval")) { - providerConfiguration.setHistoryInterval(Long.parseLong(val(historyInterval, String.valueOf(providerConfiguration.historyInterval())))); - providerConfiguration.setHistoryIntervalUnit(unit(historyInterval, providerConfiguration.historyIntervalUnit())); + providerConfiguration.setHistoryInterval( + Long.parseLong(val(historyInterval, String.valueOf(providerConfiguration.historyInterval()))), + unit(historyInterval, providerConfiguration.historyIntervalUnit())); } // history-size @@ -117,8 +119,9 @@ public ServiceCreationConfiguration parseServiceCreat // time-to-disable for (Element timeToDisable : 
NodeListIterable.elements(statisticConfiguration, NAMESPACE, "time-to-disable")) { - providerConfiguration.setTimeToDisable(Long.parseLong(val(timeToDisable, String.valueOf(providerConfiguration.timeToDisable())))); - providerConfiguration.setTimeToDisableUnit(unit(timeToDisable, providerConfiguration.timeToDisableUnit())); + providerConfiguration.setTimeToDisable( + Long.parseLong(val(timeToDisable, String.valueOf(providerConfiguration.timeToDisable()))), + unit(timeToDisable, providerConfiguration.timeToDisableUnit())); } registryConfiguration.addConfiguration(providerConfiguration); From 07d527a797c5bcf631bb96afcd235df3a688d9e1 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Mon, 21 Nov 2016 16:53:42 -0500 Subject: [PATCH 152/218] :bug: Update to new tc-platform version to fix 2 bugs - since must not be initialized to Long.MIN, otherwise comparators will not work (0-Long.Min is still negative).See Terracotta-OSS/terracotta-platform#189 - fix bad context passed because of the tms entity (Terracotta-OSS/terracotta-platform#190) --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index f475a2c893..f5123009dd 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.11.beta3' + terracottaPlatformVersion = '5.0.11.beta5' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.11.beta' terracottaCoreVersion = '5.0.11-beta2' From 878def400c8d2b567e344d21dbad99b890a4246e Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 18 Nov 2016 15:15:45 +0100 Subject: [PATCH 153/218] :construction: #1482 Runnel encoding for EhcacheEntityResponse * Add enum for response types * Remove obsolete methods from EhcacheEntityResponse --- .../client/internal/EhcacheClientEntity.java | 4 +- .../store/CommonServerStoreProxy.java | 9 +- .../messages/EhcacheEntityResponse.java | 105 +++------ .../messages/EhcacheResponseType.java | 51 +++++ .../internal/messages/ResponseCodec.java | 212 ++++++++++++------ .../internal/messages/ResponseCodecTest.java | 71 +++++- .../server/EhcacheActiveEntityTest.java | 9 +- 7 files changed, 299 insertions(+), 162 deletions(-) create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index fef0188288..ed3e953f42 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -32,7 +32,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Failure; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Type; +import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; @@ -322,7 +322,7 @@ private EhcacheEntityResponse invokeInternal(TimeoutDuration timeLimit, EhcacheE try { 
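A minimal sketch of the comparator overflow mentioned in PATCH 152 above (the class and variable names below are illustrative only, not taken from the Terracotta code): initializing a "since" timestamp to Long.MIN_VALUE breaks any comparison implemented by subtraction, because 0 - Long.MIN_VALUE overflows and is still negative, so orderings come out wrong.

// Illustrative sketch only; not part of this patch.
class SinceOverflowSketch {
  public static void main(String[] args) {
    long since = Long.MIN_VALUE;
    long now = 0L;
    // 0 - Long.MIN_VALUE overflows back to Long.MIN_VALUE, which is still negative,
    // so a subtraction-based comparator would wrongly report now < since.
    System.out.println(now - since);              // prints -9223372036854775808
    System.out.println(Long.compare(now, since)); // prints 1: the safe, non-overflowing comparison
  }
}
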
EhcacheEntityResponse response = waitFor(timeLimit, invokeAsync(message, replicate)); - if (Type.FAILURE.equals(response.getType())) { + if (EhcacheResponseType.FAILURE.equals(response.getResponseType())) { throw ((Failure)response).getCause(); } else { return response; diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java index 1258b1eb48..8bc103a176 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/store/CommonServerStoreProxy.java @@ -18,6 +18,7 @@ import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.common.internal.store.Chain; import org.slf4j.Logger; @@ -161,11 +162,11 @@ public Chain get(long key) throws TimeoutException { } catch (Exception e) { throw new ServerStoreProxyException(e); } - if (response != null && response.getType() == EhcacheEntityResponse.Type.GET_RESPONSE) { + if (response != null && response.getResponseType() == EhcacheResponseType.GET_RESPONSE) { return ((EhcacheEntityResponse.GetResponse)response).getChain(); } else { throw new ServerStoreProxyException("Response for get operation was invalid : " + - (response != null ? response.getType().toString() : "null message")); + (response != null ? response.getResponseType() : "null message")); } } @@ -190,11 +191,11 @@ public Chain getAndAppend(long key, ByteBuffer payLoad) throws TimeoutException } catch (Exception e) { throw new ServerStoreProxyException(e); } - if (response != null && response.getType() == EhcacheEntityResponse.Type.GET_RESPONSE) { + if (response != null && response.getResponseType() == EhcacheResponseType.GET_RESPONSE) { return ((EhcacheEntityResponse.GetResponse)response).getChain(); } else { throw new ServerStoreProxyException("Response for getAndAppend operation was invalid : " + - (response != null ? response.getType().toString() : "null message")); + (response != null ? 
response.getResponseType() : "null message")); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java index 572679bd50..f01605b88e 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityResponse.java @@ -23,55 +23,7 @@ public abstract class EhcacheEntityResponse implements EntityResponse { - public enum Type { - SUCCESS((byte) 0), - FAILURE((byte) 1), - GET_RESPONSE((byte) 2), - HASH_INVALIDATION_DONE((byte) 3), - ALL_INVALIDATION_DONE((byte) 4), - CLIENT_INVALIDATE_HASH((byte) 5), - CLIENT_INVALIDATE_ALL((byte) 6), - SERVER_INVALIDATE_HASH((byte) 7), - MAP_VALUE((byte) 8), - ; - - private final byte opCode; - - Type(byte opCode) { - this.opCode = opCode; - } - - public byte getOpCode() { - return this.opCode; - } - - public static Type responseType(byte opCode) { - switch (opCode) { - case 0: - return SUCCESS; - case 1: - return FAILURE; - case 2: - return GET_RESPONSE; - case 3: - return HASH_INVALIDATION_DONE; - case 4: - return ALL_INVALIDATION_DONE; - case 5: - return CLIENT_INVALIDATE_HASH; - case 6: - return CLIENT_INVALIDATE_ALL; - case 7: - return SERVER_INVALIDATE_HASH; - case 8: - return MAP_VALUE; - default: - throw new IllegalArgumentException("Store operation not defined for : " + opCode); - } - } - } - - public abstract Type getType(); + public abstract EhcacheResponseType getResponseType(); public static class Success extends EhcacheEntityResponse { @@ -82,10 +34,9 @@ private Success() { } @Override - public Type getType() { - return Type.SUCCESS; + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.SUCCESS; } - } public static class Failure extends EhcacheEntityResponse { @@ -96,15 +47,14 @@ public static class Failure extends EhcacheEntityResponse { this.cause = cause; } - @Override - public Type getType() { - return Type.FAILURE; - } - public ClusterException getCause() { return cause; } + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.FAILURE; + } } public static class GetResponse extends EhcacheEntityResponse { @@ -115,15 +65,14 @@ public static class GetResponse extends EhcacheEntityResponse { this.chain = chain; } - @Override - public Type getType() { - return Type.GET_RESPONSE; - } - public Chain getChain() { return chain; } + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.GET_RESPONSE; + } } public static HashInvalidationDone hashInvalidationDone(String cacheId, long key) { @@ -148,10 +97,9 @@ public long getKey() { } @Override - public Type getType() { - return Type.HASH_INVALIDATION_DONE; + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.HASH_INVALIDATION_DONE; } - } public static AllInvalidationDone allInvalidationDone(String cacheId) { @@ -170,10 +118,9 @@ public String getCacheId() { } @Override - public Type getType() { - return Type.ALL_INVALIDATION_DONE; + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.ALL_INVALIDATION_DONE; } - } public static ServerInvalidateHash serverInvalidateHash(String cacheId, long key) { @@ -198,8 +145,8 @@ public long getKey() { } @Override - public Type getType() { - return Type.SERVER_INVALIDATE_HASH; + public 
final EhcacheResponseType getResponseType() { + return EhcacheResponseType.SERVER_INVALIDATE_HASH; } } @@ -231,8 +178,8 @@ public int getInvalidationId() { } @Override - public Type getType() { - return Type.CLIENT_INVALIDATE_HASH; + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.CLIENT_INVALIDATE_HASH; } } @@ -258,8 +205,8 @@ public int getInvalidationId() { } @Override - public Type getType() { - return Type.CLIENT_INVALIDATE_ALL; + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.CLIENT_INVALIDATE_ALL; } } @@ -275,14 +222,14 @@ public MapValue(Object value) { this.value = value; } - @Override - public Type getType() { - return Type.MAP_VALUE; - } - public Object getValue() { return this.value; } + + @Override + public final EhcacheResponseType getResponseType() { + return EhcacheResponseType.MAP_VALUE; + } } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java new file mode 100644 index 0000000000..f33298f731 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheResponseType.java @@ -0,0 +1,51 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import org.terracotta.runnel.EnumMapping; + +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; + +/** + * EhcacheResponseType + */ +public enum EhcacheResponseType { + SUCCESS, + FAILURE, + GET_RESPONSE, + HASH_INVALIDATION_DONE, + CLIENT_INVALIDATE_HASH, + CLIENT_INVALIDATE_ALL, + SERVER_INVALIDATE_HASH, + MAP_VALUE, + ALL_INVALIDATION_DONE; + + + public static final String RESPONSE_TYPE_FIELD_NAME = "opCode"; + public static final int RESPONSE_TYPE_FIELD_INDEX = 10; + public static final EnumMapping EHCACHE_RESPONSE_TYPES_ENUM_MAPPING = newEnumMappingBuilder(EhcacheResponseType.class) + .mapping(EhcacheResponseType.SUCCESS, 80) + .mapping(EhcacheResponseType.FAILURE, 81) + .mapping(EhcacheResponseType.GET_RESPONSE, 82) + .mapping(EhcacheResponseType.HASH_INVALIDATION_DONE, 83) + .mapping(EhcacheResponseType.ALL_INVALIDATION_DONE, 84) + .mapping(EhcacheResponseType.CLIENT_INVALIDATE_HASH, 85) + .mapping(EhcacheResponseType.CLIENT_INVALIDATE_ALL, 86) + .mapping(EhcacheResponseType.SERVER_INVALIDATE_HASH, 87) + .mapping(EhcacheResponseType.MAP_VALUE, 88) + .build(); +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java index 02453540ca..4e1070a4aa 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java @@ -18,19 +18,76 @@ import org.ehcache.clustered.common.internal.exceptions.ClusterException; import org.ehcache.clustered.common.internal.store.Util; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.Enm; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; +import org.terracotta.runnel.encoding.StructEncoderFunction; import java.nio.ByteBuffer; +import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.AllInvalidationDone; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateAll; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ClientInvalidateHash; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.HashInvalidationDone; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.ServerInvalidateHash; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.MapValue; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.EHCACHE_RESPONSE_TYPES_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.RESPONSE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.common.internal.messages.EhcacheResponseType.RESPONSE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; class ResponseCodec { - private static final byte OP_CODE_SIZE = 1; + private static final String EXCEPTION_FIELD = "exception"; + private static final String 
INVALIDATION_ID_FIELD = "invalidationId"; + private static final String CHAIN_FIELD = "chain"; + private static final String MAP_VALUE_FIELD = "mapValue"; + + private static final Struct SUCCESS_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .build(); + private static final Struct FAILURE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .byteBuffer(EXCEPTION_FIELD, 20) + .build(); + private static final Struct GET_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .struct(CHAIN_FIELD, 20, CHAIN_STRUCT) + .build(); + private static final Struct HASH_INVALIDATION_DONE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int64(KEY_FIELD, 30) + .build(); + private static final Struct ALL_INVALIDATION_DONE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .build(); + private static final Struct CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int64(KEY_FIELD, 30) + .int32(INVALIDATION_ID_FIELD, 40) + .build(); + private static final Struct CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int32(INVALIDATION_ID_FIELD, 30) + .build(); + private static final Struct SERVER_INVALIDATE_HASH_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int64(KEY_FIELD, 30) + .build(); + private static final Struct MAP_VALUE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() + .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) + .byteBuffer(MAP_VALUE_FIELD, 20) + .build(); private final ChainCodec chainCodec; @@ -39,126 +96,143 @@ class ResponseCodec { } public byte[] encode(EhcacheEntityResponse response) { - switch (response.getType()) { + switch (response.getResponseType()) { case FAILURE: EhcacheEntityResponse.Failure failure = (EhcacheEntityResponse.Failure)response; byte[] failureMsg = Util.marshall(failure.getCause()); - ByteBuffer buffer = ByteBuffer.allocate(OP_CODE_SIZE + failureMsg.length); - buffer.put(EhcacheEntityResponse.Type.FAILURE.getOpCode()); - buffer.put(failureMsg); - return buffer.array(); + return FAILURE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, failure.getResponseType()) + .byteBuffer(EXCEPTION_FIELD, wrap(failureMsg)) + .encode().array(); case SUCCESS: - buffer = ByteBuffer.allocate(OP_CODE_SIZE); - buffer.put(EhcacheEntityResponse.Type.SUCCESS.getOpCode()); - return buffer.array(); + return SUCCESS_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, response.getResponseType()) + .encode().array(); case GET_RESPONSE: - EhcacheEntityResponse.GetResponse getResponse = 
(EhcacheEntityResponse.GetResponse)response; - byte[] encodedChain = chainCodec.encode(getResponse.getChain()); - int chainLen = encodedChain.length; - buffer = ByteBuffer.allocate(OP_CODE_SIZE + chainLen); - buffer.put(EhcacheEntityResponse.Type.GET_RESPONSE.getOpCode()); - buffer.put(encodedChain); - return buffer.array(); + final EhcacheEntityResponse.GetResponse getResponse = (EhcacheEntityResponse.GetResponse)response; + return GET_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, getResponse.getResponseType()) + .struct(CHAIN_FIELD, new StructEncoderFunction() { + @Override + public void encode(StructEncoder encoder) { + chainCodec.encode(encoder, getResponse.getChain()); + } + }) + .encode().array(); case HASH_INVALIDATION_DONE: { HashInvalidationDone hashInvalidationDone = (HashInvalidationDone) response; - buffer = ByteBuffer.allocate(OP_CODE_SIZE + hashInvalidationDone.getCacheId().length() * 2 + 8); - buffer.put(EhcacheEntityResponse.Type.HASH_INVALIDATION_DONE.getOpCode()); - CodecUtil.putStringAsCharArray(buffer, hashInvalidationDone.getCacheId()); - buffer.putLong(hashInvalidationDone.getKey()); - return buffer.array(); + return HASH_INVALIDATION_DONE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, hashInvalidationDone.getResponseType()) + .string(SERVER_STORE_NAME_FIELD, hashInvalidationDone.getCacheId()) + .int64(KEY_FIELD, hashInvalidationDone.getKey()) + .encode().array(); } case ALL_INVALIDATION_DONE: { AllInvalidationDone allInvalidationDone = (AllInvalidationDone) response; - buffer = ByteBuffer.allocate(OP_CODE_SIZE + allInvalidationDone.getCacheId().length() * 2); - buffer.put(EhcacheEntityResponse.Type.ALL_INVALIDATION_DONE.getOpCode()); - CodecUtil.putStringAsCharArray(buffer, allInvalidationDone.getCacheId()); - return buffer.array(); + return ALL_INVALIDATION_DONE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, allInvalidationDone.getResponseType()) + .string(SERVER_STORE_NAME_FIELD, allInvalidationDone.getCacheId()) + .encode().array(); } case CLIENT_INVALIDATE_HASH: { ClientInvalidateHash clientInvalidateHash = (ClientInvalidateHash) response; - buffer = ByteBuffer.allocate(OP_CODE_SIZE + clientInvalidateHash.getCacheId().length() * 2 + 12); - buffer.put(EhcacheEntityResponse.Type.CLIENT_INVALIDATE_HASH.getOpCode()); - CodecUtil.putStringAsCharArray(buffer, clientInvalidateHash.getCacheId()); - buffer.putLong(clientInvalidateHash.getKey()); - buffer.putInt(((ClientInvalidateHash) response).getInvalidationId()); - return buffer.array(); + return CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, clientInvalidateHash.getResponseType()) + .string(SERVER_STORE_NAME_FIELD, clientInvalidateHash.getCacheId()) + .int64(KEY_FIELD, clientInvalidateHash.getKey()) + .int32(INVALIDATION_ID_FIELD, clientInvalidateHash.getInvalidationId()) + .encode().array(); } case CLIENT_INVALIDATE_ALL: { ClientInvalidateAll clientInvalidateAll = (ClientInvalidateAll) response; - buffer = ByteBuffer.allocate(OP_CODE_SIZE + clientInvalidateAll.getCacheId().length() * 2 + 4); - buffer.put(EhcacheEntityResponse.Type.CLIENT_INVALIDATE_ALL.getOpCode()); - CodecUtil.putStringAsCharArray(buffer, clientInvalidateAll.getCacheId()); - buffer.putInt(((ClientInvalidateAll) response).getInvalidationId()); - return buffer.array(); + return CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, clientInvalidateAll.getResponseType()) + .string(SERVER_STORE_NAME_FIELD, clientInvalidateAll.getCacheId()) + 
.int32(INVALIDATION_ID_FIELD, clientInvalidateAll.getInvalidationId()) + .encode().array(); } case SERVER_INVALIDATE_HASH: { ServerInvalidateHash serverInvalidateHash = (ServerInvalidateHash) response; - buffer = ByteBuffer.allocate(OP_CODE_SIZE + serverInvalidateHash.getCacheId().length() * 2 + 8); - buffer.put(EhcacheEntityResponse.Type.SERVER_INVALIDATE_HASH.getOpCode()); - CodecUtil.putStringAsCharArray(buffer, serverInvalidateHash.getCacheId()); - buffer.putLong(serverInvalidateHash.getKey()); - return buffer.array(); + return SERVER_INVALIDATE_HASH_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, serverInvalidateHash.getResponseType()) + .string(SERVER_STORE_NAME_FIELD, serverInvalidateHash.getCacheId()) + .int64(KEY_FIELD, serverInvalidateHash.getKey()) + .encode().array(); } case MAP_VALUE: { MapValue mapValue = (MapValue) response; byte[] encodedMapValue = Util.marshall(mapValue.getValue()); - buffer = ByteBuffer.allocate(OP_CODE_SIZE + encodedMapValue.length); - buffer.put(EhcacheEntityResponse.Type.MAP_VALUE.getOpCode()); - buffer.put(encodedMapValue); - return buffer.array(); + return MAP_VALUE_RESPONSE_STRUCT.encoder() + .enm(RESPONSE_TYPE_FIELD_NAME, mapValue.getResponseType()) + .byteBuffer(MAP_VALUE_FIELD, wrap(encodedMapValue)) + .encode().array(); } default: - throw new UnsupportedOperationException("The operation is not supported : " + response.getType()); + throw new UnsupportedOperationException("The operation is not supported : " + response.getResponseType()); } } public EhcacheEntityResponse decode(byte[] payload) { - ByteBuffer buffer = ByteBuffer.wrap(payload); - byte opCode = buffer.get(); - EhcacheEntityResponse.Type type = EhcacheEntityResponse.Type.responseType(opCode); - byte[] payArr = new byte[buffer.remaining()]; - buffer.get(payArr); - switch (type) { + ByteBuffer buffer = wrap(payload); + StructDecoder decoder = SUCCESS_RESPONSE_STRUCT.decoder(buffer); + Enm opCodeEnm = decoder.enm(RESPONSE_TYPE_FIELD_NAME); + + if (!opCodeEnm.isFound()) { + throw new AssertionError("Got a response without an opCode"); + } + if (!opCodeEnm.isValid()) { + // Need to ignore the response here as we do not understand its type - coming from the future? 
+ return null; + } + + EhcacheResponseType opCode = opCodeEnm.get(); + buffer.rewind(); + switch (opCode) { case SUCCESS: return EhcacheEntityResponse.Success.INSTANCE; case FAILURE: - ClusterException exception = (ClusterException)Util.unmarshall(payArr); + decoder = FAILURE_RESPONSE_STRUCT.decoder(buffer); + ClusterException exception = (ClusterException)Util.unmarshall(decoder.byteBuffer(EXCEPTION_FIELD)); return new EhcacheEntityResponse.Failure(exception.withClientStackTrace()); case GET_RESPONSE: - return new EhcacheEntityResponse.GetResponse(chainCodec.decode(payArr)); + decoder = GET_RESPONSE_STRUCT.decoder(buffer); + return new EhcacheEntityResponse.GetResponse(chainCodec.decode(decoder.struct(CHAIN_FIELD))); case HASH_INVALIDATION_DONE: { - String cacheId = ByteBuffer.wrap(payArr, 0, payArr.length - 8).asCharBuffer().toString(); - long key = ByteBuffer.wrap(payArr, payArr.length - 8, 8).getLong(); + decoder = HASH_INVALIDATION_DONE_RESPONSE_STRUCT.decoder(buffer); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + long key = decoder.int64(KEY_FIELD); return EhcacheEntityResponse.hashInvalidationDone(cacheId, key); } case ALL_INVALIDATION_DONE: { - String cacheId = ByteBuffer.wrap(payArr).asCharBuffer().toString(); + decoder = ALL_INVALIDATION_DONE_RESPONSE_STRUCT.decoder(buffer); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); return EhcacheEntityResponse.allInvalidationDone(cacheId); } case CLIENT_INVALIDATE_HASH: { - String cacheId = ByteBuffer.wrap(payArr, 0, payArr.length - 12).asCharBuffer().toString(); - ByteBuffer byteBuffer = ByteBuffer.wrap(payArr, payArr.length - 12, 12); - long key = byteBuffer.getLong(); - int invalidationId = byteBuffer.getInt(); + decoder = CLIENT_INVALIDATE_HASH_RESPONSE_STRUCT.decoder(buffer); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + long key = decoder.int64(KEY_FIELD); + int invalidationId = decoder.int32(INVALIDATION_ID_FIELD); return EhcacheEntityResponse.clientInvalidateHash(cacheId, key, invalidationId); } case CLIENT_INVALIDATE_ALL: { - String cacheId = ByteBuffer.wrap(payArr, 0, payArr.length - 4).asCharBuffer().toString(); - ByteBuffer byteBuffer = ByteBuffer.wrap(payArr, payArr.length - 4, 4); - int invalidationId = byteBuffer.getInt(); + decoder = CLIENT_INVALIDATE_ALL_RESPONSE_STRUCT.decoder(buffer); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + int invalidationId = decoder.int32(INVALIDATION_ID_FIELD); return EhcacheEntityResponse.clientInvalidateAll(cacheId, invalidationId); } case SERVER_INVALIDATE_HASH: { - String cacheId = ByteBuffer.wrap(payArr, 0, payArr.length - 8).asCharBuffer().toString(); - ByteBuffer byteBuffer = ByteBuffer.wrap(payArr, payArr.length - 8, 8); - long key = byteBuffer.getLong(); + decoder = SERVER_INVALIDATE_HASH_RESPONSE_STRUCT.decoder(buffer); + String cacheId = decoder.string(SERVER_STORE_NAME_FIELD); + long key = decoder.int64(KEY_FIELD); return EhcacheEntityResponse.serverInvalidateHash(cacheId, key); } case MAP_VALUE: { - return EhcacheEntityResponse.mapValue(Util.unmarshall(payArr)); + decoder = MAP_VALUE_RESPONSE_STRUCT.decoder(buffer); + return EhcacheEntityResponse.mapValue(Util.unmarshall(decoder.byteBuffer(MAP_VALUE_FIELD))); } default: - throw new UnsupportedOperationException("The operation is not supported with opCode : " + type); + throw new UnsupportedOperationException("The operation is not supported with opCode : " + opCode); } } } diff --git 
a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java index 5199634b71..3f3a939153 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/ResponseCodecTest.java @@ -18,6 +18,7 @@ import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; import org.ehcache.clustered.common.internal.store.Chain; +import org.hamcrest.Matchers; import org.junit.Test; import java.util.Date; @@ -28,13 +29,13 @@ import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; -/** - * - */ public class ResponseCodecTest { private static final EhcacheEntityResponseFactory RESPONSE_FACTORY = new EhcacheEntityResponseFactory(); private static final ResponseCodec RESPONSE_CODEC = new ResponseCodec(); + private static final String STORE_ID = "storeId"; + private static final long KEY = 42L; + private static final int INVALIDATION_ID = 134; @Test public void testFailureResponseCodec() { @@ -65,4 +66,68 @@ public void testMapValueCodec() throws Exception { (EhcacheEntityResponse.MapValue) RESPONSE_CODEC.decode(RESPONSE_CODEC.encode(mapValue)); assertThat(decoded.getValue(), equalTo(subject)); } + + @Test + public void testSuccess() throws Exception { + byte[] encoded = RESPONSE_CODEC.encode(EhcacheEntityResponse.Success.INSTANCE); + assertThat(RESPONSE_CODEC.decode(encoded), Matchers.sameInstance(EhcacheEntityResponse.Success.INSTANCE)); + } + + @Test + public void testHashInvalidationDone() throws Exception { + EhcacheEntityResponse.HashInvalidationDone response = new EhcacheEntityResponse.HashInvalidationDone(STORE_ID, KEY); + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.HashInvalidationDone decodedResponse = (EhcacheEntityResponse.HashInvalidationDone) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.HASH_INVALIDATION_DONE)); + assertThat(decodedResponse.getCacheId(), is(STORE_ID)); + assertThat(decodedResponse.getKey(), is(KEY)); + } + + @Test + public void testAllInvalidationDone() throws Exception { + EhcacheEntityResponse.AllInvalidationDone response = new EhcacheEntityResponse.AllInvalidationDone(STORE_ID); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.AllInvalidationDone decodedResponse = (EhcacheEntityResponse.AllInvalidationDone) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.ALL_INVALIDATION_DONE)); + assertThat(decodedResponse.getCacheId(), is(STORE_ID)); + } + + @Test + public void testClientInvalidateHash() throws Exception { + EhcacheEntityResponse.ClientInvalidateHash response = new EhcacheEntityResponse.ClientInvalidateHash(STORE_ID, KEY, INVALIDATION_ID); + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ClientInvalidateHash decodedResponse = (EhcacheEntityResponse.ClientInvalidateHash) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.CLIENT_INVALIDATE_HASH)); + assertThat(decodedResponse.getCacheId(), is(STORE_ID)); + assertThat(decodedResponse.getKey(), is(KEY)); + assertThat(decodedResponse.getInvalidationId(), is(INVALIDATION_ID)); + } + + @Test + public void testClientInvalidateAll() throws Exception { + 
EhcacheEntityResponse.ClientInvalidateAll response = new EhcacheEntityResponse.ClientInvalidateAll(STORE_ID, INVALIDATION_ID); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ClientInvalidateAll decodedResponse = (EhcacheEntityResponse.ClientInvalidateAll) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.CLIENT_INVALIDATE_ALL)); + assertThat(decodedResponse.getCacheId(), is(STORE_ID)); + assertThat(decodedResponse.getInvalidationId(), is(INVALIDATION_ID)); + } + + @Test + public void testServerInvalidateHash() throws Exception { + EhcacheEntityResponse.ServerInvalidateHash response = new EhcacheEntityResponse.ServerInvalidateHash(STORE_ID, KEY); + + byte[] encoded = RESPONSE_CODEC.encode(response); + EhcacheEntityResponse.ServerInvalidateHash decodedResponse = (EhcacheEntityResponse.ServerInvalidateHash) RESPONSE_CODEC.decode(encoded); + + assertThat(decodedResponse.getResponseType(), is(EhcacheResponseType.SERVER_INVALIDATE_HASH)); + assertThat(decodedResponse.getCacheId(), is(STORE_ID)); + assertThat(decodedResponse.getKey(), is(KEY)); + } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 5033d8b9fc..1455d805e1 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -35,6 +35,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Failure; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponseFactory; +import org.ehcache.clustered.common.internal.messages.EhcacheResponseType; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; @@ -75,9 +76,7 @@ import java.util.concurrent.atomic.AtomicInteger; import org.ehcache.clustered.common.PoolAllocation.Dedicated; -import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Type; -import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.Type.FAILURE; import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -2445,7 +2444,7 @@ public void testValidateIdenticalConfiguration() { ClientDescriptor validator = new TestClientDescriptor(); activeEntity.connected(validator); - assertThat(activeEntity.invoke(validator, MESSAGE_FACTORY.validateStoreManager(validateConfig)).getType(), is(Type.SUCCESS)); + assertThat(activeEntity.invoke(validator, MESSAGE_FACTORY.validateStoreManager(validateConfig)).getResponseType(), is(EhcacheResponseType.SUCCESS)); } @Test @@ -2946,12 +2945,12 @@ private void assertSuccess(EhcacheEntityResponse response) throws Exception { } private void assertFailure(EhcacheEntityResponse response, Class expectedException) { - assertThat(response.getType(), is(FAILURE)); + assertThat(response.getResponseType(), is(EhcacheResponseType.FAILURE)); assertThat(((Failure) response).getCause(), is(instanceOf(expectedException))); } private void assertFailure(EhcacheEntityResponse response, Class 
expectedException, String expectedMessageContent) { - assertThat(response.getType(), is(FAILURE)); + assertThat(response.getResponseType(), is(EhcacheResponseType.FAILURE)); Exception cause = ((Failure) response).getCause(); assertThat(cause, is(instanceOf(expectedException))); assertThat(cause.getMessage(), containsString(expectedMessageContent)); From 85d34ecbcbfbdce0e3b38161d5d70ae7fcc8b20a Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 21 Nov 2016 13:22:22 +0100 Subject: [PATCH 154/218] :construction: #1482 Runnel encoding for EhcacheSyncMessage * Add enum for sync message types --- .../client/internal/EhcacheClientEntity.java | 4 - .../common/internal/messages/ChainCodec.java | 2 +- .../messages/LifeCycleMessageCodec.java | 6 +- .../internal/messages/MessageCodecUtils.java | 37 +-- .../messages/EhcacheDataSyncMessage.java | 5 + .../messages/EhcacheStateSyncMessage.java | 5 + .../internal/messages/EhcacheSyncMessage.java | 5 + .../messages/EhcacheSyncMessageCodec.java | 237 ++++++++++++++---- .../internal/messages/SyncMessageType.java | 39 +++ .../messages/EhcacheSyncMessageCodecTest.java | 26 +- 10 files changed, 273 insertions(+), 93 deletions(-) create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java index ed3e953f42..803109f3df 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntity.java @@ -38,7 +38,6 @@ import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; import org.ehcache.clustered.common.internal.messages.ReconnectMessage; import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; -import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.connection.entity.Entity; @@ -62,9 +61,6 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicLong; -import static org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp.GET; -import static org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage.ServerStoreOp.getServerStoreOp; - /** * The client-side {@link Entity} through which clustered cache operations are performed. * An instance of this class is created by the {@link EhcacheClientEntityService}. 
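
Note on the encoding idiom used here: both the ResponseCodec rework in the previous patch and the EhcacheSyncMessageCodec rewritten later in this patch follow the same Runnel pattern. Every message struct reserves the same low field index for an enum "type" tag; the decoder first reads only that tag, checks isFound() and isValid() so that unknown values (for instance sent by a newer peer) are skipped instead of failing, then rewinds the buffer and decodes again with the struct that matches the tag. The sketch below illustrates that round trip. All names in it (PingPongCodec, MsgType, the "sender" and "latencyNanos" fields) are invented for illustration; it only uses the Runnel calls that appear in these diffs, and the raw types plus the explicit cast merely keep the sketch independent of Runnel's exact generic signatures, so read it as an illustration of the pattern rather than as project code.

    import org.terracotta.runnel.EnumMapping;
    import org.terracotta.runnel.Struct;
    import org.terracotta.runnel.decoding.Enm;
    import org.terracotta.runnel.decoding.StructDecoder;

    import java.nio.ByteBuffer;

    import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder;
    import static org.terracotta.runnel.StructBuilder.newStructBuilder;

    public class PingPongCodec {

      enum MsgType { PING, PONG }

      private static final String MSG_TYPE_FIELD = "msgType";

      // Every struct reserves index 10 for the type tag (compare SYNC_MESSAGE_TYPE_FIELD_INDEX),
      // so the tag can be read before the concrete message type is known.
      private static final EnumMapping MSG_TYPE_MAPPING = newEnumMappingBuilder(MsgType.class)
          .mapping(MsgType.PING, 1)
          .mapping(MsgType.PONG, 2)
          .build();

      private static final Struct PING_STRUCT = newStructBuilder()
          .enm(MSG_TYPE_FIELD, 10, MSG_TYPE_MAPPING)
          .string("sender", 20)
          .build();

      private static final Struct PONG_STRUCT = newStructBuilder()
          .enm(MSG_TYPE_FIELD, 10, MSG_TYPE_MAPPING)
          .int64("latencyNanos", 20)
          .build();

      public byte[] encodePing(String sender) {
        // Encode: write the type tag first, then the payload fields, then materialize the bytes.
        return PING_STRUCT.encoder()
            .enm(MSG_TYPE_FIELD, MsgType.PING)
            .string("sender", sender)
            .encode()
            .array();
      }

      public String decode(byte[] payload) {
        ByteBuffer buffer = ByteBuffer.wrap(payload);

        // Peek at the type tag using any struct that declares the shared enm field.
        StructDecoder decoder = PING_STRUCT.decoder(buffer);
        Enm typeEnm = decoder.enm(MSG_TYPE_FIELD);
        if (!typeEnm.isFound()) {
          throw new AssertionError("Message without a type tag");
        }
        if (!typeEnm.isValid()) {
          // Unknown mapping value, e.g. sent by a newer version: ignore rather than fail.
          return null;
        }

        // Rewind and decode again with the struct matching the tag.
        MsgType type = (MsgType) typeEnm.get();
        buffer.rewind();
        switch (type) {
          case PING:
            decoder = PING_STRUCT.decoder(buffer);
            return "ping from " + decoder.string("sender");
          case PONG:
            decoder = PONG_STRUCT.decoder(buffer);
            return "pong after " + decoder.int64("latencyNanos") + " ns";
          default:
            throw new AssertionError("Unhandled type: " + type);
        }
      }
    }

The decode side mirrors what ResponseCodec.decode() does with RESPONSE_TYPE_FIELD_NAME and what EhcacheSyncMessageCodec.decode() below does with SYNC_MESSAGE_TYPE_FIELD_NAME: one cheap read of the shared tag, an early-out for unrecognized values, then a second pass with the concrete struct.
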
diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java index cb3bdb5c38..04365460a0 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java @@ -38,7 +38,7 @@ public class ChainCodec { .byteBuffer("payload", 20) .build(); - static final Struct CHAIN_STRUCT = StructBuilder.newStructBuilder() + public static final Struct CHAIN_STRUCT = StructBuilder.newStructBuilder() .structs("elements", 10, ELEMENT_STRUCT) .build(); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java index a480918e19..50cd52ca32 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java @@ -33,9 +33,12 @@ import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.CONSISTENCY_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.DEFAULT_RESOURCE_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOLS_SUB_STRUCT; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_NAME_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_RESOURCE_NAME_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_SIZE_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; @@ -49,9 +52,6 @@ class LifeCycleMessageCodec { private static final String CONFIG_PRESENT_FIELD = "configPresent"; - private static final String DEFAULT_RESOURCE_FIELD = "defaultResource"; - private static final String POOLS_SUB_STRUCT = "pools"; - private static final String POOL_NAME_FIELD = "poolName"; private static final Struct POOLS_STRUCT = newStructBuilder() .string(POOL_NAME_FIELD, 10) diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java index 29fa43c7ae..26048e087e 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java @@ -21,7 +21,9 @@ import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.terracotta.runnel.EnumMapping; import org.terracotta.runnel.decoding.Enm; +import org.terracotta.runnel.decoding.PrimitiveDecodingSupport; import 
org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.PrimitiveEncodingSupport; import org.terracotta.runnel.encoding.StructEncoder; import java.util.UUID; @@ -31,22 +33,25 @@ /** * MessageCodecUtils */ -class MessageCodecUtils { +public class MessageCodecUtils { - static final String MSG_ID_FIELD = "msgId"; - static final String LSB_UUID_FIELD = "lsbUUID"; - static final String MSB_UUID_FIELD = "msbUUID"; - static final String SERVER_STORE_NAME_FIELD = "serverStoreName"; - static final String KEY_FIELD = "key"; - static final String STORE_CONFIG_KEY_TYPE_FIELD = "keyType"; - static final String STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD = "keySerializerType"; - static final String STORE_CONFIG_VALUE_TYPE_FIELD = "valueType"; - static final String STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD = "valueSerializerType"; - static final String STORE_CONFIG_CONSISTENCY_FIELD = "consistency"; - static final String POOL_SIZE_FIELD = "poolSize"; - static final String POOL_RESOURCE_NAME_FIELD = "resourceName"; + public static final String MSG_ID_FIELD = "msgId"; + public static final String LSB_UUID_FIELD = "lsbUUID"; + public static final String MSB_UUID_FIELD = "msbUUID"; + public static final String SERVER_STORE_NAME_FIELD = "serverStoreName"; + public static final String KEY_FIELD = "key"; + public static final String DEFAULT_RESOURCE_FIELD = "defaultResource"; + public static final String STORE_CONFIG_KEY_TYPE_FIELD = "keyType"; + public static final String STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD = "keySerializerType"; + public static final String STORE_CONFIG_VALUE_TYPE_FIELD = "valueType"; + public static final String STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD = "valueSerializerType"; + public static final String STORE_CONFIG_CONSISTENCY_FIELD = "consistency"; + public static final String POOLS_SUB_STRUCT = "pools"; + public static final String POOL_NAME_FIELD = "poolName"; + public static final String POOL_SIZE_FIELD = "poolSize"; + public static final String POOL_RESOURCE_NAME_FIELD = "resourceName"; - static final EnumMapping CONSISTENCY_ENUM_MAPPING = newEnumMappingBuilder(Consistency.class) + public static final EnumMapping CONSISTENCY_ENUM_MAPPING = newEnumMappingBuilder(Consistency.class) .mapping(Consistency.EVENTUAL, 1) .mapping(Consistency.STRONG, 2) .build(); @@ -62,7 +67,7 @@ UUID decodeUUID(StructDecoder decoder) { return new UUID(decoder.int64(MSB_UUID_FIELD), decoder.int64(LSB_UUID_FIELD)); } - void encodeServerStoreConfiguration(StructEncoder encoder, ServerStoreConfiguration configuration) { + public void encodeServerStoreConfiguration(PrimitiveEncodingSupport encoder, ServerStoreConfiguration configuration) { encoder.string(STORE_CONFIG_KEY_TYPE_FIELD, configuration.getStoredKeyType()) .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, configuration.getKeySerializerType()) .string(STORE_CONFIG_VALUE_TYPE_FIELD, configuration.getStoredValueType()) @@ -83,7 +88,7 @@ void encodeServerStoreConfiguration(StructEncoder encoder, ServerStoreConfigurat } } - ServerStoreConfiguration decodeServerStoreConfiguration(StructDecoder decoder) { + public ServerStoreConfiguration decodeServerStoreConfiguration(PrimitiveDecodingSupport decoder) { String keyType = decoder.string(STORE_CONFIG_KEY_TYPE_FIELD); String keySerializer = decoder.string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD); String valueType = decoder.string(STORE_CONFIG_VALUE_TYPE_FIELD); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java 
b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java index 7026bef639..008266f71d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java @@ -38,6 +38,11 @@ public SyncOp operation() { return SyncOp.DATA; } + @Override + public SyncMessageType getMessageType() { + return SyncMessageType.DATA; + } + public String getCacheId() { return cacheId; } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java index 2e440214b8..79ddde1bdd 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java @@ -57,4 +57,9 @@ public Set getTrackedClients() { public SyncOp operation() { return SyncOp.STATE; } + + @Override + public SyncMessageType getMessageType() { + return SyncMessageType.STATE; + } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java index 7d5c51e5d8..bd536dbd10 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java @@ -56,13 +56,16 @@ public static SyncOp getSyncOp(byte syncOpCode) { } @Override + @Deprecated public Type getType() { return Type.SYNC_OP; } + @Deprecated public abstract SyncOp operation(); @Override + @Deprecated public byte getOpCode() { return operation().getOpCode(); } @@ -82,4 +85,6 @@ public UUID getClientId() { throw new UnsupportedOperationException(); } + public abstract SyncMessageType getMessageType(); + } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java index 79026ee3b3..5de875935a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java @@ -16,79 +16,224 @@ package org.ehcache.clustered.server.internal.messages; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.messages.ChainCodec; -import org.ehcache.clustered.common.internal.messages.CodecUtil; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.MessageCodecUtils; import org.ehcache.clustered.common.internal.store.Chain; -import org.ehcache.clustered.common.internal.store.Util; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.terracotta.entity.MessageCodecException; import org.terracotta.entity.SyncMessageCodec; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.decoding.Enm; +import 
org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructEncoder; import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.CONSISTENCY_ENUM_MAPPING; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.DEFAULT_RESOURCE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOLS_SUB_STRUCT; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_NAME_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_RESOURCE_NAME_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_SIZE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_CONSISTENCY_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_KEY_TYPE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD; +import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_VALUE_TYPE_FIELD; +import static org.ehcache.clustered.server.internal.messages.SyncMessageType.DATA; +import static org.ehcache.clustered.server.internal.messages.SyncMessageType.STATE; +import static org.ehcache.clustered.server.internal.messages.SyncMessageType.SYNC_MESSAGE_TYPE_FIELD_INDEX; +import static org.ehcache.clustered.server.internal.messages.SyncMessageType.SYNC_MESSAGE_TYPE_FIELD_NAME; +import static org.ehcache.clustered.server.internal.messages.SyncMessageType.SYNC_MESSAGE_TYPE_MAPPING; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; public class EhcacheSyncMessageCodec implements SyncMessageCodec { - private static final byte OPCODE_SIZE = 1; - private static final byte KEY_SIZE = 8; - private static final byte CACHE_ID_LEN_SIZE = 4; + private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheSyncMessageCodec.class); + + private static final String STORES_SUB_STRUCT = "stores"; + private static final String CLIENTS_SUB_STRUCT = "clients"; + private static final String CHAIN_FIELD = "chain"; + + private static final Struct POOLS_STRUCT = newStructBuilder() + .string(POOL_NAME_FIELD, 10) + .int64(POOL_SIZE_FIELD, 20) + .string(POOL_RESOURCE_NAME_FIELD, 30).build(); + + private static final Struct SERVER_STORE_CONFIGURATION_STRUCT = newStructBuilder() + .string(SERVER_STORE_NAME_FIELD, 10) + .string(STORE_CONFIG_KEY_TYPE_FIELD, 20) + .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, 21) + .string(STORE_CONFIG_VALUE_TYPE_FIELD, 25) + .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, 26) + .enm(STORE_CONFIG_CONSISTENCY_FIELD, 30, 
CONSISTENCY_ENUM_MAPPING) + .int64(POOL_SIZE_FIELD, 40) + .string(POOL_RESOURCE_NAME_FIELD, 45) + .build(); + + private static final Struct UUID_STRUCT = newStructBuilder() + .int64(MSB_UUID_FIELD, 10) + .int64(LSB_UUID_FIELD, 15) + .build(); + + private static final Struct STATE_SYNC_STRUCT = newStructBuilder() + .enm(SYNC_MESSAGE_TYPE_FIELD_NAME, SYNC_MESSAGE_TYPE_FIELD_INDEX, SYNC_MESSAGE_TYPE_MAPPING) + .string(DEFAULT_RESOURCE_FIELD, 20) + .structs(POOLS_SUB_STRUCT, 30, POOLS_STRUCT) + .structs(STORES_SUB_STRUCT, 40, SERVER_STORE_CONFIGURATION_STRUCT) + .structs(CLIENTS_SUB_STRUCT, 50, UUID_STRUCT) + .build(); + + private static final Struct DATA_SYNC_STRUCT = newStructBuilder() + .enm(SYNC_MESSAGE_TYPE_FIELD_NAME, SYNC_MESSAGE_TYPE_FIELD_INDEX, SYNC_MESSAGE_TYPE_MAPPING) + .string(SERVER_STORE_NAME_FIELD, 20) + .int64(KEY_FIELD, 30) + .struct(CHAIN_FIELD, 40, CHAIN_STRUCT) + .build(); private final ChainCodec chainCodec = new ChainCodec(); + private final MessageCodecUtils codecUtils = new MessageCodecUtils(); @Override public byte[] encode(final int concurrencyKey, final EhcacheEntityMessage message) throws MessageCodecException { - if(message.getType() == EhcacheEntityMessage.Type.SYNC_OP) { - EhcacheSyncMessage syncMessage = (EhcacheSyncMessage)message; - switch (syncMessage.operation()) { - case STATE: { - byte[] encodedMsg = Util.marshall(syncMessage); - ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + encodedMsg.length); - buffer.put(syncMessage.getOpCode()); - buffer.put(encodedMsg); - return buffer.array(); - } - case DATA: { - EhcacheDataSyncMessage dataSyncMessage = (EhcacheDataSyncMessage)message; - String cacheId = dataSyncMessage.getCacheId(); - byte[] encodedChain = chainCodec.encode(dataSyncMessage.getChain()); - ByteBuffer buffer = ByteBuffer.allocate(OPCODE_SIZE + KEY_SIZE + CACHE_ID_LEN_SIZE + - 2 * cacheId.length() + encodedChain.length); - buffer.put(dataSyncMessage.getOpCode()); - buffer.putLong(dataSyncMessage.getKey()); - buffer.putInt(cacheId.length()); - CodecUtil.putStringAsCharArray(buffer, cacheId); - buffer.put(encodedChain); - return buffer.array(); - } + if (message instanceof EhcacheSyncMessage) { + EhcacheSyncMessage syncMessage = (EhcacheSyncMessage) message; + StructEncoder encoder; + switch (syncMessage.getMessageType()) { + case STATE: + encoder = STATE_SYNC_STRUCT.encoder(); + EhcacheStateSyncMessage stateSyncMessage = (EhcacheStateSyncMessage) syncMessage; + encoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME, STATE); + encodeServerSideConfiguration(encoder, stateSyncMessage.getConfiguration()); + encoder.structs(STORES_SUB_STRUCT, stateSyncMessage.getStoreConfigs().entrySet(), (storeEncoder, storeEntry) -> { + storeEncoder.string(SERVER_STORE_NAME_FIELD, storeEntry.getKey()); + codecUtils.encodeServerStoreConfiguration(storeEncoder, storeEntry.getValue()); + }); + encoder.structs(CLIENTS_SUB_STRUCT, stateSyncMessage.getTrackedClients(), (uuidEncoder, uuid) -> { + uuidEncoder.int64(MSB_UUID_FIELD, uuid.getMostSignificantBits()); + uuidEncoder.int64(LSB_UUID_FIELD, uuid.getLeastSignificantBits()); + }); + return encoder.encode().array(); + case DATA: + encoder = DATA_SYNC_STRUCT.encoder(); + EhcacheDataSyncMessage dataSyncMessage = (EhcacheDataSyncMessage) syncMessage; + encoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME, DATA); + encoder.string(SERVER_STORE_NAME_FIELD, dataSyncMessage.getCacheId()); + encoder.int64(KEY_FIELD, dataSyncMessage.getKey()); + encoder.struct(CHAIN_FIELD, (chainEncoder) -> chainCodec.encode(chainEncoder, dataSyncMessage.getChain())); 
+ return encoder.encode().array(); default: - throw new IllegalArgumentException(this.getClass().getName() + " can not encode " + syncMessage.operation()); + throw new IllegalArgumentException("Sync message codec can not encode " + syncMessage.getMessageType()); } } else { throw new IllegalArgumentException(this.getClass().getName() + " can not encode " + message + " which is not a " + EhcacheStateSyncMessage.class); } } + private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration) { + if (configuration.getDefaultServerResource() != null) { + encoder.string(DEFAULT_RESOURCE_FIELD, configuration.getDefaultServerResource()); + } + encoder.structs(POOLS_SUB_STRUCT, configuration.getResourcePools().entrySet(), (poolEncoder, poolEntry) -> { + poolEncoder.string(POOL_NAME_FIELD, poolEntry.getKey()); + ServerSideConfiguration.Pool pool = poolEntry.getValue(); + poolEncoder.int64(POOL_SIZE_FIELD, pool.getSize()); + if (pool.getServerResource() != null) { + poolEncoder.string(POOL_RESOURCE_NAME_FIELD, pool.getServerResource()); + } + }); + } + @Override public EhcacheSyncMessage decode(final int concurrencyKey, final byte[] payload) throws MessageCodecException { ByteBuffer message = ByteBuffer.wrap(payload); - EhcacheSyncMessage.SyncOp syncOp = EhcacheSyncMessage.SyncOp.getSyncOp(message.get()); - switch (syncOp) { - case STATE: { - byte[] encodedMsg = new byte[message.capacity() - OPCODE_SIZE]; - message.get(encodedMsg, 0, encodedMsg.length); - return (EhcacheStateSyncMessage) Util.unmarshall(encodedMsg); + StructDecoder decoder = STATE_SYNC_STRUCT.decoder(message); + Enm enm = decoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME); + if (!enm.isFound()) { + throw new AssertionError("Invalid message format - misses the message type field"); + } + if (!enm.isValid()) { + LOGGER.warn("Unknown sync message received - ignoring {}", enm.raw()); + return null; + } + + switch (enm.get()) { + case STATE: + ServerSideConfiguration configuration = decodeServerSideConfiguration(decoder); + Map storeConfigs = decodeStoreConfigurations(decoder); + Set trackedClients = decodeTrackedClients(decoder); + return new EhcacheStateSyncMessage(configuration, storeConfigs, trackedClients); + case DATA: + message.rewind(); + decoder = DATA_SYNC_STRUCT.decoder(message); + String storeName = decoder.string(SERVER_STORE_NAME_FIELD); + Long key = decoder.int64(KEY_FIELD); + Chain chain = chainCodec.decode(decoder.struct(CHAIN_FIELD)); + return new EhcacheDataSyncMessage(storeName, key, chain); + default: + throw new AssertionError("Cannot happen given earlier checks"); + } + } + + private Set decodeTrackedClients(StructDecoder decoder) { + Set result = new HashSet<>(); + StructArrayDecoder clientsDecoder = decoder.structs(CLIENTS_SUB_STRUCT); + + if (clientsDecoder != null) { + for (int i = 0; i < clientsDecoder.length(); i++) { + result.add(new UUID(clientsDecoder.int64(MSB_UUID_FIELD), clientsDecoder.int64(LSB_UUID_FIELD))); + clientsDecoder.next(); } - case DATA: { - long key = message.getLong(); - int cacheIdLength = message.getInt(); - String cacheId = CodecUtil.getStringFromBuffer(message, cacheIdLength); - int chainPayloadSize = message.remaining(); - byte[] chainPayload = new byte[chainPayloadSize]; - message.get(chainPayload); - Chain chain = chainCodec.decode(chainPayload); - return new EhcacheDataSyncMessage(cacheId, key, chain); + } + return result; + } + + private Map decodeStoreConfigurations(StructDecoder decoder) { + Map result = new HashMap<>(); + + StructArrayDecoder 
storesDecoder = decoder.structs(STORES_SUB_STRUCT); + if (storesDecoder != null) { + for (int i = 0; i < storesDecoder.length(); i++) { + String storeName = storesDecoder.string(SERVER_STORE_NAME_FIELD); + result.put(storeName, codecUtils.decodeServerStoreConfiguration(storesDecoder)); + storesDecoder.next(); } - default: - throw new IllegalArgumentException("EhcacheStateSyncMessage operation not defined for : " + syncOp); + } + return result; + } + + private ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder) { + String defaultResource = decoder.string(DEFAULT_RESOURCE_FIELD); + Map pools = new HashMap<>(); + StructArrayDecoder poolsDecoder = decoder.structs(POOLS_SUB_STRUCT); + if (poolsDecoder != null) { + for (int i = 0; i < poolsDecoder.length(); i++) { + String poolName = poolsDecoder.string(POOL_NAME_FIELD); + Long poolSize = poolsDecoder.int64(POOL_SIZE_FIELD); + String poolResource = poolsDecoder.string(POOL_RESOURCE_NAME_FIELD); + pools.put(poolName, new ServerSideConfiguration.Pool(poolSize, poolResource)); + poolsDecoder.next(); + } + } + if (defaultResource == null) { + return new ServerSideConfiguration(pools); + } else { + return new ServerSideConfiguration(defaultResource, pools); } } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java new file mode 100644 index 0000000000..73ac2f2dbd --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/SyncMessageType.java @@ -0,0 +1,39 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server.internal.messages; + +import org.terracotta.runnel.EnumMapping; + +import com.tc.classloader.CommonComponent; + +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; + +/** + * SyncMessageType + */ +@CommonComponent +public enum SyncMessageType { + STATE, + DATA; + + public static final String SYNC_MESSAGE_TYPE_FIELD_NAME = "msgType"; + public static final int SYNC_MESSAGE_TYPE_FIELD_INDEX = 10; + public static final EnumMapping SYNC_MESSAGE_TYPE_MAPPING = newEnumMappingBuilder(SyncMessageType.class) + .mapping(STATE, 1) + .mapping(DATA, 10) + .build(); +} diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java index b54927bda0..7c44c30c08 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java @@ -48,23 +48,17 @@ public void testStateSyncMessageEncodeDecode() throws Exception { PoolAllocation poolAllocation1 = new PoolAllocation.Dedicated("dedicated", 4); ServerStoreConfiguration serverStoreConfiguration1 = new ServerStoreConfiguration(poolAllocation1, - "storedKeyType1", "storedValueType1", "actualKeyType1", "actualValueType1", + "storedKeyType1", "storedValueType1", null, null, "keySerializerType1", "valueSerializerType1", Consistency.STRONG); PoolAllocation poolAllocation2 = new PoolAllocation.Shared("shared"); ServerStoreConfiguration serverStoreConfiguration2 = new ServerStoreConfiguration(poolAllocation2, - "storedKeyType2", "storedValueType2", "actualKeyType2", "actualValueType2", + "storedKeyType2", "storedValueType2", null, null, "keySerializerType2", "valueSerializerType2", Consistency.EVENTUAL); - PoolAllocation poolAllocation3 = new PoolAllocation.Unknown(); - ServerStoreConfiguration serverStoreConfiguration3 = new ServerStoreConfiguration(poolAllocation3, - "storedKeyType3", "storedValueType3", "actualKeyType3", "actualValueType3", - "keySerializerType3", "valueSerializerType3", Consistency.STRONG); - Map storeConfigs = new HashMap<>(); storeConfigs.put("cache1", serverStoreConfiguration1); storeConfigs.put("cache2", serverStoreConfiguration2); - storeConfigs.put("cache3", serverStoreConfiguration3); UUID clientId1 = UUID.randomUUID(); UUID clientId2 = UUID.randomUUID(); @@ -79,7 +73,7 @@ public void testStateSyncMessageEncodeDecode() throws Exception { assertThat(decodedMessage.getConfiguration().getDefaultServerResource(), is("default-pool")); assertThat(decodedMessage.getConfiguration().getResourcePools(), is(sharedPools)); assertThat(decodedMessage.getTrackedClients(), is(clientIds)); - assertThat(decodedMessage.getStoreConfigs().keySet(), containsInAnyOrder("cache1", "cache2", "cache3")); + assertThat(decodedMessage.getStoreConfigs().keySet(), containsInAnyOrder("cache1", "cache2")); ServerStoreConfiguration serverStoreConfiguration = decodedMessage.getStoreConfigs().get("cache1"); assertThat(serverStoreConfiguration.getPoolAllocation(), instanceOf(PoolAllocation.Dedicated.class)); @@ -88,8 +82,6 @@ public void testStateSyncMessageEncodeDecode() throws Exception { assertThat(dedicatedPool.getSize(), is(4L)); assertThat(serverStoreConfiguration.getStoredKeyType(), is("storedKeyType1")); assertThat(serverStoreConfiguration.getStoredValueType(), 
is("storedValueType1")); - assertThat(serverStoreConfiguration.getActualKeyType(), is("actualKeyType1")); - assertThat(serverStoreConfiguration.getActualValueType(), is("actualValueType1")); assertThat(serverStoreConfiguration.getKeySerializerType(), is("keySerializerType1")); assertThat(serverStoreConfiguration.getValueSerializerType(), is("valueSerializerType1")); assertThat(serverStoreConfiguration.getConsistency(), is(Consistency.STRONG)); @@ -100,21 +92,9 @@ public void testStateSyncMessageEncodeDecode() throws Exception { assertThat(sharedPool.getResourcePoolName(), is("shared")); assertThat(serverStoreConfiguration.getStoredKeyType(), is("storedKeyType2")); assertThat(serverStoreConfiguration.getStoredValueType(), is("storedValueType2")); - assertThat(serverStoreConfiguration.getActualKeyType(), is("actualKeyType2")); - assertThat(serverStoreConfiguration.getActualValueType(), is("actualValueType2")); assertThat(serverStoreConfiguration.getKeySerializerType(), is("keySerializerType2")); assertThat(serverStoreConfiguration.getValueSerializerType(), is("valueSerializerType2")); assertThat(serverStoreConfiguration.getConsistency(), is(Consistency.EVENTUAL)); - - serverStoreConfiguration = decodedMessage.getStoreConfigs().get("cache3"); - assertThat(serverStoreConfiguration.getPoolAllocation(), instanceOf(PoolAllocation.Unknown.class)); - assertThat(serverStoreConfiguration.getStoredKeyType(), is("storedKeyType3")); - assertThat(serverStoreConfiguration.getStoredValueType(), is("storedValueType3")); - assertThat(serverStoreConfiguration.getActualKeyType(), is("actualKeyType3")); - assertThat(serverStoreConfiguration.getActualValueType(), is("actualValueType3")); - assertThat(serverStoreConfiguration.getKeySerializerType(), is("keySerializerType3")); - assertThat(serverStoreConfiguration.getValueSerializerType(), is("valueSerializerType3")); - assertThat(serverStoreConfiguration.getConsistency(), is(Consistency.STRONG)); } @Test From 40d7f57a0e58540d60741d80b33f2fbf68331e5d Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 21 Nov 2016 13:45:04 +0100 Subject: [PATCH 155/218] :construction: #1482 Remove obsolete message methods and enums --- .../messages/EhcacheEntityMessage.java | 46 +--------- .../internal/messages/LifecycleMessage.java | 51 ----------- .../messages/PassiveReplicationMessage.java | 65 -------------- .../internal/messages/ServerStoreOpCodec.java | 2 +- .../messages/ServerStoreOpMessage.java | 85 +------------------ .../messages/StateRepositoryOpMessage.java | 33 ------- .../messages/EhcacheEntityMessageTest.java | 35 -------- .../clustered/server/EhcacheActiveEntity.java | 32 ++++--- .../server/EhcacheExecutionStrategy.java | 8 +- .../server/EhcachePassiveEntity.java | 48 +++++------ .../messages/EhcacheDataSyncMessage.java | 5 -- .../messages/EhcacheStateSyncMessage.java | 5 -- .../internal/messages/EhcacheSyncMessage.java | 45 ---------- .../server/repo/ServerStateRepository.java | 6 +- 14 files changed, 53 insertions(+), 413 deletions(-) delete mode 100644 clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessageTest.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java index 7ca37615b4..0863aaa51b 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java +++ 
b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessage.java @@ -25,51 +25,7 @@ */ public abstract class EhcacheEntityMessage implements EntityMessage { - public static final long NOT_REPLICATED = -1; - - /** - * These types represent the top level Ehcache entity message types. - * Each of these top level types can have subtypes of messages. - * The byte code values represents the upper bound of the subtypes messages' byte values if there are any. - */ - public enum Type { - LIFECYCLE_OP((byte) 10), - SERVER_STORE_OP((byte) 20), - STATE_REPO_OP((byte) 30), - SYNC_OP((byte) 40), - REPLICATION_OP((byte) 50) - ; - - private final byte code; - - Type(byte code) { - this.code = code; - } - - public byte getCode() { - return this.code; - } - - public static Type toType(byte code) { - for (Type type: Type.values()) { - if(type.getCode() == code) { - return type; - } - } - throw new IllegalArgumentException("Invalid message type code: " + code); - } - } - - @Deprecated - public abstract Type getType(); - - @Deprecated - public abstract byte getOpCode(); - - @Override - public String toString() { - return getType().toString(); - } + static final long NOT_REPLICATED = -1; public abstract void setId(long id); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java index 27adffc26e..5549a9e309 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifecycleMessage.java @@ -24,15 +24,6 @@ public abstract class LifecycleMessage extends EhcacheOperationMessage implements Serializable { - public enum LifeCycleOp { - CONFIGURE, - VALIDATE, - CREATE_SERVER_STORE, - VALIDATE_SERVER_STORE, - RELEASE_SERVER_STORE, - DESTROY_SERVER_STORE, - } - protected UUID clientId; protected long id = NOT_REPLICATED; @@ -54,18 +45,6 @@ public void setId(long id) { this.id = id; } - @Override - public byte getOpCode() { - return getType().getCode(); - } - - @Override - public Type getType() { - return Type.LIFECYCLE_OP; - } - - public abstract LifeCycleOp operation(); - public static class ValidateStoreManager extends LifecycleMessage { private static final long serialVersionUID = 5742152283115139745L; @@ -76,11 +55,6 @@ public static class ValidateStoreManager extends LifecycleMessage { this.clientId = clientId; } - @Override - public LifeCycleOp operation() { - return LifeCycleOp.VALIDATE; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.VALIDATE; @@ -101,11 +75,6 @@ public static class ConfigureStoreManager extends LifecycleMessage { this.clientId = clientId; } - @Override - public LifeCycleOp operation() { - return LifeCycleOp.CONFIGURE; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.CONFIGURE; @@ -148,11 +117,6 @@ public static class CreateServerStore extends BaseServerStore { super(name, storeConfiguration, clientId); } - @Override - public LifeCycleOp operation() { - return LifeCycleOp.CREATE_SERVER_STORE; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.CREATE_SERVER_STORE; @@ -169,11 +133,6 @@ public static class ValidateServerStore extends BaseServerStore { super(name, storeConfiguration, clientId); } - @Override - public LifeCycleOp operation() { - return 
LifeCycleOp.VALIDATE_SERVER_STORE; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.VALIDATE_SERVER_STORE; @@ -193,11 +152,6 @@ public static class ReleaseServerStore extends LifecycleMessage { this.clientId = clientId; } - @Override - public LifeCycleOp operation() { - return LifeCycleOp.RELEASE_SERVER_STORE; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.RELEASE_SERVER_STORE; @@ -221,11 +175,6 @@ public static class DestroyServerStore extends LifecycleMessage { this.clientId = clientId; } - @Override - public LifeCycleOp operation() { - return LifeCycleOp.DESTROY_SERVER_STORE; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.DESTROY_SERVER_STORE; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java index 574c4919b3..baeeb4b12d 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java @@ -26,60 +26,11 @@ */ public abstract class PassiveReplicationMessage extends EhcacheOperationMessage { - public enum ReplicationOp { - CHAIN_REPLICATION_OP((byte) 41), - CLIENTID_TRACK_OP((byte) 42), - CLEAR_INVALIDATION_COMPLETE((byte) 43), - INVALIDATION_COMPLETE((byte) 44), - SERVER_STORE_LIFECYCLE_REPLICATION_OP((byte) 45) - ; - - private final byte replicationOpCode; - - ReplicationOp(byte replicationOpCode) { - this.replicationOpCode = replicationOpCode; - } - - public byte getReplicationOpCode() { - return replicationOpCode; - } - - - public static ReplicationOp getReplicationOp(byte replicationOpCode) { - switch (replicationOpCode) { - case 41: - return CHAIN_REPLICATION_OP; - case 42: - return CLIENTID_TRACK_OP; - case 43: - return CLEAR_INVALIDATION_COMPLETE; - case 44: - return INVALIDATION_COMPLETE; - case 45: - return SERVER_STORE_LIFECYCLE_REPLICATION_OP; - default: - throw new IllegalArgumentException("Replication operation not defined for : " + replicationOpCode); - } - } - } - - @Override - public Type getType() { - return Type.REPLICATION_OP; - } - - @Override - public byte getOpCode() { - return operation().getReplicationOpCode(); - } - @Override public void setId(long id) { throw new UnsupportedOperationException("This method is not supported on replication message"); } - public abstract ReplicationOp operation(); - public static class ClientIDTrackerMessage extends PassiveReplicationMessage { private final UUID clientId; private final long msgId; @@ -89,9 +40,6 @@ public ClientIDTrackerMessage(long msgId, UUID clientId) { this.clientId = clientId; } - public ReplicationOp operation() { - return ReplicationOp.CLIENTID_TRACK_OP; - } public long getId() { return msgId; } @@ -136,11 +84,6 @@ public EhcacheMessageType getMessageType() { return EhcacheMessageType.CHAIN_REPLICATION_OP; } - @Override - public ReplicationOp operation() { - return ReplicationOp.CHAIN_REPLICATION_OP; - } - @Override public long concurrencyKey() { return key; @@ -169,10 +112,6 @@ public UUID getClientId() { throw new UnsupportedOperationException("Not supported for ClearInvalidationCompleteMessage"); } - public ReplicationOp operation() { - return ReplicationOp.CLEAR_INVALIDATION_COMPLETE; - } - @Override public EhcacheMessageType getMessageType() 
{ return EhcacheMessageType.CLEAR_INVALIDATION_COMPLETE; @@ -197,10 +136,6 @@ public long concurrencyKey() { return (getCacheId().hashCode() + key); } - public ReplicationOp operation() { - return ReplicationOp.INVALIDATION_COMPLETE; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.INVALIDATION_COMPLETE; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java index 67a8157e2b..e4651fee7c 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java @@ -180,7 +180,7 @@ public void encode(StructEncoder encoder) { .encode() .array(); default: - throw new RuntimeException("Unhandled message operation : " + message.operation()); + throw new RuntimeException("Unhandled message operation : " + message.getMessageType()); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java index 22beaa36af..05f0534e48 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpMessage.java @@ -22,46 +22,6 @@ import java.util.UUID; public abstract class ServerStoreOpMessage extends EhcacheOperationMessage { - public enum ServerStoreOp { - - GET_AND_APPEND((byte) 11), - APPEND((byte) 12), - REPLACE((byte) 13), - CLIENT_INVALIDATION_ACK((byte) 14), - CLEAR((byte) 15), - GET((byte) 16), - ; - - private final byte storeOpCode; - - ServerStoreOp(byte storeOpCode) { - this.storeOpCode = storeOpCode; - } - - public byte getStoreOpCode() { - return this.storeOpCode; - } - - public static ServerStoreOp getServerStoreOp(byte storeOpCode) { - switch (storeOpCode) { - case 11: - return GET_AND_APPEND; - case 12: - return APPEND; - case 13: - return REPLACE; - case 14: - return CLIENT_INVALIDATION_ACK; - case 15: - return CLEAR; - case 16: - return GET; - default: - throw new IllegalArgumentException("Store operation not defined for : " + storeOpCode); - } - } - - } protected UUID clientId; protected long id = NOT_REPLICATED; @@ -69,7 +29,7 @@ public static ServerStoreOp getServerStoreOp(byte storeOpCode) { @Override public UUID getClientId() { if (clientId == null) { - throw new AssertionError("Client Id is not supported for message type " + this.operation() ); + throw new AssertionError("Client Id is not supported for message type " + this.getMessageType() ); } return this.clientId; } @@ -94,19 +54,6 @@ public String getCacheId() { return cacheId; } - @Override - public Type getType() { - return Type.SERVER_STORE_OP; - } - - @Deprecated - public abstract ServerStoreOp operation(); - - @Override - public byte getOpCode() { - return operation().getStoreOpCode(); - } - public static abstract class KeyBasedServerStoreOpMessage extends ServerStoreOpMessage implements ConcurrentEntityMessage { private final long key; @@ -132,11 +79,6 @@ public static class GetMessage extends KeyBasedServerStoreOpMessage { super(cacheId, key); } - @Override - public ServerStoreOp operation() { - return ServerStoreOp.GET; - } - @Override public EhcacheMessageType 
getMessageType() { return EhcacheMessageType.GET_STORE; @@ -153,11 +95,6 @@ public static class GetAndAppendMessage extends KeyBasedServerStoreOpMessage { this.clientId = clientId; } - @Override - public ServerStoreOp operation() { - return ServerStoreOp.GET_AND_APPEND; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.GET_AND_APPEND; @@ -179,11 +116,6 @@ public static class AppendMessage extends KeyBasedServerStoreOpMessage { this.clientId = clientId; } - @Override - public ServerStoreOp operation() { - return ServerStoreOp.APPEND; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.APPEND; @@ -207,11 +139,6 @@ public static class ReplaceAtHeadMessage extends KeyBasedServerStoreOpMessage { this.clientId = clientId; } - @Override - public ServerStoreOp operation() { - return ServerStoreOp.REPLACE; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.REPLACE; @@ -239,11 +166,6 @@ public int getInvalidationId() { return invalidationId; } - @Override - public ServerStoreOp operation() { - return ServerStoreOp.CLIENT_INVALIDATION_ACK; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.CLIENT_INVALIDATION_ACK; @@ -257,11 +179,6 @@ public static class ClearMessage extends ServerStoreOpMessage { this.clientId = clientId; } - @Override - public ServerStoreOp operation() { - return ServerStoreOp.CLEAR; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.CLEAR; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java index 10800439bd..53412e3cb8 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpMessage.java @@ -21,12 +21,6 @@ public abstract class StateRepositoryOpMessage extends EhcacheOperationMessage implements Serializable { - public enum StateRepositoryOp { - GET, - PUT_IF_ABSENT, - ENTRY_SET, - } - private final String cacheId; private final String mapId; @@ -65,18 +59,6 @@ public String getMapId() { return mapId; } - @Override - public Type getType() { - return Type.STATE_REPO_OP; - } - - public abstract StateRepositoryOp operation(); - - @Override - public byte getOpCode() { - return getType().getCode(); - } - private static abstract class KeyBasedMessage extends StateRepositoryOpMessage { private final Object key; @@ -98,11 +80,6 @@ public GetMessage(final String cacheId, final String mapId, final Object key, fi super(cacheId, mapId, key, clientId); } - @Override - public StateRepositoryOp operation() { - return StateRepositoryOp.GET; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.GET_STATE_REPO; @@ -122,11 +99,6 @@ public Object getValue() { return value; } - @Override - public StateRepositoryOp operation() { - return StateRepositoryOp.PUT_IF_ABSENT; - } - @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.PUT_IF_ABSENT; @@ -139,11 +111,6 @@ public EntrySetMessage(final String cacheId, final String mapId, final UUID clie super(cacheId, mapId, clientId); } - @Override - public StateRepositoryOp operation() { - return StateRepositoryOp.ENTRY_SET; - } - @Override public EhcacheMessageType getMessageType() { 
return EhcacheMessageType.ENTRY_SET; diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessageTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessageTest.java deleted file mode 100644 index 4ca522e6f0..0000000000 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheEntityMessageTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Terracotta, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.ehcache.clustered.common.internal.messages; - -import org.junit.Test; - -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - -public class EhcacheEntityMessageTest { - - @Test - public void testEhcacheEntityMessageTypes() { - assertThat(EhcacheEntityMessage.Type.LIFECYCLE_OP.getCode(), is((byte) 10)); - assertThat(EhcacheEntityMessage.Type.SERVER_STORE_OP.getCode(), is((byte) 20)); - assertThat(EhcacheEntityMessage.Type.STATE_REPO_OP.getCode(), is((byte) 30)); - assertThat(EhcacheEntityMessage.Type.SYNC_OP.getCode(), is((byte) 40)); - assertThat(EhcacheEntityMessage.Type.REPLICATION_OP.getCode(), is((byte) 50)); - } - -} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 11781ae232..67788c29e2 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -46,6 +46,8 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponseFactory; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; @@ -85,6 +87,10 @@ import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.hashInvalidationDone; import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.serverInvalidateHash; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveSynchroMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; import static 
org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; @@ -94,8 +100,6 @@ import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.DATA_CONCURRENCY_KEY_OFFSET; import static org.ehcache.clustered.server.ConcurrencyStrategies.DefaultConcurrencyStrategy.DEFAULT_KEY; -// TODO: Provide some mechanism to report on storage utilization -- PageSource provides little visibility -// TODO: Ensure proper operations for concurrent requests class EhcacheActiveEntity implements ActiveServerEntity { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheActiveEntity.class); @@ -271,18 +275,20 @@ public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEn " Check your server configuration and define at least one offheap resource."); } - switch (message.getType()) { - case LIFECYCLE_OP: - return invokeLifeCycleOperation(clientDescriptor, (LifecycleMessage) message); - case SERVER_STORE_OP: + if (message instanceof EhcacheOperationMessage) { + EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; + EhcacheMessageType messageType = operationMessage.getMessageType(); + if (isStoreOperationMessage(messageType)) { return invokeServerStoreOperation(clientDescriptor, (ServerStoreOpMessage) message); - case STATE_REPO_OP: + } else if (isLifecycleMessage(messageType)) { + return invokeLifeCycleOperation(clientDescriptor, (LifecycleMessage) message); + } else if (isStateRepoOperationMessage(messageType)) { return invokeStateRepositoryOperation(clientDescriptor, (StateRepositoryOpMessage) message); - case REPLICATION_OP: + } else if (isPassiveSynchroMessage(messageType)) { return responseFactory.success(); - default: - throw new IllegalMessageException("Unknown message : " + message); + } } + throw new IllegalMessageException("Unknown message : " + message); } catch (ClusterException e) { return responseFactory.failure(e); } catch (Exception e) { @@ -425,7 +431,7 @@ private void validateClusteredTierManagerConfigured(ClientDescriptor clientDescr } private EhcacheEntityResponse invokeLifeCycleOperation(ClientDescriptor clientDescriptor, LifecycleMessage message) throws ClusterException { - switch (message.operation()) { + switch (message.getMessageType()) { case CONFIGURE: configure(clientDescriptor, (ConfigureStoreManager) message); break; @@ -480,8 +486,8 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client }); } - switch (message.operation()) { - case GET: { + switch (message.getMessageType()) { + case GET_STORE: { ServerStoreOpMessage.GetMessage getMessage = (ServerStoreOpMessage.GetMessage) message; return responseFactory.response(cacheStore.get(getMessage.getKey())); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java index 164ca24ce1..e609cb00c5 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java @@ -31,10 +31,10 @@ class EhcacheExecutionStrategy implements ExecutionStrategy { @@ -77,25 +79,23 @@ public void invoke(EhcacheEntityMessage message) { " Check your server configuration and define at least one offheap resource."); } - switch (message.getType()) { - case LIFECYCLE_OP: - 
invokeLifeCycleOperation((LifecycleMessage) message); - break; - case SERVER_STORE_OP: + if (message instanceof EhcacheOperationMessage) { + EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; + EhcacheMessageType messageType = operationMessage.getMessageType(); + if (isStoreOperationMessage(messageType)) { invokeServerStoreOperation((ServerStoreOpMessage)message); - break; - case STATE_REPO_OP: + } else if (isLifecycleMessage(messageType)) { + invokeLifeCycleOperation((LifecycleMessage) message); + } else if (isStateRepoOperationMessage(messageType)) { ehcacheStateService.getStateRepositoryManager().invoke((StateRepositoryOpMessage)message); - break; - case SYNC_OP: - invokeSyncOperation((EhcacheSyncMessage) message); - break; - case REPLICATION_OP: + } else if (isPassiveSynchroMessage(messageType)) { invokeRetirementMessages((PassiveReplicationMessage)message); - break; - default: - throw new IllegalMessageException("Unknown message : " + message); + } + } else if (message instanceof EhcacheSyncMessage) { + invokeSyncOperation((EhcacheSyncMessage) message); } + + throw new IllegalMessageException("Unknown message : " + message); } catch (Exception e) { LOGGER.error("Unexpected exception raised during operation: " + message, e); } @@ -187,7 +187,7 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu throw new LifecycleException("Clustered tier does not exist : '" + message.getCacheId() + "'"); } - switch (message.operation()) { + switch (message.getMessageType()) { case APPEND: case GET_AND_APPEND: { LOGGER.debug("ServerStore append/getAndAppend message for msgId {} & client Id {} is tracked now.", message.getId(), message.getClientId()); @@ -213,7 +213,7 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu } private void invokeSyncOperation(EhcacheSyncMessage message) throws ClusterException { - switch (message.operation()) { + switch (message.getMessageType()) { case STATE: EhcacheStateSyncMessage stateSyncMessage = (EhcacheStateSyncMessage) message; @@ -227,19 +227,19 @@ private void invokeSyncOperation(EhcacheSyncMessage message) throws ClusterExcep } management.serverStoreCreated(entry.getKey()); } - stateSyncMessage.getTrackedClients().stream().forEach(id -> ehcacheStateService.getClientMessageTracker().add(id)); + stateSyncMessage.getTrackedClients().forEach(id -> ehcacheStateService.getClientMessageTracker().add(id)); break; case DATA: EhcacheDataSyncMessage dataSyncMessage = (EhcacheDataSyncMessage) message; ehcacheStateService.getStore(dataSyncMessage.getCacheId()).put(dataSyncMessage.getKey(), dataSyncMessage.getChain()); break; default: - throw new IllegalMessageException("Unknown Sync operation " + message.operation()); + throw new IllegalMessageException("Unknown Sync operation " + message.getMessageType()); } } private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterException { - switch (message.operation()) { + switch (message.getMessageType()) { case CONFIGURE: configure((ConfigureStoreManager) message); break; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java index 008266f71d..603c470e1b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java +++ 
b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java @@ -33,11 +33,6 @@ public EhcacheDataSyncMessage(final String cacheId, final long key, final Chain this.chain = chain; } - @Override - public SyncOp operation() { - return SyncOp.DATA; - } - @Override public SyncMessageType getMessageType() { return SyncMessageType.DATA; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java index 79ddde1bdd..605d7b2f9d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java @@ -53,11 +53,6 @@ public Set getTrackedClients() { return trackedClients; } - @Override - public SyncOp operation() { - return SyncOp.STATE; - } - @Override public SyncMessageType getMessageType() { return SyncMessageType.STATE; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java index bd536dbd10..13d04bba43 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessage.java @@ -25,51 +25,6 @@ @CommonComponent public abstract class EhcacheSyncMessage extends EhcacheEntityMessage { - @CommonComponent - public enum SyncOp { - - STATE((byte) 31), - DATA((byte) 32), - ; - - private final byte syncOpCode; - - SyncOp(byte syncOpCode) { - this.syncOpCode = syncOpCode; - } - - public byte getOpCode() { - return this.syncOpCode; - } - - public static SyncOp getSyncOp(byte syncOpCode) { - switch (syncOpCode) { - case 31: - return STATE; - case 32: - return DATA; - default: - throw new IllegalArgumentException("Sync operation not defined for : " + syncOpCode); - } - } - - } - - @Override - @Deprecated - public Type getType() { - return Type.SYNC_OP; - } - - @Deprecated - public abstract SyncOp operation(); - - @Override - @Deprecated - public byte getOpCode() { - return operation().getOpCode(); - } - @Override public void setId(final long id) { throw new UnsupportedOperationException(); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java b/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java index ec6135df89..68a0184fa4 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java @@ -42,8 +42,8 @@ EhcacheEntityResponse invoke(StateRepositoryOpMessage message) throws ClusterExc } Object result; - switch (message.operation()) { - case GET: + switch (message.getMessageType()) { + case GET_STATE_REPO: StateRepositoryOpMessage.GetMessage getMessage = (StateRepositoryOpMessage.GetMessage) message; result = map.get(getMessage.getKey()); break; @@ -58,7 +58,7 @@ EhcacheEntityResponse invoke(StateRepositoryOpMessage message) throws ClusterExc .collect(Collectors.toSet()); break; default: - throw new IllegalMessageException("Invalid operation: " + message.operation()); + throw new 
IllegalMessageException("Invalid operation: " + message.getMessageType()); } return EhcacheEntityResponse.mapValue(result); } From 77b2db756c24702a8f1e31ed236e40a9f226e8e0 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 21 Nov 2016 16:45:42 +0100 Subject: [PATCH 156/218] :construction: #1482 Move passive replication messages to server --- .../internal/messages/EhcacheCodec.java | 23 ++-- .../internal/messages/EhcacheMessageType.java | 9 +- .../internal/messages/MessageCodecUtils.java | 4 +- .../internal/messages/EhcacheCodecTest.java | 31 +---- .../clustered/server/EhcacheActiveEntity.java | 14 +- .../server/EhcacheExecutionStrategy.java | 2 +- .../server/EhcachePassiveEntity.java | 12 +- .../server/EhcacheServerEntityService.java | 4 +- .../internal/messages/EhcacheServerCodec.java | 93 ++++++++++++++ .../messages/PassiveReplicationMessage.java | 6 +- .../PassiveReplicationMessageCodec.java | 10 +- .../server/EhcacheActiveEntityTest.java | 4 +- .../server/EhcachePassiveEntityTest.java | 4 +- .../messages/EhcacheServerCodecTest.java | 120 ++++++++++++++++++ .../PassiveReplicationMessageCodecTest.java | 13 +- 15 files changed, 271 insertions(+), 78 deletions(-) create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java rename clustered/{common/src/main/java/org/ehcache/clustered/common => server/src/main/java/org/ehcache/clustered/server}/internal/messages/PassiveReplicationMessage.java (93%) rename clustered/{common/src/main/java/org/ehcache/clustered/common => server/src/main/java/org/ehcache/clustered/server}/internal/messages/PassiveReplicationMessageCodec.java (96%) create mode 100644 clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java rename clustered/{common/src/test/java/org/ehcache/clustered/common => server/src/test/java/org/ehcache/clustered/server}/internal/messages/PassiveReplicationMessageCodecTest.java (96%) diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java index b5c2ff065b..fb2982d595 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java @@ -30,7 +30,6 @@ import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveSynchroMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; import static org.terracotta.runnel.StructBuilder.newStructBuilder; @@ -39,28 +38,26 @@ public class EhcacheCodec implements MessageCodec SERVER_INSTANCE = - new EhcacheCodec(new ServerStoreOpCodec(), new LifeCycleMessageCodec(), new StateRepositoryOpCodec(), new ResponseCodec(), new PassiveReplicationMessageCodec()); + private static final EhcacheCodec SERVER_INSTANCE = + new EhcacheCodec(new ServerStoreOpCodec(), new LifeCycleMessageCodec(), new StateRepositoryOpCodec(), 
new ResponseCodec()); private final ServerStoreOpCodec serverStoreOpCodec; private final LifeCycleMessageCodec lifeCycleMessageCodec; private final StateRepositoryOpCodec stateRepositoryOpCodec; private final ResponseCodec responseCodec; - private final PassiveReplicationMessageCodec passiveReplicationMessageCodec; - public static MessageCodec messageCodec() { + public static EhcacheCodec messageCodec() { return SERVER_INSTANCE; } EhcacheCodec(ServerStoreOpCodec serverStoreOpCodec, LifeCycleMessageCodec lifeCycleMessageCodec, - StateRepositoryOpCodec stateRepositoryOpCodec, ResponseCodec responseCodec, PassiveReplicationMessageCodec passiveReplicationMessageCodec) { + StateRepositoryOpCodec stateRepositoryOpCodec, ResponseCodec responseCodec) { this.serverStoreOpCodec = serverStoreOpCodec; this.lifeCycleMessageCodec = lifeCycleMessageCodec; this.stateRepositoryOpCodec = stateRepositoryOpCodec; this.responseCodec = responseCodec; - this.passiveReplicationMessageCodec = passiveReplicationMessageCodec; } @Override @@ -75,8 +72,6 @@ public byte[] encodeMessage(EhcacheEntityMessage message) { return serverStoreOpCodec.encode((ServerStoreOpMessage) operationMessage); } else if (isStateRepoOperationMessage(operationMessage.getMessageType())) { return stateRepositoryOpCodec.encode((StateRepositoryOpMessage) operationMessage); - } else if (isPassiveSynchroMessage(operationMessage.getMessageType())) { - return passiveReplicationMessageCodec.encode((PassiveReplicationMessage) operationMessage); } throw new AssertionError("Unknown message type: " + operationMessage.getMessageType()); } @@ -97,16 +92,18 @@ public EhcacheEntityMessage decodeMessage(byte[] payload) throws MessageCodecExc byteBuffer.rewind(); EhcacheMessageType opCode = opCodeEnm.get(); + return decodeMessage(byteBuffer, opCode); + } + + public EhcacheEntityMessage decodeMessage(ByteBuffer byteBuffer, EhcacheMessageType opCode) { if (isLifecycleMessage(opCode)) { return lifeCycleMessageCodec.decode(opCode, byteBuffer); } else if (isStoreOperationMessage(opCode)) { return serverStoreOpCodec.decode(opCode, byteBuffer); } else if (isStateRepoOperationMessage(opCode)) { return stateRepositoryOpCodec.decode(opCode, byteBuffer); - } else if (isPassiveSynchroMessage(opCode)) { - return passiveReplicationMessageCodec.decode(opCode, byteBuffer); } else { - throw new UnsupportedOperationException("Undefined message code: " + opCodeEnm); + throw new UnsupportedOperationException("Unsupported message code: " + opCode); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java index 1ebcdc69f3..47e85aa7dd 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheMessageType.java @@ -50,8 +50,7 @@ public enum EhcacheMessageType { PUT_IF_ABSENT, ENTRY_SET, - // TODO server to server only, should not exist in common - // Passive synchronization messages + // Passive replication messages CHAIN_REPLICATION_OP, CLIENT_ID_TRACK_OP, CLEAR_INVALIDATION_COMPLETE, @@ -103,8 +102,8 @@ public static boolean isStateRepoOperationMessage(EhcacheMessageType value) { return STATE_REPO_OPERATION_MESSAGES.contains(value); } - public static final EnumSet PASSIVE_SYNC_MESSAGES = of(CHAIN_REPLICATION_OP, CLIENT_ID_TRACK_OP, CLEAR_INVALIDATION_COMPLETE, 
INVALIDATION_COMPLETE, CREATE_SERVER_STORE_REPLICATION, DESTROY_SERVER_STORE_REPLICATION); - public static boolean isPassiveSynchroMessage(EhcacheMessageType value) { - return PASSIVE_SYNC_MESSAGES.contains(value); + public static final EnumSet PASSIVE_REPLICATION_MESSAGES = of(CHAIN_REPLICATION_OP, CLIENT_ID_TRACK_OP, CLEAR_INVALIDATION_COMPLETE, INVALIDATION_COMPLETE, CREATE_SERVER_STORE_REPLICATION, DESTROY_SERVER_STORE_REPLICATION); + public static boolean isPassiveReplicationMessage(EhcacheMessageType value) { + return PASSIVE_REPLICATION_MESSAGES.contains(value); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java index 26048e087e..e3c7132226 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java @@ -56,14 +56,14 @@ public class MessageCodecUtils { .mapping(Consistency.STRONG, 2) .build(); - void encodeMandatoryFields(StructEncoder encoder, EhcacheOperationMessage message) { + public void encodeMandatoryFields(StructEncoder encoder, EhcacheOperationMessage message) { encoder.enm(EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) .int64(MSG_ID_FIELD, message.getId()) .int64(MSB_UUID_FIELD, message.getClientId().getMostSignificantBits()) .int64(LSB_UUID_FIELD, message.getClientId().getLeastSignificantBits()); } - UUID decodeUUID(StructDecoder decoder) { + public UUID decodeUUID(StructDecoder decoder) { return new UUID(decoder.int64(MSB_UUID_FIELD), decoder.int64(LSB_UUID_FIELD)); } diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java index cdf082d00e..ee62af2270 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/EhcacheCodecTest.java @@ -16,7 +16,6 @@ package org.ehcache.clustered.common.internal.messages; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -45,16 +44,13 @@ public class EhcacheCodecTest { @Mock private StateRepositoryOpCodec stateRepositoryOpCodec; - @Mock - private PassiveReplicationMessageCodec passiveReplicationMessageCodec; - private EhcacheCodec codec; @Before public void setUp() { initMocks(this); - codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null, passiveReplicationMessageCodec); + codec = new EhcacheCodec(serverStoreOpCodec, lifeCycleMessageCodec, stateRepositoryOpCodec, null); } @Test @@ -64,28 +60,18 @@ public void encodeMessage() throws Exception { verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, never()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, never()).encode(any(StateRepositoryOpMessage.class)); - verify(passiveReplicationMessageCodec, never()).encode(any(PassiveReplicationMessage.class)); ServerStoreOpMessage.ClearMessage serverStoreOpMessage = new ServerStoreOpMessage.ClearMessage("foo", CLIENT_ID); codec.encodeMessage(serverStoreOpMessage); 
verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, never()).encode(any(StateRepositoryOpMessage.class)); - verify(passiveReplicationMessageCodec, never()).encode(any(PassiveReplicationMessage.class)); StateRepositoryOpMessage.EntrySetMessage stateRepositoryOpMessage = new StateRepositoryOpMessage.EntrySetMessage("foo", "bar", CLIENT_ID); codec.encodeMessage(stateRepositoryOpMessage); verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); verify(stateRepositoryOpCodec, only()).encode(any(StateRepositoryOpMessage.class)); - verify(passiveReplicationMessageCodec, never()).encode(any(PassiveReplicationMessage.class)); - - ClientIDTrackerMessage clientIDTrackerMessage = new ClientIDTrackerMessage(20L, CLIENT_ID); - codec.encodeMessage(clientIDTrackerMessage); - verify(lifeCycleMessageCodec, only()).encode(any(LifecycleMessage.class)); - verify(serverStoreOpCodec, only()).encode(any(ServerStoreOpMessage.class)); - verify(stateRepositoryOpCodec, only()).encode(any(StateRepositoryOpMessage.class)); - verify(passiveReplicationMessageCodec, only()).encode(any(PassiveReplicationMessage.class)); } @@ -96,7 +82,7 @@ public void decodeLifeCycleMessages() throws Exception { codec.decodeMessage(encodedBuffer.array()); } verify(lifeCycleMessageCodec, times(EhcacheMessageType.LIFECYCLE_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); - verifyZeroInteractions(serverStoreOpCodec, stateRepositoryOpCodec, passiveReplicationMessageCodec); + verifyZeroInteractions(serverStoreOpCodec, stateRepositoryOpCodec); } @Test @@ -106,7 +92,7 @@ public void decodeServerStoreMessages() throws Exception { codec.decodeMessage(encodedBuffer.array()); } verify(serverStoreOpCodec, times(EhcacheMessageType.STORE_OPERATION_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); - verifyZeroInteractions(lifeCycleMessageCodec, stateRepositoryOpCodec, passiveReplicationMessageCodec); + verifyZeroInteractions(lifeCycleMessageCodec, stateRepositoryOpCodec); } @Test @@ -116,16 +102,7 @@ public void decodeStateRepoMessages() throws Exception { codec.decodeMessage(encodedBuffer.array()); } verify(stateRepositoryOpCodec, times(EhcacheMessageType.STATE_REPO_OPERATION_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); - verifyZeroInteractions(lifeCycleMessageCodec, serverStoreOpCodec, passiveReplicationMessageCodec); + verifyZeroInteractions(lifeCycleMessageCodec, serverStoreOpCodec); } - @Test - public void decodeClientIDTrackerMessages() throws Exception { - for (EhcacheMessageType messageType : EhcacheMessageType.PASSIVE_SYNC_MESSAGES) { - ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); - codec.decodeMessage(encodedBuffer.array()); - } - verify(passiveReplicationMessageCodec, times(EhcacheMessageType.PASSIVE_SYNC_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); - verifyZeroInteractions(lifeCycleMessageCodec, serverStoreOpCodec, stateRepositoryOpCodec); - } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 67788c29e2..518f575c19 100644 --- 
a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -49,11 +49,11 @@ import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.messages.ReconnectMessage; import org.ehcache.clustered.common.internal.messages.ReconnectMessageCodec; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; @@ -88,7 +88,7 @@ import static org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse.serverInvalidateHash; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveSynchroMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveReplicationMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; import static org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; @@ -284,7 +284,7 @@ public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEn return invokeLifeCycleOperation(clientDescriptor, (LifecycleMessage) message); } else if (isStateRepoOperationMessage(messageType)) { return invokeStateRepositoryOperation(clientDescriptor, (StateRepositoryOpMessage) message); - } else if (isPassiveSynchroMessage(messageType)) { + } else if (isPassiveReplicationMessage(messageType)) { return responseFactory.success(); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java index e609cb00c5..a4a81c0ffb 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheExecutionStrategy.java @@ -18,7 +18,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; -import 
org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessage; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index b0183e824d..60e4eee57b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -30,10 +30,10 @@ import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ConfigureStoreManager; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreOpMessage; import org.ehcache.clustered.common.internal.messages.StateRepositoryOpMessage; import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; @@ -57,7 +57,7 @@ import java.util.UUID; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; -import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveSynchroMessage; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveReplicationMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStateRepoOperationMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isStoreOperationMessage; @@ -88,7 +88,7 @@ public void invoke(EhcacheEntityMessage message) { invokeLifeCycleOperation((LifecycleMessage) message); } else if (isStateRepoOperationMessage(messageType)) { ehcacheStateService.getStateRepositoryManager().invoke((StateRepositoryOpMessage)message); - } else if (isPassiveSynchroMessage(messageType)) { + } else if (isPassiveReplicationMessage(messageType)) { invokeRetirementMessages((PassiveReplicationMessage)message); } } else if (message instanceof EhcacheSyncMessage) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java index 0aaa732db3..cc1fc92fe4 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java +++ 
b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java @@ -15,9 +15,9 @@ */ package org.ehcache.clustered.server; -import org.ehcache.clustered.common.internal.messages.EhcacheCodec; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.server.internal.messages.EhcacheServerCodec; import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessageCodec; import org.terracotta.entity.CommonServerEntity; import org.terracotta.entity.ConcurrencyStrategy; @@ -63,7 +63,7 @@ public ConcurrencyStrategy getConcurrencyStrategy(byte[] c @Override public MessageCodec getMessageCodec() { - return EhcacheCodec.messageCodec(); + return EhcacheServerCodec.getInstance(); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java new file mode 100644 index 0000000000..3840d3a9a9 --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java @@ -0,0 +1,93 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server.internal.messages; + +import org.ehcache.clustered.common.internal.messages.EhcacheCodec; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.entity.MessageCodec; +import org.terracotta.entity.MessageCodecException; +import org.terracotta.runnel.decoding.Enm; + +import java.nio.ByteBuffer; + +import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveReplicationMessage; + +/** + * EhcacheServerCodec + */ +public class EhcacheServerCodec implements MessageCodec { + + private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheServerCodec.class); + + private static final EhcacheServerCodec SERVER_INSTANCE = new EhcacheServerCodec((EhcacheCodec) EhcacheCodec.messageCodec(), new PassiveReplicationMessageCodec()); + + public static EhcacheServerCodec getInstance() { + return SERVER_INSTANCE; + } + + private final EhcacheCodec clientCodec; + private final PassiveReplicationMessageCodec replicationCodec; + + public EhcacheServerCodec(EhcacheCodec clientCodec, PassiveReplicationMessageCodec replicationCodec) { + this.clientCodec = clientCodec; + this.replicationCodec = replicationCodec; + } + + @Override + public byte[] encodeMessage(EhcacheEntityMessage message) throws MessageCodecException { + if (message instanceof PassiveReplicationMessage) { + return replicationCodec.encode((PassiveReplicationMessage) message); + } + return clientCodec.encodeMessage(message); + } + + @Override + public EhcacheEntityMessage decodeMessage(byte[] payload) throws MessageCodecException { + ByteBuffer byteBuffer = wrap(payload); + Enm opCodeEnm = EhcacheCodec.OP_CODE_DECODER.decoder(byteBuffer).enm("opCode"); + if (!opCodeEnm.isFound()) { + throw new AssertionError("Got a message without an opCode"); + } + if (!opCodeEnm.isValid()) { + LOGGER.warn("Received message with unknown operation code - more recent version at the other end?"); + return null; + } + + byteBuffer.rewind(); + + EhcacheMessageType messageType = opCodeEnm.get(); + if (isPassiveReplicationMessage(messageType)) { + return replicationCodec.decode(messageType, byteBuffer); + } + return clientCodec.decodeMessage(byteBuffer, messageType); + } + + @Override + public byte[] encodeResponse(EhcacheEntityResponse response) throws MessageCodecException { + return clientCodec.encodeResponse(response); + } + + @Override + public EhcacheEntityResponse decodeResponse(byte[] payload) throws MessageCodecException { + return clientCodec.decodeResponse(payload); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java similarity index 93% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java rename to clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java index baeeb4b12d..687f091743 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessage.java +++ 
b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java @@ -14,9 +14,13 @@ * limitations under the License. */ -package org.ehcache.clustered.common.internal.messages; +package org.ehcache.clustered.server.internal.messages; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.common.internal.store.Chain; import java.util.UUID; diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java similarity index 96% rename from clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java rename to clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java index 65f724e22e..b158b20116 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodec.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java @@ -14,9 +14,13 @@ * limitations under the License. */ -package org.ehcache.clustered.common.internal.messages; +package org.ehcache.clustered.server.internal.messages; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.ChainCodec; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.messages.MessageCodecUtils; import org.ehcache.clustered.common.internal.store.Chain; import org.terracotta.runnel.Struct; import org.terracotta.runnel.decoding.StructDecoder; @@ -43,9 +47,7 @@ import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_VALUE_TYPE_FIELD; import static org.terracotta.runnel.StructBuilder.newStructBuilder; - -// TODO move all this to server side - no use in common -class PassiveReplicationMessageCodec { +public class PassiveReplicationMessageCodec { private static final String CHAIN_FIELD = "chain"; diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 1455d805e1..e5bd66f858 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -41,8 +41,8 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import 
org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.server.internal.messages.EhcacheStateSyncMessage; import org.ehcache.clustered.server.state.ClientMessageTracker; diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index 742bdc6478..d5938338ab 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -23,8 +23,8 @@ import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java new file mode 100644 index 0000000000..aa35b83576 --- /dev/null +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java @@ -0,0 +1,120 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server.internal.messages; + +import org.ehcache.clustered.common.internal.messages.EhcacheCodec; +import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; +import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.nio.ByteBuffer; +import java.util.UUID; + +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.MockitoAnnotations.initMocks; + +/** + * EhcacheServerCodecTest + */ +public class EhcacheServerCodecTest { + + private static final UUID CLIENT_ID = UUID.randomUUID(); + + @Mock + private EhcacheCodec clientCodec; + + @Mock + private PassiveReplicationMessageCodec replicationCodec; + + private EhcacheServerCodec serverCodec; + + @Before + public void setUp() { + initMocks(this); + serverCodec = new EhcacheServerCodec(clientCodec, replicationCodec); + } + + @Test + public void testDelegatesToEhcacheCodeForEncoding() throws Exception { + LifecycleMessage lifecycleMessage = new LifecycleMessage() { + + @Override + public EhcacheMessageType getMessageType() { + return EhcacheMessageType.APPEND; + } + }; + serverCodec.encodeMessage(lifecycleMessage); + + verify(clientCodec).encodeMessage(any(EhcacheEntityMessage.class)); + verifyZeroInteractions(replicationCodec); + } + + @Test + public void testDelegatesToPassiveReplicationCodeForEncoding() throws Exception { + PassiveReplicationMessage.ClientIDTrackerMessage message = new PassiveReplicationMessage.ClientIDTrackerMessage(42L, CLIENT_ID); + serverCodec.encodeMessage(message); + + verify(replicationCodec).encode(message); + verifyZeroInteractions(clientCodec); + } + + @Test + public void decodeLifeCycleMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.LIFECYCLE_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + serverCodec.decodeMessage(encodedBuffer.array()); + } + verify(clientCodec, times(EhcacheMessageType.LIFECYCLE_MESSAGES.size())).decodeMessage(any(ByteBuffer.class), any(EhcacheMessageType.class)); + verifyZeroInteractions(replicationCodec); + } + + @Test + public void decodeServerStoreMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.STORE_OPERATION_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + serverCodec.decodeMessage(encodedBuffer.array()); + } + verify(clientCodec, times(EhcacheMessageType.STORE_OPERATION_MESSAGES.size())).decodeMessage(any(ByteBuffer.class), any(EhcacheMessageType.class)); + verifyZeroInteractions(replicationCodec); + } + + @Test + public void decodeStateRepoMessages() throws Exception { + for (EhcacheMessageType messageType : EhcacheMessageType.STATE_REPO_OPERATION_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + serverCodec.decodeMessage(encodedBuffer.array()); + } + verify(clientCodec, times(EhcacheMessageType.STATE_REPO_OPERATION_MESSAGES.size())).decodeMessage(any(ByteBuffer.class), any(EhcacheMessageType.class)); + verifyZeroInteractions(replicationCodec); + } + + @Test + public void decodeClientIDTrackerMessages() throws Exception { 
+ for (EhcacheMessageType messageType : EhcacheMessageType.PASSIVE_REPLICATION_MESSAGES) { + ByteBuffer encodedBuffer = EhcacheCodec.OP_CODE_DECODER.encoder().enm("opCode", messageType).encode(); + serverCodec.decodeMessage(encodedBuffer.array()); + } + verify(replicationCodec, times(EhcacheMessageType.PASSIVE_REPLICATION_MESSAGES.size())).decode(any(EhcacheMessageType.class), any(ByteBuffer.class)); + verifyZeroInteractions(clientCodec); + } +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java similarity index 96% rename from clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodecTest.java rename to clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java index 6348cd3c37..66f87507a1 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/PassiveReplicationMessageCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java @@ -14,24 +14,25 @@ * limitations under the License. */ -package org.ehcache.clustered.common.internal.messages; +package org.ehcache.clustered.server.internal.messages; import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; -import org.ehcache.clustered.common.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; +import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; import org.junit.Test; import java.util.UUID; import static java.nio.ByteBuffer.wrap; +import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; import static org.ehcache.clustered.common.internal.store.Util.createPayload; import static org.ehcache.clustered.common.internal.store.Util.getChain; -import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; From 572792472f5f31f5cf363d25ff6527533c38f733 Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Wed, 23 Nov 2016 15:53:52 -0500 Subject: [PATCH 157/218] :arrow_up: Close #1647 : Upgrade dependencies versions --- build.gradle | 11 ++++++----- .../ehcache/clustered/BasicClusteredCacheOpsTest.java | 2 +- .../CacheManagerLifecycleEhcacheIntegrationTest.java | 2 +- .../EhcacheClientEntityFactoryIntegrationTest.java | 2 
+- .../org/ehcache/clustered/JCacheClusteredTest.java | 2 +- .../org/ehcache/clustered/TerminatedServerTest.java | 2 +- .../management/AbstractClusteringManagementTest.java | 2 +- .../management/EhcacheConfigWithManagementTest.java | 2 +- ...ClusteredCacheOpsReplicationMultiThreadedTest.java | 2 +- .../BasicClusteredCacheOpsReplicationTest.java | 2 +- ...redCacheOpsReplicationWithMulitpleClientsTest.java | 2 +- .../BasicLifeCyclePassiveReplicationTest.java | 2 +- .../org/ehcache/clustered/sync/PassiveSyncTest.java | 2 +- .../server/VoltronReadWriteLockPassiveEntity.java | 3 --- .../clustered/server/EhcachePassiveEntity.java | 5 ----- .../server/state/EhcacheStateServiceProvider.java | 2 +- .../server/state/EhcacheStateServiceProviderTest.java | 4 +--- 17 files changed, 20 insertions(+), 29 deletions(-) diff --git a/build.gradle b/build.gradle index f5123009dd..1119f75e52 100644 --- a/build.gradle +++ b/build.gradle @@ -28,15 +28,15 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.11.beta5' + terracottaPlatformVersion = '5.0.12.beta2' managementVersion = terracottaPlatformVersion - terracottaApisVersion = '1.0.11.beta' - terracottaCoreVersion = '5.0.11-beta2' + terracottaApisVersion = '1.0.12.beta' + terracottaCoreVersion = '5.0.12-beta' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.11.beta' + terracottaPassthroughTestingVersion = '1.0.12.beta' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.11-beta2' + galvanVersion = '1.0.12-beta2' // Tools findbugsVersion = '3.0.1' @@ -98,6 +98,7 @@ subprojects { repositories { mavenCentral() + mavenLocal() maven { url "http://repo.terracotta.org/maven2" } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java index 4fd8dcb7e2..837925a296 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java @@ -53,7 +53,7 @@ public class BasicClusteredCacheOpsTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java index 14f4648a2a..0cd052ba89 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java @@ -60,7 +60,7 @@ public class CacheManagerLifecycleEhcacheIntegrationTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java index b937b7a34b..ad45ab7d26 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java @@ -46,7 +46,7 @@ public class 
EhcacheClientEntityFactoryIntegrationTest { private static final Map EMPTY_RESOURCE_MAP = Collections.emptyMap(); private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java index f0c15f08f0..16c069e811 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java @@ -39,7 +39,7 @@ public class JCacheClusteredTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java index dd70332511..09c9f21467 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java @@ -124,7 +124,7 @@ public static void setConcurrency() { } private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 8361ea04b2..29e4949058 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -78,7 +78,7 @@ public abstract class AbstractClusteringManagementTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "64" diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java index 24b451fd76..3ceb0111fd 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java @@ -43,7 +43,7 @@ public class EhcacheConfigWithManagementTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "64" diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java index 7982ff03f2..885389e71d 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java @@ -73,7 +73,7 @@ public class BasicClusteredCacheOpsReplicationMultiThreadedTest { private static final int NUM_OF_THREADS = 10; private static final int JOB_SIZE = 100; private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + diff --git 
a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java index 0236774035..d913daa0ea 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java @@ -58,7 +58,7 @@ public class BasicClusteredCacheOpsReplicationTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java index 8f36b0d507..b6a358017d 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java @@ -65,7 +65,7 @@ public class BasicClusteredCacheOpsReplicationWithMulitpleClientsTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java index 7a2fc2b8e9..17199bbc38 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java @@ -56,7 +56,7 @@ public class BasicLifeCyclePassiveReplicationTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java index c1db4c03d2..d755a427dc 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java @@ -43,7 +43,7 @@ public class PassiveSyncTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + diff --git a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java index bcb32fc9e7..98451e0ad1 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/lock/server/VoltronReadWriteLockPassiveEntity.java @@ -47,9 +47,6 @@ public void endSyncConcurrencyKey(int concurrencyKey) {} @Override public void createNew() {} - @Override - public void loadExisting() {} - @Override public void destroy() {} } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 60e4eee57b..972621983f 
100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -323,11 +323,6 @@ public void createNew() { management.init(); } - @Override - public void loadExisting() { - - } - @Override public void destroy() { management.close(); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java index 812b4e7305..c0b63d5d0b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java @@ -70,7 +70,7 @@ public Collection> getProvidedServiceTypes() { } @Override - public void clear() throws ServiceProviderCleanupException { + public void prepareForSynchronization() throws ServiceProviderCleanupException { serviceMap.clear(); } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java index 1a3545d12c..1e35b83b46 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java @@ -25,9 +25,7 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; public class EhcacheStateServiceProviderTest { @@ -69,7 +67,7 @@ public void testClear() throws ServiceProviderCleanupException { EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); - serviceProvider.clear(); + serviceProvider.prepareForSynchronization(); EhcacheStateService ehcacheStateServiceAfterClear = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); EhcacheStateService anotherStateServiceAfterClear = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); From 7a000bf461dae413f87b7de08c2a4c036e154dbb Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Thu, 24 Nov 2016 13:23:51 -0500 Subject: [PATCH 158/218] :snowflake: Remove mavenLocal() repository, committed by accident --- build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/build.gradle b/build.gradle index 1119f75e52..148d016d1e 100644 --- a/build.gradle +++ b/build.gradle @@ -98,7 +98,6 @@ subprojects { repositories { mavenCentral() - mavenLocal() maven { url "http://repo.terracotta.org/maven2" } } From 0d1b6e4718640940a3d0084b089b4c8d9377156d Mon Sep 17 00:00:00 2001 From: Ludovic Orban Date: Fri, 25 Nov 2016 11:24:59 +0100 Subject: [PATCH 159/218] #1654 add a gradle property that enables looking up artifacts in maven's local repo --- build.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/build.gradle b/build.gradle index 148d016d1e..238b6def0d 100644 --- a/build.gradle +++ b/build.gradle @@ 
-97,6 +97,9 @@ subprojects { targetCompatibility = 1.6 repositories { + if (project.hasProperty('mvnlocal')) { + mavenLocal() + } mavenCentral() maven { url "http://repo.terracotta.org/maven2" } } From d245f35e72c282bf95c5f86bc5959eeaf32f9714 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 28 Nov 2016 11:58:40 +0100 Subject: [PATCH 160/218] :bug: Fixes #1657 Decoded but unsupported message results in error --- .../clustered/server/EhcacheActiveEntity.java | 6 ++-- .../server/EhcachePassiveEntity.java | 13 ++++--- .../server/repo/ServerStateRepository.java | 2 +- .../server/EhcacheActiveEntityTest.java | 35 +++++++++++++++++++ .../server/EhcachePassiveEntityTest.java | 33 +++++++++++++++++ 5 files changed, 80 insertions(+), 9 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 518f575c19..422eba0741 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -288,7 +288,7 @@ public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEn return responseFactory.success(); } } - throw new IllegalMessageException("Unknown message : " + message); + throw new AssertionError("Unsupported message : " + message.getClass()); } catch (ClusterException e) { return responseFactory.failure(e); } catch (Exception e) { @@ -451,7 +451,7 @@ private EhcacheEntityResponse invokeLifeCycleOperation(ClientDescriptor clientDe destroyServerStore(clientDescriptor, (DestroyServerStore) message); break; default: - throw new IllegalMessageException("Unknown LifeCycle operation " + message); + throw new AssertionError("Unsupported LifeCycle operation " + message); } return responseFactory.success(); } @@ -536,7 +536,7 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client return responseFactory.success(); } default: - throw new IllegalMessageException("Unknown ServerStore operation : " + message); + throw new AssertionError("Unsupported ServerStore operation : " + message); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 972621983f..0ca93479e0 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -90,12 +90,15 @@ public void invoke(EhcacheEntityMessage message) { ehcacheStateService.getStateRepositoryManager().invoke((StateRepositoryOpMessage)message); } else if (isPassiveReplicationMessage(messageType)) { invokeRetirementMessages((PassiveReplicationMessage)message); + } else { + throw new AssertionError("Unsupported EhcacheOperationMessage: " + operationMessage.getMessageType()); } } else if (message instanceof EhcacheSyncMessage) { invokeSyncOperation((EhcacheSyncMessage) message); + } else { + throw new AssertionError("Unsupported EhcacheEntityMessage: " + message.getClass()); } - throw new IllegalMessageException("Unknown message : " + message); } catch (Exception e) { LOGGER.error("Unexpected exception raised during operation: " + message, e); } @@ -153,7 +156,7 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws destroyServerStore(destroyMessage.getStoreName()); 
break; default: - throw new IllegalMessageException("Unknown Retirement Message : " + message); + throw new AssertionError("Unsupported Retirement Message : " + message); } } @@ -208,7 +211,7 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu break; } default: - throw new IllegalMessageException("Unknown ServerStore operation : " + message); + throw new AssertionError("Unsupported ServerStore operation : " + message.getMessageType()); } } @@ -234,7 +237,7 @@ private void invokeSyncOperation(EhcacheSyncMessage message) throws ClusterExcep ehcacheStateService.getStore(dataSyncMessage.getCacheId()).put(dataSyncMessage.getKey(), dataSyncMessage.getChain()); break; default: - throw new IllegalMessageException("Unknown Sync operation " + message.getMessageType()); + throw new AssertionError("Unsupported Sync operation " + message.getMessageType()); } } @@ -251,7 +254,7 @@ private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterEx ehcacheStateService.getClientMessageTracker().track(message.getId(), message.getClientId()); break; default: - throw new IllegalMessageException("Unknown LifeCycle operation " + message); + throw new AssertionError("Unsupported LifeCycle operation " + message.getMessageType()); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java b/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java index 68a0184fa4..f4f878d92f 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/repo/ServerStateRepository.java @@ -58,7 +58,7 @@ EhcacheEntityResponse invoke(StateRepositoryOpMessage message) throws ClusterExc .collect(Collectors.toSet()); break; default: - throw new IllegalMessageException("Invalid operation: " + message.getMessageType()); + throw new AssertionError("Unsupported operation: " + message.getMessageType()); } return EhcacheEntityResponse.mapValue(result); } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index e5bd66f858..5e157f6eb6 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -2936,6 +2936,24 @@ public void testReplicationMessageAndOriginalServerStoreOpMessageHasSameConcurre assertThat(replicatedMessage.concurrencyKey(), is(((ConcurrentEntityMessage) getAndAppend).concurrencyKey())); } + @Test + public void testInvalidMessageThrowsError() throws Exception { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 8, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + try { + activeEntity.invoke(client, new InvalidMessage()); + fail("Invalid message should result in AssertionError"); + } catch (AssertionError e) { + assertThat(e.getMessage(), containsString("Unsupported")); + } + } + private void assertSuccess(EhcacheEntityResponse response) throws Exception { @@ -3220,4 +3238,21 @@ private long getUsed() { return used; } } + + private static class InvalidMessage extends 
EhcacheEntityMessage { + @Override + public void setId(long id) { + throw new UnsupportedOperationException("TODO Implement me!"); + } + + @Override + public long getId() { + throw new UnsupportedOperationException("TODO Implement me!"); + } + + @Override + public UUID getClientId() { + throw new UnsupportedOperationException("TODO Implement me!"); + } + } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index d5938338ab..0cfde00d33 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -49,6 +49,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -500,6 +501,21 @@ public void testDestroyWithStores() throws Exception { assertThat(registry.getResource("serverResource2").getUsed(), is(MemoryUnit.MEGABYTES.toBytes(0L))); } + @Test + public void testInvalidMessageThrowsError() throws Exception { + OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(4, MemoryUnit.MEGABYTES); + registry.addResource("serverResource", 4, MemoryUnit.MEGABYTES); + + final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + + try { + passiveEntity.invoke(new InvalidMessage()); + fail("Invalid message should result in AssertionError"); + } catch (AssertionError e) { + assertThat(e.getMessage(), containsString("Unsupported")); + } + } + private static ServerSideConfiguration.Pool pool(String resourceName, int poolSize, MemoryUnit unit) { return new ServerSideConfiguration.Pool(unit.toBytes(poolSize), resourceName); } @@ -732,4 +748,21 @@ private long getUsed() { return used; } } + + private static class InvalidMessage extends EhcacheEntityMessage { + @Override + public void setId(long id) { + throw new UnsupportedOperationException("TODO Implement me!"); + } + + @Override + public long getId() { + throw new UnsupportedOperationException("TODO Implement me!"); + } + + @Override + public UUID getClientId() { + throw new UnsupportedOperationException("TODO Implement me!"); + } + } } From 4438145c71bdcad968ba679b03adbb3165838ade Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 28 Nov 2016 12:17:22 +0100 Subject: [PATCH 161/218] Solve test instability caused by reconnection window --- .../org/ehcache/clustered/sync/PassiveSyncTest.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java index d755a427dc..14ae43ee24 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java @@ -33,11 +33,16 @@ import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; +import com.google.code.tempusfugit.temporal.Timeout; + import java.io.File; import java.util.Collections; import java.util.concurrent.CountDownLatch; import 
java.util.concurrent.atomic.AtomicBoolean; +import static com.google.code.tempusfugit.temporal.Duration.seconds; +import static com.google.code.tempusfugit.temporal.Timeout.timeout; +import static com.google.code.tempusfugit.temporal.WaitFor.waitOrTimeout; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; @@ -60,7 +65,7 @@ public void startServers() throws Exception { CLUSTER.getClusterControl().waitForRunningPassivesInStandby(); } - @Test + @Test(timeout = 150000) public void testSync() throws Exception { CLUSTER.getClusterControl().terminateOnePassive(); @@ -88,7 +93,10 @@ public void testSync() throws Exception { CLUSTER.getClusterControl().terminateActive(); CLUSTER.getClusterControl().waitForActive(); - for (long i = -5; i < 5; i++) { + // Sometimes the new passive believes there is a second connection and we have to wait for the full reconnect window before getting a result + waitOrTimeout(() -> "value-5".equals(cache.get(-5L)), timeout(seconds(130))); + + for (long i = -4; i < 5; i++) { assertThat(cache.get(i), equalTo("value" + i)); } } finally { From 6fec9954697a37cbc874466b7d554f00cecc26ac Mon Sep 17 00:00:00 2001 From: "EUR\\bra" Date: Wed, 16 Nov 2016 11:06:59 +0100 Subject: [PATCH 162/218] Clarified some terms around caching tiers and storage areas. --- .../docs/asciidoc/user/caching-concepts.adoc | 34 +++++++++++-------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/docs/src/docs/asciidoc/user/caching-concepts.adoc b/docs/src/docs/asciidoc/user/caching-concepts.adoc index 34b64fcb57..17a75ca025 100644 --- a/docs/src/docs/asciidoc/user/caching-concepts.adoc +++ b/docs/src/docs/asciidoc/user/caching-concepts.adoc @@ -37,41 +37,45 @@ and technical decision based upon the requirements and assumptions of your appli [[storage-tiers]] == Storage Tiers -You can configure Ehcache to use various data storage areas. When a cache is configured to use more than one data -store, these stores are referred to as tiers. +You can configure Ehcache to use various data storage areas. +When a cache is configured to use more than one storage area, those areas are arranged and managed as `tiers`. +They are organized in a hierarchy, with the lowest tier being called the `authority` tier and the others being part of the `caching` tier. +The caching tier can itself be composed of more than one storage area. +The _hottest_ data is kept in the caching tier, which is typically less abundant but faster than the authority tier. +All the data is kept in the authority tier, which is slower but more abundant. Data stores supported by Ehcache include: -* On-Heap Store – Utilizes Java's on-heap RAM memory to store cache entries. This tier utilizes the same heap memory as +* On-Heap Store - Utilizes Java's on-heap RAM memory to store cache entries. This tier utilizes the same heap memory as your Java application, all of which must be scanned by the JVM's garbage collector. The more heap space your JVM utilizes, the more your application's performance will be impacted by garbage collection pauses. This store is extremely fast, but is typically your most limited storage resource. -* Off-Heap Store – Limited in size only by available RAM (tested to as much as 6TB on a single machine!). Not subject -to Java garbage collection (GC). Is quite fast, yet slower than the On-Heap Store because data must be moved off and -on the JVM's heap as it is stored and re-accessed. -* Disk Store – Utilizes a disk (file system) to store cache entries. 
This type of storage resource is typically very +* Off-Heap Store - Limited in size only by available RAM. Not subject +to Java garbage collection (GC). Is quite fast, yet slower than the On-Heap Store because data must be moved to and +from the JVM's heap as it is stored and re-accessed. +* Disk Store - Utilizes a disk (file system) to store cache entries. This type of storage resource is typically very abundant but much slower than the RAM-based stores. +* Clustered Store - This data store is a cache on a remote server. +The remote server may optionally have a failover server providing improved high availability. +Since clustered storage comes with performance penalties due to such +factors as network latency as well as for establishing client/server consistency, this tier, by nature, is slower than +local off-heap storage. image::EhcacheTerminology.png[] -When a cache is configured to use more than one storage area, those areas are arranged and managed as tiers - where -the _hottest_ (most recently accessed) data is kept in the faster (and typically less abundant) tiers, and data that -is less hot remains in the slower (and more abundant) tiers. == Topology Types === Standalone -The data set is held in the application node. Any other application nodes are independent with no -communication between them. If a standalone topology is used where there are multiple application nodes running the -same application, then their caches are completely independent. +The data set is held in the application node. +If a standalone topology is used where there are multiple application nodes running the same application, then their caches are completely independent. === Distributed / Clustered The data is held in a remote server (or array of servers) with a subset of hot data held in each application node. This topology offers a selection of consistency options. A distributed topology is the recommended approach in a clustered or scaled-out application environment. -It provides the highest level of performance, availability, and scalability. +It provides the best combination of performance, availability, and scalability.
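To make the storage tier descriptions above concrete, here is a minimal configuration sketch that stacks an on-heap, an off-heap and a disk pool under a single cache, using the Ehcache 3 builder API that the statistics tests later in this series also exercise. It is illustrative only: the cache alias, pool sizes and persistence directory are arbitrary assumptions, not values taken from any patch.

[source,java]
----
import java.io.File;

import org.ehcache.Cache;
import org.ehcache.CacheManager;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.CacheManagerBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.config.units.EntryUnit;
import org.ehcache.config.units.MemoryUnit;
import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration;

public class TieredCacheSketch {

  public static void main(String[] args) {
    // Heap and off-heap together form the caching tier; the disk pool is the
    // authority tier that holds every mapping. Sizes below are illustrative only.
    CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder()
        .withCache("tieredCache",
            CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class,
                ResourcePoolsBuilder.newResourcePoolsBuilder()
                    .heap(100, EntryUnit.ENTRIES)   // fastest, most limited tier
                    .offheap(10, MemoryUnit.MB)     // larger, not subject to GC pauses
                    .disk(50, MemoryUnit.MB))       // slowest but most abundant tier
                .build())
        .using(new DefaultPersistenceConfiguration(new File("build/tiered-cache-data")))
        .build(true);

    Cache<Long, String> cache = cacheManager.getCache("tieredCache", Long.class, String.class);
    cache.put(1L, "one");           // every put reaches the authority tier
    String value = cache.get(1L);   // hot entries are then served from the caching tier
    System.out.println(value);

    cacheManager.close();
  }
}
----

A get that misses the caching tier is faulted in from the authority tier, which is essentially the behaviour the per-tier hit and miss rate tests added later in this series observe.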
image::ClusteredEhcacheTopology.png[] From 3d7c48ebf6fd9eb725a4c9b0c9e4e6573645cedd Mon Sep 17 00:00:00 2001 From: geoff gibson Date: Tue, 15 Nov 2016 17:12:48 -0800 Subject: [PATCH 163/218] Closes #1484 adds miss and hit rate tests for standalone and clustered cleanup adds call to trigger stats computations fixes null ptr error updates method name due to rebase removes unnecessary asserts in tests changes EhcacheStatisticsProviderConfiguration to use a timeInterval of Seconds removes unneccessary EhcacheStatisticsProviderConfiguration instance removes unused constructor paramater removes system.out --- .../ClusteredStatisticRateTest.java | 132 +++++++++++++++ .../providers/statistics/HitRateTest.java | 153 ++++++++++++++++++ .../providers/statistics/MissRateTest.java | 141 ++++++++++++++++ .../providers/statistics/StatsUtil.java | 112 ++++++++++++- 4 files changed, 534 insertions(+), 4 deletions(-) create mode 100755 clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticRateTest.java create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/HitRateTest.java create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/MissRateTest.java diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticRateTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticRateTest.java new file mode 100755 index 0000000000..9efbf323be --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticRateTest.java @@ -0,0 +1,132 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.clustered.management; + +import static org.hamcrest.CoreMatchers.is; + +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import org.ehcache.Cache; +import org.ehcache.management.config.DefaultStatisticsProviderConfiguration; +import org.ehcache.management.providers.statistics.EhcacheStatisticsProvider; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.history.RateHistory; + + +public class ClusteredStatisticRateTest extends AbstractClusteringManagementTest { + + private static DefaultStatisticsProviderConfiguration config = new DefaultStatisticsProviderConfiguration(EhcacheStatisticsProvider.class); + private static double HIT_RATE; + private static double MISS_RATE; + + @BeforeClass + public static void initSeconds() { + long seconds; + switch (config.averageWindowUnit()) { + case SECONDS: + seconds = config.averageWindowDuration(); + HIT_RATE = 2.0d / (double)seconds; + MISS_RATE = 2.0d / (double)seconds; + break; + case MINUTES: + seconds = TimeUnit.MINUTES.toSeconds(config.averageWindowDuration()); + HIT_RATE = 2.0d / (double)seconds; + MISS_RATE = 2.0d / (double)seconds; + break; + case HOURS: + seconds = TimeUnit.HOURS.toSeconds(config.averageWindowDuration()); + HIT_RATE = 2.0d / (double)seconds; + MISS_RATE = 2.0d / (double)seconds; + default: + throw new IllegalArgumentException("invalid averageWindowUnit: " + config.averageWindowUnit() + " for unit test! You can add this TimeUnit if neccessary"); + } + } + + @Test + public void test() throws Exception { + + double cacheHitRate = 0d; + double clusteredHitRate = 0d; + double cacheMissRate = 0d; + double clusteredMissRate = 0d; + + sendManagementCallOnClientToCollectStats("Cache:HitRate", "Clustered:HitRate","Cache:MissRate","Clustered:MissRate"); + Thread.sleep(25000); + + Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); + cache.put("one", "val1"); + cache.put("two", "val2"); + + cache.get("one"); //hit + cache.get("two"); //hit + + cache.get("three"); //miss + cache.get("four"); //miss + + + // it could be several seconds before the sampled stats could become available + // let's try until we find the correct values + do { + + // get the stats (we are getting the primitive counter, not the sample history) + List stats = waitForNextStats() + .stream() + .filter(statistics -> "dedicated-cache-1".equals(statistics.getContext().get("cacheName"))) + .collect(Collectors.toList());; + + for (ContextualStatistics stat : stats) { + if (stat.getContext().get("cacheName") != null && stat.getContext().get("cacheName").equals("dedicated-cache-1")) { + + //HIT stats + Sample[] samplesCacheHitRate = stat.getStatistic(RateHistory.class, "Cache:HitRate").getValue(); + if(samplesCacheHitRate.length > 0) { + cacheHitRate = samplesCacheHitRate[samplesCacheHitRate.length - 1].getValue(); + } + + Sample[] samplesClusteredHitRate = stat.getStatistic(RateHistory.class, "Clustered:HitRate").getValue(); + if(samplesClusteredHitRate.length > 0) { + clusteredHitRate = samplesClusteredHitRate[samplesClusteredHitRate.length - 1].getValue(); + } + + //MISS stats + Sample[] samplesCacheMissRate = stat.getStatistic(RateHistory.class, "Cache:MissRate").getValue(); + if(samplesCacheMissRate.length > 0) { + cacheMissRate = 
samplesCacheMissRate[samplesCacheMissRate.length - 1].getValue(); + } + + Sample[] samplesClusteredMissRate = stat.getStatistic(RateHistory.class, "Clustered:MissRate").getValue(); + if(samplesClusteredMissRate.length > 0) { + clusteredMissRate = samplesClusteredMissRate[samplesClusteredMissRate.length - 1].getValue(); + } + } + } + } while(!Thread.currentThread().isInterrupted() && + (cacheHitRate == 0d) && (clusteredHitRate == 0d) && + (cacheMissRate == 0d) && (clusteredMissRate == 0d)); + + Assert.assertThat(cacheHitRate,is(HIT_RATE)); + Assert.assertThat(clusteredHitRate,is(HIT_RATE)); + + Assert.assertThat(cacheMissRate,is(MISS_RATE)); + Assert.assertThat(clusteredMissRate,is(MISS_RATE)); + } + +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitRateTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitRateTest.java new file mode 100755 index 0000000000..87c1b4cf7d --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitRateTest.java @@ -0,0 +1,153 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; +import static org.ehcache.config.units.EntryUnit.ENTRIES; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +@RunWith(Parameterized.class) +public class HitRateTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private static final EhcacheStatisticsProviderConfiguration EHCACHE_STATISTICS_PROVIDER_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES); + private static final double 
CACHE_HIT_RATE = 5.0d / (double)TimeUnit.MINUTES.toSeconds(EHCACHE_STATISTICS_PROVIDER_CONFIG.averageWindowDuration()); + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + + @Parameterized.Parameters + public static Collection data() { + + double seconds = (double)TimeUnit.MINUTES.toSeconds(EHCACHE_STATISTICS_PROVIDER_CONFIG.averageWindowDuration()); + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), Arrays.asList("OnHeap:HitRate"), Arrays.asList(CACHE_HIT_RATE)}, + { newResourcePoolsBuilder().offheap(1, MB), Arrays.asList("OffHeap:HitRate"), Arrays.asList(CACHE_HIT_RATE) }, + { newResourcePoolsBuilder().disk(1, MB), Arrays.asList("Disk:HitRate"), Arrays.asList(CACHE_HIT_RATE) }, + + //2 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:HitRate","OffHeap:HitRate"), Arrays.asList(2d/seconds,3d/seconds)}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:HitRate","Disk:HitRate"), Arrays.asList(2d/seconds,3d/seconds)}, + //offheap and disk configuration is not valid. Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitRate","OffHeap:HitRate","Disk:HitRate"), Arrays.asList(2d/seconds,0d,3d/seconds)}, + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:HitRate","OffHeap:HitRate","Disk:HitRate"), Arrays.asList(1d/seconds,1d/seconds,3d/seconds)}, + }); + } + + public HitRateTest(Builder resources, List statNames, List tierExpectedValues) { + this.resources = resources.build(); + this.statNames = statNames; + this.tierExpectedValues = tierExpectedValues; + } + + @Test + public void test() throws InterruptedException, IOException { + + CacheManager cacheManager = null; + + try { + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(EHCACHE_STATISTICS_PROVIDER_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources) + .withEvictionAdvisor(new EvictionAdvisor() { + @Override + public boolean adviseAgainstEviction(Long key, String value) { + return key.equals(1L); + } + }) + .build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "OnHeap:HitRate","OffHeap:HitRate","Disk:HitRate","Cache:HitRate"); + + //Put values in cache + cache.put(1L, "one"); + cache.put(2L, "two"); + cache.put(3L, "three"); + + cache.get(1L);//HIT lowest tier + cache.get(2L);//HIT lowest tier + cache.get(3L);//HIT lowest tier + + cache.get(1L);//HIT higher tier + cache.get(2L);//HIT middle/highest tier (depends on number of tiers) + + //TIER stats + for (int i = 0; i < statNames.size(); i++) { + 
StatsUtil.getAndAssertExpectedValueFromRateHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + } + + //CACHE stats + StatsUtil.getAndAssertExpectedValueFromRateHistory("Cache:HitRate", context, managementRegistry, CACHE_HIT_RATE); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } + +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissRateTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissRateTest.java new file mode 100755 index 0000000000..24d149cd18 --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissRateTest.java @@ -0,0 +1,141 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + + +@RunWith(Parameterized.class) +public class MissRateTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private static final EhcacheStatisticsProviderConfiguration EHCACHE_STATISTICS_PROVIDER_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES); + private static final double CACHE_MISS_RATE = 3.0d / (double)TimeUnit.MINUTES.toSeconds(EHCACHE_STATISTICS_PROVIDER_CONFIG.averageWindowDuration()); + + private final ResourcePools resources; + private final List statNames; + private final List tierExpectedValues; + + + @Parameterized.Parameters + public static Collection data() { + + double seconds = (double)TimeUnit.MINUTES.toSeconds(EHCACHE_STATISTICS_PROVIDER_CONFIG.averageWindowDuration()); + + return asList(new Object[][] { + //1 tier 
+ { newResourcePoolsBuilder().heap(1, MB), Arrays.asList("OnHeap:MissRate"), Arrays.asList(CACHE_MISS_RATE)}, + { newResourcePoolsBuilder().offheap(1, MB), Arrays.asList("OffHeap:MissRate"), Arrays.asList(CACHE_MISS_RATE) }, + { newResourcePoolsBuilder().disk(1, MB), Arrays.asList("Disk:MissRate"), Arrays.asList(CACHE_MISS_RATE) }, + + //2 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), Arrays.asList("OnHeap:MissRate","OffHeap:MissRate"), Arrays.asList(3d/seconds,3d/seconds)}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList("OnHeap:MissRate","Disk:MissRate"), Arrays.asList(3d/seconds,3d/seconds)}, + //offheap and disk configuration is not valid. Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), Arrays.asList("OnHeap:MissRate","OffHeap:MissRate","Disk:MissRate"), Arrays.asList(3d/seconds,3d/seconds,3d/seconds)}, + }); + } + + public MissRateTest(Builder resources, List statNames, List tierExpectedValues) { + this.resources = resources.build(); + this.statNames = statNames; + this.tierExpectedValues = tierExpectedValues; + } + + @Test + public void test() throws InterruptedException, IOException { + + CacheManager cacheManager = null; + + try { + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(EHCACHE_STATISTICS_PROVIDER_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "OnHeap:MissRate","OffHeap:MissRate","Disk:MissRate","Cache:MissRate"); + + //Put values in cache + cache.put(1L, "one"); + cache.put(2L, "two"); + cache.put(3L, "three"); + + cache.get(4L);//MISS + cache.get(5L);//MISS + cache.get(6L);//MISS + + //TIER stats + for (int i = 0; i < statNames.size(); i++) { + StatsUtil.getAndAssertExpectedValueFromRateHistory(statNames.get(i), context, managementRegistry, tierExpectedValues.get(i)); + } + + //CACHE stats + StatsUtil.getAndAssertExpectedValueFromRateHistory("Cache:MissRate", context, managementRegistry, CACHE_MISS_RATE); + + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java index e89e53b366..9d846cd26d 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java @@ -15,6 +15,8 @@ */ package org.ehcache.management.providers.statistics; +import static java.util.Collections.singletonList; + import org.ehcache.management.ManagementRegistryService; import org.hamcrest.Matchers; import 
org.terracotta.management.model.context.Context; @@ -23,15 +25,16 @@ import org.terracotta.management.model.stats.ContextualStatistics; import org.terracotta.management.model.stats.Statistic; import org.terracotta.management.model.stats.StatisticHistory; +import org.terracotta.management.model.stats.history.AverageHistory; import org.terracotta.management.model.stats.history.CounterHistory; +import org.terracotta.management.model.stats.history.DurationHistory; +import org.terracotta.management.model.stats.history.RateHistory; import org.terracotta.management.model.stats.history.RatioHistory; import org.terracotta.management.registry.ResultSet; import org.terracotta.management.registry.StatisticQuery; - import java.util.Arrays; import java.util.Map; - -import static java.util.Collections.singletonList; +import org.junit.Assert; import static org.junit.Assert.assertThat; public class StatsUtil { @@ -148,7 +151,7 @@ public static void triggerStatComputation(ManagementRegistryService managementRe .getStatistics(); for (Map.Entry> entry : statistics.entrySet()) { - if (((StatisticHistory) entry.getValue()).getValue().length == 0) { + if (((StatisticHistory) entry.getValue()).getValue().length < 2) { noSample = true; break; } @@ -156,4 +159,105 @@ public static void triggerStatComputation(ManagementRegistryService managementRe } while (!Thread.currentThread().isInterrupted() && noSample); } + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your expectedResult, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong expectedResult. + */ + public static double getAndAssertExpectedValueFromRateHistory(String statName, Context context, ManagementRegistryService managementRegistry, Double expectedResult) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + Double value = 0d; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + Assert.assertThat(counters.size(), Matchers.is(1)); + + RateHistory rateHistory = statisticsContext.getStatistic(RateHistory.class, statName); + + if (rateHistory.getValue().length > 0) { + int mostRecentIndex = rateHistory.getValue().length - 1; + value = rateHistory.getValue()[mostRecentIndex].getValue(); + } + + }while(!Thread.currentThread().isInterrupted() && !value.equals(expectedResult)); + + Assert.assertThat(value, Matchers.is(expectedResult)); + + return value; + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your minExpectedValue, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong minExpectedValue. 
+ */ + public static long getExpectedValueFromDurationHistory(String statName, Context context, ManagementRegistryService managementRegistry, Long minExpectedValue) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + Long value = null; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + Assert.assertThat(counters.size(), Matchers.is(1)); + + DurationHistory durationHistory = statisticsContext.getStatistic(DurationHistory.class, statName); + + if (durationHistory.getValue().length > 0) { + int mostRecentIndex = durationHistory.getValue().length - 1; + value = durationHistory.getValue()[mostRecentIndex].getValue(); + } + + }while(!Thread.currentThread().isInterrupted() && value == null); + + Assert.assertThat(value, Matchers.greaterThan(minExpectedValue)); + + return value; + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your minExpectedValue, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong minExpectedValue. + */ + public static double getExpectedValueFromAverageHistory(String statName, Context context, ManagementRegistryService managementRegistry, double minExpectedValue) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + double value = 0; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + Assert.assertThat(counters.size(), Matchers.is(1)); + + AverageHistory avgHistory = statisticsContext.getStatistic(AverageHistory.class, statName); + + if (avgHistory.getValue().length > 0) { + int mostRecentIndex = avgHistory.getValue().length - 1; + value = avgHistory.getValue()[mostRecentIndex].getValue(); + } + + }while(!Thread.currentThread().isInterrupted() && value <= minExpectedValue); + + Assert.assertThat(value, Matchers.greaterThan(minExpectedValue)); + + return value; + } } From 13ff9aa9b24720986a45a956a8b827758f41dcd3 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Mon, 21 Nov 2016 15:14:59 +0530 Subject: [PATCH 164/218] Lazily track clients on Passive Entity #1591 --- .../ehcache/clustered/server/EhcacheActiveEntity.java | 7 +------ .../ehcache/clustered/server/EhcachePassiveEntity.java | 10 ++-------- .../clustered/server/state/ClientMessageTracker.java | 8 +++++++- .../clustered/server/EhcacheActiveEntityTest.java | 6 +----- .../clustered/server/EhcachePassiveEntityTest.java | 6 ------ 5 files changed, 11 insertions(+), 26 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 422eba0741..797770883e 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -51,7 +51,6 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; -import 
org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.messages.ReconnectMessage; @@ -718,11 +717,7 @@ private void validate(ClientDescriptor clientDescriptor, ValidateStoreManager me } else if (clientIdMap.get(clientDescriptor) != null) { throw new LifecycleException("Client : " + clientDescriptor + " is already being tracked with Client Id : " + clientIdMap.get(clientDescriptor)); } - try { - entityMessenger.messageSelfAndDeferRetirement(message, new ClientIDTrackerMessage(message.getId(), message.getClientId())); - } catch (MessageCodecException e) { - throw new AssertionError("Codec error", e); - } + addClientId(clientDescriptor, message.getClientId()); ehcacheStateService.validate(message.getConfiguration()); this.clientStateMap.get(clientDescriptor).attach(); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 0ca93479e0..d94f4ab522 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -107,7 +107,7 @@ public void invoke(EhcacheEntityMessage message) { EhcachePassiveEntity(ServiceRegistry services, byte[] config, final KeySegmentMapper mapper) { this.identity = ClusteredEhcacheIdentity.deserialize(config); - OffHeapResources offHeapResources = services.getService(new BasicServiceConfiguration(OffHeapResources.class)); + OffHeapResources offHeapResources = services.getService(new BasicServiceConfiguration<>(OffHeapResources.class)); if (offHeapResources == null) { this.offHeapResourceIdentifiers = Collections.emptySet(); } else { @@ -135,10 +135,6 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); trackHashInvalidationForEventualCache(retirementMessage); break; - case CLIENT_ID_TRACK_OP: - LOGGER.debug("PassiveReplicationMessage message for msgId {} & client Id {}", message.getId(), message.getClientId()); - ehcacheStateService.getClientMessageTracker().add(message.getClientId()); - break; case INVALIDATION_COMPLETE: untrackHashInvalidationForEventualCache((InvalidationCompleteMessage)message); break; @@ -161,8 +157,7 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws } private void untrackHashInvalidationForEventualCache(InvalidationCompleteMessage message) { - InvalidationCompleteMessage invalidationCompleteMessage = message; - ehcacheStateService.getInvalidationTracker(invalidationCompleteMessage.getCacheId()).getInvalidationMap().computeIfPresent(invalidationCompleteMessage.getKey(), (key, count) -> { + ehcacheStateService.getInvalidationTracker(message.getCacheId()).getInvalidationMap().computeIfPresent(message.getKey(), (key, count) -> { if (count == 1) { return null; } @@ -230,7 +225,6 @@ private void invokeSyncOperation(EhcacheSyncMessage message) throws ClusterExcep } management.serverStoreCreated(entry.getKey()); } - stateSyncMessage.getTrackedClients().forEach(id -> ehcacheStateService.getClientMessageTracker().add(id)); break; case DATA: 
EhcacheDataSyncMessage dataSyncMessage = (EhcacheDataSyncMessage) message; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java index b97f269b49..a0cdbcf496 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java @@ -34,7 +34,13 @@ public boolean isAdded(UUID clientId) { } public void track(long msgId, UUID clientId) { - messageTrackers.get(clientId).track(msgId); + messageTrackers.compute(clientId, (mappedUuid, messageTracker) -> { + if (messageTracker == null) { + messageTracker = new MessageTracker(); + } + messageTracker.track(msgId); + return messageTracker; + }); } public void applied(long msgId, UUID clientId){ diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 5e157f6eb6..fdd9fc0266 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -40,9 +40,7 @@ import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; -import org.ehcache.clustered.common.internal.messages.LifecycleMessage.ValidateStoreManager; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; -import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.server.internal.messages.EhcacheStateSyncMessage; import org.ehcache.clustered.server.state.ClientMessageTracker; @@ -2749,7 +2747,6 @@ public void testCreateServerStoreSendsPassiveReplicationMessageIfSuccessful() th } verify(entityMessenger, times(0)).messageSelf(any()); - verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(ValidateStoreManager.class), any(ClientIDTrackerMessage.class)); reset(entityMessenger); @@ -2817,7 +2814,7 @@ public void testDestroyServerStoreSendsPassiveReplicationMessageIfSuccessful() t } verify(entityMessenger, times(0)).messageSelf(any()); - verify(entityMessenger, times(3)).messageSelfAndDeferRetirement(any(), any()); + verify(entityMessenger, times(1)).messageSelfAndDeferRetirement(any(), any()); reset(entityMessenger); @@ -2864,7 +2861,6 @@ public void testPromotedActiveIgnoresDuplicateMessages() throws MessageCodecExce ehcacheStateService.createStore("test", serverStoreConfiguration); ClientMessageTracker clientMessageTracker = ehcacheStateService.getClientMessageTracker(); - clientMessageTracker.add(CLIENT_ID); Random random = new Random(); Set msgIds = new HashSet<>(); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index 0cfde00d33..92cea74fe8 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ 
b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -24,7 +24,6 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; -import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.server.state.EhcacheStateService; import org.hamcrest.Matchers; @@ -251,7 +250,6 @@ public void testCreateDedicatedServerStore() throws Exception { .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); - passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("cacheAlias", new ServerStoreConfigBuilder() .dedicated("serverResource1", 4, MemoryUnit.MEGABYTES) @@ -290,7 +288,6 @@ public void testCreateSharedServerStore() throws Exception { .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); - passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("cacheAlias", new ServerStoreConfigBuilder() @@ -326,7 +323,6 @@ public void testDestroyServerStore() throws Exception { .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); - passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("dedicatedCache", new ServerStoreConfigBuilder() @@ -393,7 +389,6 @@ public void testSharedPoolCacheNameCollision() throws Exception { .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); - passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); assertThat(registry.getStoreManagerService().getStores(), is(Matchers.empty())); EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("dedicatedCache", @@ -465,7 +460,6 @@ public void testDestroyWithStores() throws Exception { .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); - passiveEntity.invoke(new ClientIDTrackerMessage(0L, CLIENT_ID)); EhcacheEntityMessage createServerStore = MESSAGE_FACTORY.createServerStore("dedicatedCache", new ServerStoreConfigBuilder() From aa920f9d3cd905a36ff6bab82dc02d6b603c2f3c Mon Sep 17 00:00:00 2001 From: Abhilash Date: Mon, 21 Nov 2016 15:38:09 +0530 Subject: [PATCH 165/218] Clear client tracking at passive #1591 --- .../clustered/server/EhcacheActiveEntity.java | 6 +++ .../server/EhcachePassiveEntity.java | 3 ++ .../messages/PassiveReplicationMessage.java | 37 ++++++++++++++----- .../PassiveReplicationMessageCodec.java | 13 ++----- .../messages/EhcacheServerCodecTest.java | 3 +- .../PassiveReplicationMessageCodecTest.java | 3 +- 6 files changed, 43 insertions(+), 22 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java 
b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 797770883e..3fc79e0287 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -51,6 +51,7 @@ import org.ehcache.clustered.common.internal.messages.LifecycleMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.InvalidationCompleteMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; import org.ehcache.clustered.common.internal.messages.ReconnectMessage; @@ -261,6 +262,11 @@ public void disconnected(ClientDescriptor clientDescriptor) { } UUID clientId = clientIdMap.remove(clientDescriptor); if (clientId != null) { + try { + entityMessenger.messageSelf(new ClientIDTrackerMessage(clientId)); + } catch (MessageCodecException mce) { + throw new AssertionError("Codec error", mce); + } trackedClients.remove(clientId); ehcacheStateService.getClientMessageTracker().remove(clientId); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index d94f4ab522..4230db3a6c 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -151,6 +151,9 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws PassiveReplicationMessage.DestroyServerStoreReplicationMessage destroyMessage = (PassiveReplicationMessage.DestroyServerStoreReplicationMessage) message; destroyServerStore(destroyMessage.getStoreName()); break; + case CLIENT_ID_TRACK_OP: + ehcacheStateService.getClientMessageTracker().remove(message.getClientId()); + break; default: throw new AssertionError("Unsupported Retirement Message : " + message); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java index 687f091743..bbf1dd32d9 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessage.java @@ -37,21 +37,20 @@ public void setId(long id) { public static class ClientIDTrackerMessage extends PassiveReplicationMessage { private final UUID clientId; - private final long msgId; - public ClientIDTrackerMessage(long msgId, UUID clientId) { - this.msgId = msgId; + public ClientIDTrackerMessage(UUID clientId) { this.clientId = clientId; } - public long getId() { - return msgId; - } - public UUID getClientId() { return clientId; } + @Override + public long getId() { + throw new UnsupportedOperationException("Not supported for ClientIDTrackerMessage"); + } + @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.CLIENT_ID_TRACK_OP; @@ -63,9 +62,11 @@ public static class 
ChainReplicationMessage extends ClientIDTrackerMessage imple private final String cacheId; private final long key; private final Chain chain; + private final long msgId; public ChainReplicationMessage(String cacheId, long key, Chain chain, long msgId, UUID clientId) { - super(msgId, clientId); + super(clientId); + this.msgId = msgId; this.cacheId = cacheId; this.key = key; this.chain = chain; @@ -83,6 +84,10 @@ public Chain getChain() { return chain; } + public long getId() { + return msgId; + } + @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.CHAIN_REPLICATION_OP; @@ -154,13 +159,15 @@ public static class CreateServerStoreReplicationMessage extends ClientIDTrackerM private final String storeName; private final ServerStoreConfiguration storeConfiguration; + private final long msgId; public CreateServerStoreReplicationMessage(LifecycleMessage.CreateServerStore createMessage) { this(createMessage.getId(), createMessage.getClientId(), createMessage.getName(), createMessage.getStoreConfiguration()); } public CreateServerStoreReplicationMessage(long msgId, UUID clientId, String storeName, ServerStoreConfiguration configuration) { - super(msgId, clientId); + super(clientId); + this.msgId = msgId; this.storeName = storeName; this.storeConfiguration = configuration; } @@ -173,6 +180,10 @@ public ServerStoreConfiguration getStoreConfiguration() { return storeConfiguration; } + public long getId() { + return msgId; + } + @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.CREATE_SERVER_STORE_REPLICATION; @@ -182,20 +193,26 @@ public EhcacheMessageType getMessageType() { public static class DestroyServerStoreReplicationMessage extends ClientIDTrackerMessage { private final String storeName; + private final long msgId; public DestroyServerStoreReplicationMessage(LifecycleMessage.DestroyServerStore destroyMessage) { this(destroyMessage.getId(), destroyMessage.getClientId(), destroyMessage.getName()); } public DestroyServerStoreReplicationMessage(long msgId, UUID clientId, String storeName) { - super(msgId, clientId); + super(clientId); this.storeName = storeName; + this.msgId = msgId; } public String getStoreName() { return storeName; } + public long getId() { + return msgId; + } + @Override public EhcacheMessageType getMessageType() { return EhcacheMessageType.DESTROY_SERVER_STORE_REPLICATION; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java index b158b20116..95f5dac747 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java @@ -53,7 +53,6 @@ public class PassiveReplicationMessageCodec { private static final Struct CLIENT_ID_TRACK_STRUCT = newStructBuilder() .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) - .int64(MSG_ID_FIELD, 15) .int64(MSB_UUID_FIELD, 20) .int64(LSB_UUID_FIELD, 21) .build(); @@ -178,7 +177,9 @@ private byte[] encodeChainReplicationMessage(PassiveReplicationMessage.ChainRepl private byte[] encodeClientIdTrackMessage(PassiveReplicationMessage.ClientIDTrackerMessage message) { StructEncoder encoder = CLIENT_ID_TRACK_STRUCT.encoder(); - messageCodecUtils.encodeMandatoryFields(encoder, message); + 
encoder.enm(EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) + .int64(MSB_UUID_FIELD, message.getClientId().getMostSignificantBits()) + .int64(LSB_UUID_FIELD, message.getClientId().getLeastSignificantBits()); return encoder.encode().array(); } @@ -257,15 +258,9 @@ private PassiveReplicationMessage.ChainReplicationMessage decodeChainReplication private PassiveReplicationMessage.ClientIDTrackerMessage decodeClientIdTrackMessage(ByteBuffer messageBuffer) { StructDecoder decoder = CLIENT_ID_TRACK_STRUCT.decoder(messageBuffer); - Long msgId = decoder.int64(MSG_ID_FIELD); UUID clientId = messageCodecUtils.decodeUUID(decoder); - return new PassiveReplicationMessage.ClientIDTrackerMessage(msgId, clientId); + return new PassiveReplicationMessage.ClientIDTrackerMessage(clientId); } - private static UUID getClientId(ByteBuffer payload) { - long msb = payload.getLong(); - long lsb = payload.getLong(); - return new UUID(msb, lsb); - } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java index aa35b83576..25f0f9798b 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodecTest.java @@ -20,6 +20,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -71,7 +72,7 @@ public EhcacheMessageType getMessageType() { @Test public void testDelegatesToPassiveReplicationCodeForEncoding() throws Exception { - PassiveReplicationMessage.ClientIDTrackerMessage message = new PassiveReplicationMessage.ClientIDTrackerMessage(42L, CLIENT_ID); + ClientIDTrackerMessage message = new ClientIDTrackerMessage(CLIENT_ID); serverCodec.encodeMessage(message); verify(replicationCodec).encode(message); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java index 66f87507a1..93b4ba8b00 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java @@ -47,13 +47,12 @@ public class PassiveReplicationMessageCodecTest { @Test public void testClientIDTrackerMessageCodec() { - ClientIDTrackerMessage clientIDTrackerMessage = new ClientIDTrackerMessage(200L, UUID.randomUUID()); + ClientIDTrackerMessage clientIDTrackerMessage = new ClientIDTrackerMessage(UUID.randomUUID()); byte[] encoded = codec.encode(clientIDTrackerMessage); PassiveReplicationMessage decodedMsg = (PassiveReplicationMessage) codec.decode(EhcacheMessageType.CLIENT_ID_TRACK_OP, wrap(encoded)); assertThat(decodedMsg.getClientId(), is(clientIDTrackerMessage.getClientId())); - assertThat(decodedMsg.getId(), is(clientIDTrackerMessage.getId())); } From 8adabc9616f8820471d334443e531bc94db60009 Mon Sep 
17 00:00:00 2001 From: Abhilash Date: Mon, 21 Nov 2016 19:51:16 +0530 Subject: [PATCH 166/218] Deprecating tracking on message tracker #1591 --- .../server/EhcachePassiveEntity.java | 16 ++------- .../server/state/ClientMessageTracker.java | 33 ++++++++++++------- .../server/state/MessageTracker.java | 2 ++ 3 files changed, 27 insertions(+), 24 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 4230db3a6c..a6ec7eefcf 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -132,7 +132,7 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws throw new LifecycleException("Clustered tier does not exist : '" + retirementMessage.getCacheId() + "'"); } cacheStore.put(retirementMessage.getKey(), retirementMessage.getChain()); - ehcacheStateService.getClientMessageTracker().applied(message.getId(), message.getClientId()); + applyMessage(message); trackHashInvalidationForEventualCache(retirementMessage); break; case INVALIDATION_COMPLETE: @@ -189,12 +189,6 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu } switch (message.getMessageType()) { - case APPEND: - case GET_AND_APPEND: { - LOGGER.debug("ServerStore append/getAndAppend message for msgId {} & client Id {} is tracked now.", message.getId(), message.getClientId()); - ehcacheStateService.getClientMessageTracker().track(message.getId(), message.getClientId()); - break; - } case REPLACE: { ServerStoreOpMessage.ReplaceAtHeadMessage replaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage)message; cacheStore.replaceAtHead(replaceAtHeadMessage.getKey(), replaceAtHeadMessage.getExpect(), replaceAtHeadMessage.getUpdate()); @@ -244,7 +238,7 @@ private void invokeLifeCycleOperation(LifecycleMessage message) throws ClusterEx configure((ConfigureStoreManager) message); break; case VALIDATE: - trackAndApplyMessage(message); + applyMessage(message); break; case CREATE_SERVER_STORE: case DESTROY_SERVER_STORE: @@ -261,12 +255,8 @@ private void configure(ConfigureStoreManager message) throws ClusterException { management.sharedPoolsConfigured(); } - private void trackAndApplyMessage(LifecycleMessage message) { + private void applyMessage(EhcacheOperationMessage message) { ClientMessageTracker clientMessageTracker = ehcacheStateService.getClientMessageTracker(); - if (!clientMessageTracker.isAdded(message.getClientId())) { - throw new IllegalStateException("Untracked client id " + message.getClientId()); - } - clientMessageTracker.track(message.getId(), message.getClientId()); clientMessageTracker.applied(message.getId(), message.getClientId()); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java index a0cdbcf496..425d4fd270 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java @@ -16,6 +16,9 @@ package org.ehcache.clustered.server.state; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.tc.classloader.CommonComponent; import java.util.UUID; @@ -25,18 +28,23 @@ @CommonComponent public 
class ClientMessageTracker { + private static final Logger LOGGER = LoggerFactory.getLogger(ClientMessageTracker.class); + private final ConcurrentMap messageTrackers = new ConcurrentHashMap<>(); private volatile UUID entityConfiguredStamp = null; private volatile long configuredTimestamp; - public boolean isAdded(UUID clientId) { - return messageTrackers.containsKey(clientId); - } - + //TODO : This method will be removed once we move to model where + //caches are entites. Then passive just needs to keep track of + //applied messages. Thus only 'applied' method will be keeping + // track of watermarking for de-duplication. This method is only + // allowed to be used by cache lifecycle message for now. + @Deprecated public void track(long msgId, UUID clientId) { messageTrackers.compute(clientId, (mappedUuid, messageTracker) -> { if (messageTracker == null) { messageTracker = new MessageTracker(); + LOGGER.info("Tracking client {}.", clientId); } messageTracker.track(msgId); return messageTracker; @@ -44,7 +52,15 @@ public void track(long msgId, UUID clientId) { } public void applied(long msgId, UUID clientId){ - messageTrackers.get(clientId).applied(msgId); + messageTrackers.compute(clientId, (mappedUuid, messageTracker) -> { + if (messageTracker == null) { + messageTracker = new MessageTracker(); + LOGGER.info("Tracking client {}.", clientId); + } + messageTracker.track(msgId); + messageTracker.applied(msgId); + return messageTracker; + }); } public boolean isDuplicate(long msgId, UUID clientId) { @@ -54,14 +70,9 @@ public boolean isDuplicate(long msgId, UUID clientId) { return !messageTrackers.get(clientId).shouldApply(msgId); } - public void add(UUID clientId) { - if(messageTrackers.putIfAbsent(clientId, new MessageTracker()) != null) { - throw new IllegalStateException("Same client "+ clientId +" cannot be tracked twice"); - } - } - public void remove(UUID clientId) { messageTrackers.remove(clientId); + LOGGER.info("Stop tracking client {}.", clientId); } public void setEntityConfiguredStamp(UUID clientId, long timestamp) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java index 3ee0e5c844..bb3da1f22c 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/MessageTracker.java @@ -70,7 +70,9 @@ boolean shouldApply(long msgId) { * Only to be invoked on Passive Entity * @param msgId */ + @Deprecated void track(long msgId) { + //TODO: remove this once we move to CACHE as ENTITY model. 
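    // Record this message id as seen-but-not-yet-applied and advance the high-water mark,
    // which the tracker uses for the de-duplication described in the note above.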
inProgressMessages.put(msgId, false); updateHigherWaterMark(msgId); } From b6e944776da0ccac8a718f900e672341d4392a14 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Tue, 22 Nov 2016 11:45:35 +0530 Subject: [PATCH 167/218] Clear clients which failed to reconnect #1591 --- .../ActivePassiveClientIdTest.java | 127 ++++++++++++++++++ ...leMessageActivePassvieReplicationTest.java | 51 ++----- .../client/replication/ReplicationUtil.java | 49 +++++++ .../ObservableEhcacheServerEntityService.java | 34 ++++- .../clustered/server/EhcacheActiveEntity.java | 16 +++ .../server/EhcacheServerEntityService.java | 3 +- .../server/state/ClientMessageTracker.java | 5 + 7 files changed, 242 insertions(+), 43 deletions(-) create mode 100644 clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java rename clustered/client/src/test/java/org/ehcache/clustered/client/{ => replication}/LifeCycleMessageActivePassvieReplicationTest.java (76%) create mode 100644 clustered/client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java new file mode 100644 index 0000000000..c30dd9ceff --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java @@ -0,0 +1,127 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.client.replication; + +import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.internal.EhcacheClientEntity; +import org.ehcache.clustered.client.internal.EhcacheClientEntityService; +import org.ehcache.clustered.client.internal.UnitTestConnectionService; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; +import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; +import org.ehcache.clustered.server.ObservableEhcacheServerEntityService; +import org.ehcache.clustered.server.ObservableEhcacheServerEntityService.ObservableEhcachePassiveEntity; +import org.ehcache.config.EvictionAdvisor; +import org.ehcache.config.ResourcePools; +import org.ehcache.core.spi.store.Store; +import org.ehcache.expiry.Expiry; +import org.ehcache.spi.serialization.Serializer; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.terracotta.offheapresource.OffHeapResourcesConfiguration; +import org.terracotta.offheapresource.OffHeapResourcesProvider; +import org.terracotta.offheapresource.config.MemoryUnit; +import org.terracotta.passthrough.PassthroughClusterControl; +import org.terracotta.passthrough.PassthroughTestHelpers; + +import java.net.URI; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; + +import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; +import static org.ehcache.clustered.client.replication.ReplicationUtil.getEntity; +import static org.ehcache.clustered.client.replication.ReplicationUtil.getServerStoreConfiguration; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +public class ActivePassiveClientIdTest { + + private PassthroughClusterControl clusterControl; + private static String STRIPENAME = "stripe"; + private static String STRIPE_URI = "passthrough://" + STRIPENAME; + private ObservableEhcacheServerEntityService observableEhcacheServerEntityService; + + @Before + public void setUp() throws Exception { + this.observableEhcacheServerEntityService = new ObservableEhcacheServerEntityService(); + this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, + server -> { + server.registerServerEntityService(observableEhcacheServerEntityService); + server.registerClientEntityService(new EhcacheClientEntityService()); + server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); + server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); + server.registerServiceProvider(new OffHeapResourcesProvider(), + new OffHeapResourcesConfiguration(getOffheapResourcesType("test", 32, MemoryUnit.MB))); + + UnitTestConnectionService.addServerToStripe(STRIPENAME, server); + } + ); + + clusterControl.waitForActive(); + clusterControl.waitForRunningPassivesInStandby(); + } + + @After + public void tearDown() throws Exception { + UnitTestConnectionService.removeStripe(STRIPENAME); + clusterControl.tearDown(); + } + + @Test + public void testClientIdGetsTrackedAtPassive() throws Exception { + ClusteringServiceConfiguration 
configuration = + ClusteringServiceConfigurationBuilder.cluster(URI.create(STRIPE_URI)) + .autoCreate() + .build(); + + ClusteringService service = new ClusteringServiceFactory().create(configuration); + + service.start(null); + + ObservableEhcachePassiveEntity ehcachePassiveEntity = observableEhcacheServerEntityService.getServedPassiveEntities().get(0); + + assertThat(ehcachePassiveEntity.getMessageTrackerMap().size(), is(0)); + + EhcacheClientEntity clientEntity = getEntity(service); + + clientEntity.createCache("testCache", getServerStoreConfiguration("test")); + + assertThat(ehcachePassiveEntity.getMessageTrackerMap().size(), is(1)); + + service.stop(); + + CompletableFuture completableFuture = CompletableFuture.supplyAsync(() -> { + while (true) { + try { + if (ehcachePassiveEntity.getMessageTrackerMap().size() == 0) { + return true; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + }); + assertThat(completableFuture.get(2, TimeUnit.SECONDS), is(true)); + + } + +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/LifeCycleMessageActivePassvieReplicationTest.java similarity index 76% rename from clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java rename to clustered/client/src/test/java/org/ehcache/clustered/client/replication/LifeCycleMessageActivePassvieReplicationTest.java index d1898e0acd..e2a6cbaa91 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/LifeCycleMessageActivePassvieReplicationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/LifeCycleMessageActivePassvieReplicationTest.java @@ -14,11 +14,9 @@ * limitations under the License. 
*/ -package org.ehcache.clustered.client; +package org.ehcache.clustered.client.replication; -import org.ehcache.clustered.client.config.ClusteredResourcePool; import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; -import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.ehcache.clustered.client.internal.EhcacheClientEntityService; @@ -29,13 +27,10 @@ import org.ehcache.clustered.client.internal.service.ClusteredTierManagerConfigurationException; import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.clustered.common.Consistency; -import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreManagerException; import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; import org.ehcache.clustered.server.EhcacheServerEntityService; -import org.ehcache.impl.serialization.CompactJavaSerializer; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -43,14 +38,13 @@ import org.terracotta.offheapresource.OffHeapResourcesProvider; import org.terracotta.offheapresource.config.MemoryUnit; import org.terracotta.passthrough.PassthroughClusterControl; -import org.terracotta.passthrough.PassthroughServer; import org.terracotta.passthrough.PassthroughTestHelpers; -import java.lang.reflect.Field; import java.net.URI; import static org.ehcache.clustered.client.internal.UnitTestConnectionService.getOffheapResourcesType; -import static org.ehcache.config.units.MemoryUnit.MB; +import static org.ehcache.clustered.client.replication.ReplicationUtil.getEntity; +import static org.ehcache.clustered.client.replication.ReplicationUtil.getServerStoreConfiguration; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; @@ -65,18 +59,15 @@ public class LifeCycleMessageActivePassvieReplicationTest { @Before public void setUp() throws Exception { this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, - new PassthroughTestHelpers.ServerInitializer() { - @Override - public void registerServicesForServer(PassthroughServer server) { - server.registerServerEntityService(new EhcacheServerEntityService()); - server.registerClientEntityService(new EhcacheClientEntityService()); - server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); - server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - server.registerServiceProvider(new OffHeapResourcesProvider(), - new OffHeapResourcesConfiguration(getOffheapResourcesType("test", 32, MemoryUnit.MB))); - - UnitTestConnectionService.addServerToStripe(STRIPENAME, server); - } + server -> { + server.registerServerEntityService(new EhcacheServerEntityService()); + server.registerClientEntityService(new EhcacheClientEntityService()); + server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); + server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); + server.registerServiceProvider(new OffHeapResourcesProvider(), + new OffHeapResourcesConfiguration(getOffheapResourcesType("test", 
32, MemoryUnit.MB))); + + UnitTestConnectionService.addServerToStripe(STRIPENAME, server); } ); @@ -218,22 +209,4 @@ public void testDestroyServerStoreIsNotReplicatedIfFailsOnActive() throws Except } - @Test - public void testCreateServerStoreIsNotReplicatedIsFailsOnActive() throws Exception { - - } - - private static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { - Field entity = clusteringService.getClass().getDeclaredField("entity"); - entity.setAccessible(true); - return (EhcacheClientEntity)entity.get(clusteringService); - } - - private static ServerStoreConfiguration getServerStoreConfiguration(String resourceName) { - ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(resourceName, 8, MB); - return new ServerStoreConfiguration(resourcePool.getPoolAllocation(), - String.class.getName(), String.class.getName(), null, null, CompactJavaSerializer.class.getName(), CompactJavaSerializer.class - .getName(), Consistency.STRONG); - } - } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java new file mode 100644 index 0000000000..2ce2199136 --- /dev/null +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ReplicationUtil.java @@ -0,0 +1,49 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.client.replication; + +import org.ehcache.clustered.client.config.ClusteredResourcePool; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.internal.EhcacheClientEntity; +import org.ehcache.clustered.client.service.ClusteringService; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.impl.serialization.CompactJavaSerializer; + +import java.lang.reflect.Field; + +import static org.ehcache.config.units.MemoryUnit.MB; + +public class ReplicationUtil { + + private ReplicationUtil() { + + } + + public static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { + Field entity = clusteringService.getClass().getDeclaredField("entity"); + entity.setAccessible(true); + return (EhcacheClientEntity)entity.get(clusteringService); + } + + public static ServerStoreConfiguration getServerStoreConfiguration(String resourceName) { + ClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(resourceName, 8, MB); + return new ServerStoreConfiguration(resourcePool.getPoolAllocation(), + String.class.getName(), String.class.getName(), null, null, CompactJavaSerializer.class.getName(), CompactJavaSerializer.class + .getName(), Consistency.STRONG); + } +} diff --git a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java b/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java index 33a97bd18c..60aa1eb619 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/server/ObservableEhcacheServerEntityService.java @@ -43,6 +43,7 @@ public class ObservableEhcacheServerEntityService private final EhcacheServerEntityService delegate = new EhcacheServerEntityService(); private final List servedActiveEntities = new ArrayList(); + private final List servedPassiveEntities = new ArrayList<>(); /** * Gets a list of {@link ObservableEhcacheActiveEntity} instances wrapping the @@ -58,6 +59,14 @@ public List getServedActiveEntities() throws NoSu return Collections.unmodifiableList(observables); } + public List getServedPassiveEntities() throws Exception { + List observables = new ArrayList<>(servedPassiveEntities.size()); + for (EhcachePassiveEntity servedPassiveEntity : servedPassiveEntities) { + observables.add(new ObservableEhcachePassiveEntity(servedPassiveEntity)); + } + return Collections.unmodifiableList(observables); + } + @Override public long getVersion() { return delegate.getVersion(); @@ -76,8 +85,10 @@ public EhcacheActiveEntity createActiveEntity(ServiceRegistry registry, byte[] c } @Override - public PassiveServerEntity createPassiveEntity(ServiceRegistry registry, byte[] configuration) { - return delegate.createPassiveEntity(registry, configuration); + public EhcachePassiveEntity createPassiveEntity(ServiceRegistry registry, byte[] configuration) { + EhcachePassiveEntity passiveEntity = delegate.createPassiveEntity(registry, configuration); + servedPassiveEntities.add(passiveEntity); + return passiveEntity; } @Override @@ -143,4 +154,23 @@ public Map getClientsWaitingForInvalidation() throws Exception { return (Map)field.get(activeEntity); } } + + public static final class ObservableEhcachePassiveEntity { + private 
final EhcachePassiveEntity passiveEntity; + private final EhcacheStateServiceImpl ehcacheStateService; + + private ObservableEhcachePassiveEntity(EhcachePassiveEntity passiveEntity) throws Exception { + this.passiveEntity = passiveEntity; + Field field = passiveEntity.getClass().getDeclaredField("ehcacheStateService"); + field.setAccessible(true); + this.ehcacheStateService = (EhcacheStateServiceImpl)field.get(passiveEntity); + } + + public Map getMessageTrackerMap() throws Exception { + Field field = this.ehcacheStateService.getClientMessageTracker().getClass().getDeclaredField("messageTrackers"); + field.setAccessible(true); + return (Map)field.get(this.ehcacheStateService.getClientMessageTracker()); + } + + } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 3fc79e0287..fb9f0f09e4 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -27,6 +27,7 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.ehcache.clustered.common.Consistency; @@ -129,6 +130,7 @@ class EhcacheActiveEntity implements ActiveServerEntity> inflightInvalidations; private final Management management; + private final AtomicBoolean reconnectComplete = new AtomicBoolean(true); static class InvalidationHolder { final ClientDescriptor clientDescriptorWaitingForInvalidation; @@ -280,6 +282,8 @@ public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEn " Check your server configuration and define at least one offheap resource."); } + clearClientTrackedAtReconnectComplete(); + if (message instanceof EhcacheOperationMessage) { EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; EhcacheMessageType messageType = operationMessage.getMessageType(); @@ -302,6 +306,17 @@ public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEn } } + private void clearClientTrackedAtReconnectComplete() { + + if (!reconnectComplete.get()) { + boolean success = reconnectComplete.compareAndSet(false, true); + if (success) { + ehcacheStateService.getClientMessageTracker().reconcileTrackedClients(trackedClients); + } + } + + } + @Override public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedReconnectData) { if (inflightInvalidations == null) { @@ -389,6 +404,7 @@ public void loadExisting() { LOGGER.debug("Preparing for handling Inflight Invalidations and independent Passive Evictions in loadExisting"); inflightInvalidations = new ConcurrentHashMap<>(); addInflightInvalidationsForEventualCaches(); + reconnectComplete.set(false); } private void addInflightInvalidationsForEventualCaches() { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java index cc1fc92fe4..0cfebf71f5 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java @@ -24,7 +24,6 @@ import org.terracotta.entity.EntityServerService; import 
org.terracotta.entity.ExecutionStrategy; import org.terracotta.entity.MessageCodec; -import org.terracotta.entity.PassiveServerEntity; import org.terracotta.entity.ServiceRegistry; import static org.ehcache.clustered.server.ConcurrencyStrategies.defaultConcurrency; @@ -52,7 +51,7 @@ public EhcacheActiveEntity createActiveEntity(ServiceRegistry registry, byte[] c } @Override - public PassiveServerEntity createPassiveEntity(ServiceRegistry registry, byte[] configuration) { + public EhcachePassiveEntity createPassiveEntity(ServiceRegistry registry, byte[] configuration) { return new EhcachePassiveEntity(registry, configuration, DEFAULT_MAPPER); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java index 425d4fd270..e5e351ad67 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java @@ -21,6 +21,7 @@ import com.tc.classloader.CommonComponent; +import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -90,4 +91,8 @@ public boolean isConfigureApplicable(UUID clientId, long timestamp) { return true; } + public void reconcileTrackedClients(Set trackedClients) { + messageTrackers.entrySet().removeIf(x -> !trackedClients.contains(x)); + } + } From ff3117ccd321632e73712a3ed5390d91f1f1de5d Mon Sep 17 00:00:00 2001 From: Abhilash Date: Mon, 28 Nov 2016 13:24:42 +0530 Subject: [PATCH 168/218] Don't send tracked client during sync #1591 --- .../clustered/server/EhcacheActiveEntity.java | 5 +- .../server/EhcachePassiveEntity.java | 4 +- .../messages/EhcacheStateSyncMessage.java | 9 +-- .../messages/EhcacheSyncMessageCodec.java | 32 +-------- .../server/state/ClientMessageTracker.java | 2 +- .../server/EhcacheActiveEntityTest.java | 1 - .../messages/EhcacheSyncMessageCodecTest.java | 9 +-- .../state/ClientMessageTrackerTest.java | 65 +++++++++++++++++++ 8 files changed, 73 insertions(+), 54 deletions(-) create mode 100644 clustered/server/src/test/java/org/ehcache/clustered/server/state/ClientMessageTrackerTest.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index fb9f0f09e4..2d9f8ccd96 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -309,8 +309,7 @@ public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEn private void clearClientTrackedAtReconnectComplete() { if (!reconnectComplete.get()) { - boolean success = reconnectComplete.compareAndSet(false, true); - if (success) { + if (reconnectComplete.compareAndSet(false, true)) { ehcacheStateService.getClientMessageTracker().reconcileTrackedClients(trackedClients); } } @@ -380,7 +379,7 @@ public void synchronizeKeyToPassive(PassiveSynchronizationChannel { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index a6ec7eefcf..e20573791e 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ 
b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -164,7 +164,7 @@ private void untrackHashInvalidationForEventualCache(InvalidationCompleteMessage if (count == 1) { return null; } - return count--; + return count - 1; }); } @@ -175,7 +175,7 @@ private void trackHashInvalidationForEventualCache(ChainReplicationMessage retir if (count == null) { return 1; } else { - return count++; + return count + 1; } }); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java index 605d7b2f9d..d73e5bec77 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheStateSyncMessage.java @@ -31,14 +31,11 @@ public class EhcacheStateSyncMessage extends EhcacheSyncMessage implements Seria private final ServerSideConfiguration configuration; private final Map storeConfigs; - private final Set trackedClients; public EhcacheStateSyncMessage(final ServerSideConfiguration configuration, - final Map storeConfigs, - final Set trackedClients) { + final Map storeConfigs) { this.configuration = configuration; this.storeConfigs = storeConfigs; - this.trackedClients = trackedClients; } public ServerSideConfiguration getConfiguration() { @@ -49,10 +46,6 @@ public Map getStoreConfigs() { return storeConfigs; } - public Set getTrackedClients() { - return trackedClients; - } - @Override public SyncMessageType getMessageType() { return SyncMessageType.STATE; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java index 5de875935a..ee90fa114a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java @@ -34,17 +34,12 @@ import java.nio.ByteBuffer; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; -import java.util.Set; -import java.util.UUID; import static org.ehcache.clustered.common.internal.messages.ChainCodec.CHAIN_STRUCT; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.CONSISTENCY_ENUM_MAPPING; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.DEFAULT_RESOURCE_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOLS_SUB_STRUCT; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_NAME_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_RESOURCE_NAME_FIELD; @@ -67,7 +62,6 @@ public class EhcacheSyncMessageCodec implements SyncMessageCodec { - uuidEncoder.int64(MSB_UUID_FIELD, uuid.getMostSignificantBits()); - uuidEncoder.int64(LSB_UUID_FIELD, uuid.getLeastSignificantBits()); - }); return encoder.encode().array(); case DATA: encoder = 
DATA_SYNC_STRUCT.encoder(); @@ -176,8 +160,7 @@ public EhcacheSyncMessage decode(final int concurrencyKey, final byte[] payload) case STATE: ServerSideConfiguration configuration = decodeServerSideConfiguration(decoder); Map storeConfigs = decodeStoreConfigurations(decoder); - Set trackedClients = decodeTrackedClients(decoder); - return new EhcacheStateSyncMessage(configuration, storeConfigs, trackedClients); + return new EhcacheStateSyncMessage(configuration, storeConfigs); case DATA: message.rewind(); decoder = DATA_SYNC_STRUCT.decoder(message); @@ -190,19 +173,6 @@ public EhcacheSyncMessage decode(final int concurrencyKey, final byte[] payload) } } - private Set decodeTrackedClients(StructDecoder decoder) { - Set result = new HashSet<>(); - StructArrayDecoder clientsDecoder = decoder.structs(CLIENTS_SUB_STRUCT); - - if (clientsDecoder != null) { - for (int i = 0; i < clientsDecoder.length(); i++) { - result.add(new UUID(clientsDecoder.int64(MSB_UUID_FIELD), clientsDecoder.int64(LSB_UUID_FIELD))); - clientsDecoder.next(); - } - } - return result; - } - private Map decodeStoreConfigurations(StructDecoder decoder) { Map result = new HashMap<>(); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java index e5e351ad67..afa1513bfe 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/ClientMessageTracker.java @@ -92,7 +92,7 @@ public boolean isConfigureApplicable(UUID clientId, long timestamp) { } public void reconcileTrackedClients(Set trackedClients) { - messageTrackers.entrySet().removeIf(x -> !trackedClients.contains(x)); + messageTrackers.keySet().retainAll(trackedClients); } } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index fdd9fc0266..e7e9e8f57c 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -2647,7 +2647,6 @@ public void testSyncToPassive() throws Exception { Map storeConfigs = capturedSyncMessage.getStoreConfigs(); assertThat(storeConfigs.keySet(), containsInAnyOrder("myCache")); assertThat(storeConfigs.get("myCache").getPoolAllocation(), instanceOf(PoolAllocation.Shared.class)); - assertThat(capturedSyncMessage.getTrackedClients(), containsInAnyOrder(CLIENT_ID)); } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java index 7c44c30c08..f582621bbc 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java @@ -60,19 +60,12 @@ public void testStateSyncMessageEncodeDecode() throws Exception { storeConfigs.put("cache1", serverStoreConfiguration1); storeConfigs.put("cache2", serverStoreConfiguration2); - UUID clientId1 = UUID.randomUUID(); - UUID clientId2 = UUID.randomUUID(); - Set clientIds = new HashSet<>(); - clientIds.add(clientId1); - clientIds.add(clientId2); - - 
EhcacheStateSyncMessage message = new EhcacheStateSyncMessage(serverSideConfig, storeConfigs, clientIds); + EhcacheStateSyncMessage message = new EhcacheStateSyncMessage(serverSideConfig, storeConfigs); EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(); EhcacheStateSyncMessage decodedMessage = (EhcacheStateSyncMessage) codec.decode(0, codec.encode(0, message)); assertThat(decodedMessage.getConfiguration().getDefaultServerResource(), is("default-pool")); assertThat(decodedMessage.getConfiguration().getResourcePools(), is(sharedPools)); - assertThat(decodedMessage.getTrackedClients(), is(clientIds)); assertThat(decodedMessage.getStoreConfigs().keySet(), containsInAnyOrder("cache1", "cache2")); ServerStoreConfiguration serverStoreConfiguration = decodedMessage.getStoreConfigs().get("cache1"); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/ClientMessageTrackerTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/state/ClientMessageTrackerTest.java new file mode 100644 index 0000000000..2074a209c1 --- /dev/null +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/state/ClientMessageTrackerTest.java @@ -0,0 +1,65 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.server.state; + +import org.junit.Test; + +import java.lang.reflect.Field; +import java.util.Collections; +import java.util.Map; +import java.util.UUID; + +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +public class ClientMessageTrackerTest { + + @Test + public void testReconcilationOfClients() throws Exception { + + ClientMessageTracker clientMessageTracker = new ClientMessageTracker(); + UUID clientId = UUID.randomUUID(); + clientMessageTracker.applied(20L, clientId); + + clientMessageTracker.reconcileTrackedClients(Collections.singleton(clientId)); + + Map messageTracker = getMessageTracker(clientMessageTracker); + assertThat(messageTracker.size(), is(1)); + + clientMessageTracker.reconcileTrackedClients(Collections.singleton(UUID.randomUUID())); + + assertThat(messageTracker.size(), is(0)); + + } + + @Test + public void testClientsAreTrackedLazily() throws Exception { + + ClientMessageTracker clientMessageTracker = new ClientMessageTracker(); + Map messageTracker = getMessageTracker(clientMessageTracker); + assertThat(messageTracker.size(), is(0)); + clientMessageTracker.applied(20L, UUID.randomUUID()); + assertThat(messageTracker.size(), is(1)); + + } + + private Map getMessageTracker(ClientMessageTracker clientMessageTracker) throws Exception { + Field field = clientMessageTracker.getClass().getDeclaredField("messageTrackers"); + field.setAccessible(true); + return (Map)field.get(clientMessageTracker); + } +} From 24fe02e0eaa53061391e01754c7943bf06f197a2 Mon Sep 17 00:00:00 2001 From: Chris Bradley Date: Tue, 22 Nov 2016 11:22:46 +0100 Subject: [PATCH 169/218] Small text change --- docs/src/docs/asciidoc/user/serializers-copiers.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/docs/asciidoc/user/serializers-copiers.adoc b/docs/src/docs/asciidoc/user/serializers-copiers.adoc index c59d9d40d7..f188361a8f 100644 --- a/docs/src/docs/asciidoc/user/serializers-copiers.adoc +++ b/docs/src/docs/asciidoc/user/serializers-copiers.adoc @@ -9,7 +9,7 @@ ifdef::notBuildingForSite[] include::menu.adoc[] endif::notBuildingForSite[] -While Ehcache is a Java cache, it cannot always store its mapping as Java objects. +While Ehcache is a Java cache, it cannot always store its mappings as Java objects. The <> is capable of storing cached objects either by reference (where the given key and value references are stored) or by value (where a copy of the given key and value are made and those copies are then stored). From a7d27e0d81752eee0884687040ac675b2d6a4a66 Mon Sep 17 00:00:00 2001 From: Chris Bradley Date: Tue, 22 Nov 2016 13:33:29 +0100 Subject: [PATCH 170/218] did a language check --- docs/src/docs/asciidoc/user/usermanaged.adoc | 48 +++++++++++++------- 1 file changed, 32 insertions(+), 16 deletions(-) diff --git a/docs/src/docs/asciidoc/user/usermanaged.adoc b/docs/src/docs/asciidoc/user/usermanaged.adoc index db61e594fa..de5a90e805 100644 --- a/docs/src/docs/asciidoc/user/usermanaged.adoc +++ b/docs/src/docs/asciidoc/user/usermanaged.adoc @@ -25,16 +25,30 @@ Of course, if you find yourself requiring plenty of services, maybe the cache ma === API extensions -While a `UserManagedCache` extends `Cache`, it offers additional methods: +If you use a `UserManagedCache`, you need to configure all required services by hand. 
+The `UserManagedCache` class extends the `Cache` class by offering additional methods: + +* `init()` - initializes the cache +* `close()` - releases the cache resources +* `getStatus()` - returns a status + +The `init` and `close` methods deal with the lifecycle of the cache and need to be called explicitly, whereas these methods are hidden when the cache is inside a `CacheManager`. + +The interface definition is shown in this code: [source,java,indent=0] ---- include::{sourcedir31}/api/src/main/java/org/ehcache/UserManagedCache.java[lines=17..-1] ---- -As can be seen, these methods deal with the lifecycle of the cache and need to be called explicitly. +=== User Managed Persistent Cache -There is also the following interface which comes into play when a user managed persistent cache is created: +A user managed persistent cache holds cached data in a persistent store such as disk, so that the stored data can outlive the JVM in which your caching application runs. +If you want to create a user managed persistent cache, there is an additional interface `PersistentUserManagedCache` that extends `UserManagedCache` and adds the `destroy` method. +The `destroy` method deletes all data structures, including data stored persistently on disk, for a `PersistentUserManagedCache`. +The `destroy` method deals with the lifecycle of the cache and needs to be called explicitly. + +The interface definition is shown in this code: [source,java,indent=0] ---- @@ -42,19 +56,21 @@ include::{sourcedir31}/api/src/main/java/org/ehcache/PersistentUserManagedCache. ---- -== Getting started with user managed caches +== Code examples for User Managed Caches + +=== Example of a basic cache lifecycle -=== Starting example with lifecycle +Here is a simple example showing a basic lifecycle of a user managed cache: [source,java,indent=0] ---- include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=userManagedCacheExample] ---- -<1> Create a `UserManagedCache` instance, again you can either have the builder `init()` it for you, passing true, or -<2> pass false and it is up to you to `UserManagedCache.init()` them, prior to using them. +<1> Create a `UserManagedCache` instance. You can either pass "true" to have the builder `init()` it for you, or you can pass "false" and it is up to you to `init()` it prior to using it. +<2> Since "false" was passed in step 1, you have to `init()` the `UserManagedCache` prior to using it. <3> You can use the cache exactly as a managed cache -<4> In the same vein, a `UserManagedCache` requires you to `UserManagedCache.close()` it explicitly. If you also use - managed caches simultaneously, the `CacheManager.close()` operation does not impact the user managed cache(s). +<4> In the same vein, a `UserManagedCache` requires you to close it explicitly using `UserManagedCache.close()`. +If you are also using managed caches simultaneously, the `CacheManager.close()` operation does not impact the user managed cache(s). From this basic example, explore the API of `UserManagedCacheBuilder` to find all the directly available features. 
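+A minimal sketch of that lifecycle, assuming the `org.ehcache.config.builders.UserManagedCacheBuilder` API referenced above (illustrative only, not the snippet included from `UserManagedCaches.java`), looks roughly like this:
+
+[source,java]
+----
+UserManagedCache<Long, String> userManagedCache =
+    UserManagedCacheBuilder.newUserManagedCacheBuilder(Long.class, String.class)
+        .build(false);                 // pass false: the builder does not init() the cache for you
+userManagedCache.init();               // explicit lifecycle: initialize before use
+userManagedCache.put(1L, "one");       // use it like any other Cache
+String value = userManagedCache.get(1L);
+userManagedCache.close();              // explicit lifecycle: release resources when done
+----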
@@ -76,17 +92,17 @@ If you want to use a disk persistent cache, you will need to create and lifecycl include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/UserManagedCaches.java[tag=persistentUserManagedCache] ---- <1> Create the persistence service to be used by the cache for storing data on disk -<2> Pass the persistence service to the builder as well as an id for the cache - note that this will make the builder produce a more specific type: `PersistentUserManagedCache` +<2> Pass the persistence service to the builder as well as a name for the cache. Note that this will make the builder produce a more specific type: `PersistentUserManagedCache` <3> As usual, indicate here if the data should outlive the cache -<4> Closing the cache will not delete the data it saved on disk when marked as persistent. -<5> To delete the data, after closing the cache, destroy has to be explicitly invoked. -<6> It is also your responsibility to stop the persistence service once you are done with the cache. +<4> Closing the cache will not delete the data it saved on disk, since the cache is marked as persistent. +<5> To delete the data on disk after closing the cache, you need to invoke the `destroy` method explicitly. +<6> You need to stop the persistence service once you have finished using the cache. === Example with cache event listeners Cache event listeners require executor services to work. You will have to provide either a `CacheEventDispatcher` implementation -or make use of the default one by providing two executor services: one for ordered events and one for un-ordered ones. +or make use of the default one by providing two executor services: one for ordered events and one for unordered ones. NOTE: The ordered events executor must be single threaded to guarantee ordering. @@ -96,5 +112,5 @@ For more information on cache event listeners, see < Provide ExecutorService for ordered and unordered events delivery. -<2> Provide listener configuration using CacheEventListenerConfigurationBuilder. +<1> Provide the `ExecutorService` for ordered and unordered event delivery. +<2> Provide listener configuration using `CacheEventListenerConfigurationBuilder`. From b749d931fde80bc54b61ea35c0d0e1ffae20c758 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Wed, 30 Nov 2016 12:50:55 -0500 Subject: [PATCH 171/218] Use latest offheap-store. Faster allocation. 
Close #1451 --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 238b6def0d..225df4c58c 100644 --- a/build.gradle +++ b/build.gradle @@ -21,7 +21,7 @@ ext { baseVersion = findProperty('overrideVersion') ?: '3.2.0-SNAPSHOT' // Third parties - offheapVersion = '2.3.1' + offheapVersion = '2.3.2' statisticVersion = '1.4.1' jcacheVersion = '1.0.0' slf4jVersion = '1.7.7' From 1d4f1e21386f8695b4e7e94120a3563111d342c8 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Wed, 30 Nov 2016 21:56:53 -0500 Subject: [PATCH 172/218] Fix class name typo --- ...sicClusteredCacheOpsReplicationWithMultipleClientsTest.java} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename clustered/integration-test/src/test/java/org/ehcache/clustered/replication/{BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java => BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java} (99%) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java similarity index 99% rename from clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java rename to clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java index b6a358017d..1a0f0fbee3 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMulitpleClientsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java @@ -62,7 +62,7 @@ * The point of this test is to assert proper data read after fail-over handling. */ @RunWith(Parameterized.class) -public class BasicClusteredCacheOpsReplicationWithMulitpleClientsTest { +public class BasicClusteredCacheOpsReplicationWithMultipleClientsTest { private static final String RESOURCE_CONFIG = "" From 387165c2aff48a9b23764eb32944070069a76e53 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Thu, 1 Dec 2016 14:51:18 +0100 Subject: [PATCH 173/218] :construction: #1662 Add logging configuration --- .../src/test/resources/simplelogger.properties | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 clustered/integration-test/src/test/resources/simplelogger.properties diff --git a/clustered/integration-test/src/test/resources/simplelogger.properties b/clustered/integration-test/src/test/resources/simplelogger.properties new file mode 100644 index 0000000000..f6142f6c45 --- /dev/null +++ b/clustered/integration-test/src/test/resources/simplelogger.properties @@ -0,0 +1,18 @@ +# +# Copyright Terracotta, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +org.slf4j.simpleLogger.showDateTime=true +org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss.SSS From 0275b00a0452e680a6602dd6e26baf3cca1092dc Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 28 Nov 2016 10:45:46 +0100 Subject: [PATCH 174/218] :construction: Fixes #1662 Update to offheap-resource as an ExtendedConfiguration --- build.gradle | 10 +-- .../internal/UnitTestConnectionService.java | 3 +- ...usteredStateRepositoryReplicationTest.java | 4 +- .../ActivePassiveClientIdTest.java | 11 +--- ...leMessageActivePassvieReplicationTest.java | 12 ++-- .../clustered/BasicClusteredCacheOpsTest.java | 4 +- .../clustered/BasicEntityInteractionTest.java | 10 ++- ...anagerLifecycleEhcacheIntegrationTest.java | 4 +- ...cheClientEntityFactoryIntegrationTest.java | 4 +- .../clustered/JCacheClusteredTest.java | 4 +- .../clustered/TerminatedServerTest.java | 4 +- .../AbstractClusteringManagementTest.java | 4 +- .../EhcacheConfigWithManagementTest.java | 4 +- ...dCacheOpsReplicationMultiThreadedTest.java | 4 +- ...BasicClusteredCacheOpsReplicationTest.java | 4 +- ...OpsReplicationWithMultipleClientsTest.java | 4 +- .../BasicLifeCyclePassiveReplicationTest.java | 4 +- .../clustered/sync/PassiveSyncTest.java | 4 +- .../clustered/server/EhcacheActiveEntity.java | 14 +---- .../server/EhcachePassiveEntity.java | 14 +---- .../server/EhcacheStateServiceImpl.java | 21 +++---- .../state/EhcacheStateServiceProvider.java | 27 +++++++- .../config/EhcacheStateServiceConfig.java | 9 +-- .../server/EhcacheActiveEntityTest.java | 38 ++++------- .../server/EhcachePassiveEntityTest.java | 27 ++++---- .../EhcacheStateServiceProviderTest.java | 63 +++++++++++++++---- 26 files changed, 164 insertions(+), 147 deletions(-) diff --git a/build.gradle b/build.gradle index 225df4c58c..fe34a46eb4 100644 --- a/build.gradle +++ b/build.gradle @@ -28,15 +28,15 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.12.beta2' + terracottaPlatformVersion = '5.0.13.beta' managementVersion = terracottaPlatformVersion - terracottaApisVersion = '1.0.12.beta' - terracottaCoreVersion = '5.0.12-beta' + terracottaApisVersion = '1.0.13.beta' + terracottaCoreVersion = '5.0.13-beta' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.12.beta' + terracottaPassthroughTestingVersion = '1.0.13.beta' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.12-beta2' + galvanVersion = '1.0.13-beta' // Tools findbugsVersion = '3.0.1' diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java index 6a9476793b..4ea7daa922 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java @@ -55,7 +55,6 @@ import org.terracotta.entity.ServiceProviderConfiguration; import org.terracotta.exception.EntityNotFoundException; import org.terracotta.exception.EntityNotProvidedException; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; import org.terracotta.offheapresource.OffHeapResourcesProvider; import org.terracotta.offheapresource.config.MemoryUnit; import org.terracotta.offheapresource.config.OffheapResourcesType; @@ -358,7 +357,7 @@ public PassthroughServer build() { } if 
(!this.resources.getResource().isEmpty()) { - newServer.registerServiceProvider(new OffHeapResourcesProvider(), new OffHeapResourcesConfiguration(this.resources)); + newServer.registerExtendedConfiguration(new OffHeapResourcesProvider(this.resources)); } for (Map.Entry entry : serviceProviders.entrySet()) { diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java index 6f904bfd3e..a13b0534e0 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/service/ClusteredStateRepositoryReplicationTest.java @@ -29,7 +29,6 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; import org.terracotta.offheapresource.OffHeapResourcesProvider; import org.terracotta.offheapresource.config.MemoryUnit; import org.terracotta.passthrough.PassthroughClusterControl; @@ -59,8 +58,7 @@ public void registerServicesForServer(PassthroughServer server) { server.registerClientEntityService(new EhcacheClientEntityService()); server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - server.registerServiceProvider(new OffHeapResourcesProvider(), - new OffHeapResourcesConfiguration(getOffheapResourcesType("test", 32, MemoryUnit.MB))); + server.registerExtendedConfiguration(new OffHeapResourcesProvider(getOffheapResourcesType("test", 32, MemoryUnit.MB))); UnitTestConnectionService.addServerToStripe(STRIPENAME, server); } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java index c30dd9ceff..cd48e0a5c4 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/ActivePassiveClientIdTest.java @@ -24,19 +24,12 @@ import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLockEntityClientService; import org.ehcache.clustered.client.internal.service.ClusteringServiceFactory; import org.ehcache.clustered.client.service.ClusteringService; -import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.lock.server.VoltronReadWriteLockServerEntityService; import org.ehcache.clustered.server.ObservableEhcacheServerEntityService; import org.ehcache.clustered.server.ObservableEhcacheServerEntityService.ObservableEhcachePassiveEntity; -import org.ehcache.config.EvictionAdvisor; -import org.ehcache.config.ResourcePools; -import org.ehcache.core.spi.store.Store; -import org.ehcache.expiry.Expiry; -import org.ehcache.spi.serialization.Serializer; import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; import org.terracotta.offheapresource.OffHeapResourcesProvider; import org.terracotta.offheapresource.config.MemoryUnit; import org.terracotta.passthrough.PassthroughClusterControl; @@ -51,7 +44,6 @@ import static 
org.ehcache.clustered.client.replication.ReplicationUtil.getServerStoreConfiguration; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; public class ActivePassiveClientIdTest { @@ -69,8 +61,7 @@ public void setUp() throws Exception { server.registerClientEntityService(new EhcacheClientEntityService()); server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - server.registerServiceProvider(new OffHeapResourcesProvider(), - new OffHeapResourcesConfiguration(getOffheapResourcesType("test", 32, MemoryUnit.MB))); + server.registerExtendedConfiguration(new OffHeapResourcesProvider(getOffheapResourcesType("test", 32, MemoryUnit.MB))); UnitTestConnectionService.addServerToStripe(STRIPENAME, server); } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/LifeCycleMessageActivePassvieReplicationTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/LifeCycleMessageActivePassvieReplicationTest.java index e2a6cbaa91..bdd0c270d3 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/replication/LifeCycleMessageActivePassvieReplicationTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/replication/LifeCycleMessageActivePassvieReplicationTest.java @@ -34,7 +34,6 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.terracotta.offheapresource.OffHeapResourcesConfiguration; import org.terracotta.offheapresource.OffHeapResourcesProvider; import org.terracotta.offheapresource.config.MemoryUnit; import org.terracotta.passthrough.PassthroughClusterControl; @@ -60,12 +59,11 @@ public class LifeCycleMessageActivePassvieReplicationTest { public void setUp() throws Exception { this.clusterControl = PassthroughTestHelpers.createActivePassive(STRIPENAME, server -> { - server.registerServerEntityService(new EhcacheServerEntityService()); - server.registerClientEntityService(new EhcacheClientEntityService()); - server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); - server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); - server.registerServiceProvider(new OffHeapResourcesProvider(), - new OffHeapResourcesConfiguration(getOffheapResourcesType("test", 32, MemoryUnit.MB))); + server.registerServerEntityService(new EhcacheServerEntityService()); + server.registerClientEntityService(new EhcacheClientEntityService()); + server.registerServerEntityService(new VoltronReadWriteLockServerEntityService()); + server.registerClientEntityService(new VoltronReadWriteLockEntityClientService()); + server.registerExtendedConfiguration(new OffHeapResourcesProvider(getOffheapResourcesType("test", 32, MemoryUnit.MB))); UnitTestConnectionService.addServerToStripe(STRIPENAME, server); } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java index 837925a296..2cffd52291 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicClusteredCacheOpsTest.java @@ -53,11 +53,11 @@ public class BasicClusteredCacheOpsTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + - "\n"; + "\n"; 
@ClassRule public static Cluster CLUSTER = diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java index dccd6ee58f..8ec25861a6 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/BasicEntityInteractionTest.java @@ -16,6 +16,7 @@ package org.ehcache.clustered; import java.io.File; +import java.util.Collections; import java.util.UUID; import org.ehcache.clustered.client.internal.EhcacheClientEntity; import org.junit.BeforeClass; @@ -35,8 +36,15 @@ public class BasicEntityInteractionTest { + private static final String RESOURCE_CONFIG = + "" + + "" + + "4" + + "" + + "\n"; + @ClassRule - public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1); + public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); @BeforeClass public static void waitForActive() throws Exception { diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java index 0cd052ba89..ee1d81f9b1 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/CacheManagerLifecycleEhcacheIntegrationTest.java @@ -60,11 +60,11 @@ public class CacheManagerLifecycleEhcacheIntegrationTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + - "\n"; + "\n"; @ClassRule public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java index ad45ab7d26..733a9ece8d 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/EhcacheClientEntityFactoryIntegrationTest.java @@ -46,11 +46,11 @@ public class EhcacheClientEntityFactoryIntegrationTest { private static final Map EMPTY_RESOURCE_MAP = Collections.emptyMap(); private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + - "\n"; + "\n"; @ClassRule public static Cluster CLUSTER = diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java index 16c069e811..7b8eab483f 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/JCacheClusteredTest.java @@ -39,11 +39,11 @@ public class JCacheClusteredTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + - "\n"; + "\n"; @ClassRule public static Cluster CLUSTER = new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); diff --git 
a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java index 09c9f21467..0eea41355b 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/TerminatedServerTest.java @@ -124,11 +124,11 @@ public static void setConcurrency() { } private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "" + - "\n"; + "\n"; private static Map OLD_PROPERTIES; diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 29e4949058..872a46e467 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -78,12 +78,12 @@ public abstract class AbstractClusteringManagementTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "64" + "" + - "\n"; + "\n"; protected static CacheManager cacheManager; protected static ClientIdentifier ehcacheClientIdentifier; diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java index 3ceb0111fd..40a31887ac 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/EhcacheConfigWithManagementTest.java @@ -43,12 +43,12 @@ public class EhcacheConfigWithManagementTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "64" + "64" + "" + - "\n"; + "\n"; private static final List MANAGEMENT_PLUGINS = Stream.of(System.getProperty("managementPlugins", "").split(File.pathSeparator)) .map(File::new) diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java index 885389e71d..a7cd8dca36 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationMultiThreadedTest.java @@ -73,11 +73,11 @@ public class BasicClusteredCacheOpsReplicationMultiThreadedTest { private static final int NUM_OF_THREADS = 10; private static final int JOB_SIZE = 100; private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + - "\n"; + "\n"; private static PersistentCacheManager CACHE_MANAGER1; private static PersistentCacheManager CACHE_MANAGER2; diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java index d913daa0ea..8002f649b2 100644 --- 
a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationTest.java @@ -58,11 +58,11 @@ public class BasicClusteredCacheOpsReplicationTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + - "\n"; + "\n"; private static PersistentCacheManager CACHE_MANAGER; private static Cache CACHE1; diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java index 1a0f0fbee3..d7f58afae2 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicClusteredCacheOpsReplicationWithMultipleClientsTest.java @@ -65,11 +65,11 @@ public class BasicClusteredCacheOpsReplicationWithMultipleClientsTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + - "\n"; + "\n"; private static PersistentCacheManager CACHE_MANAGER1; private static PersistentCacheManager CACHE_MANAGER2; diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java index 17199bbc38..0cf67d8d48 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java @@ -56,11 +56,11 @@ public class BasicLifeCyclePassiveReplicationTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + - "\n"; + "\n"; @ClassRule public static Cluster CLUSTER = diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java index 14ae43ee24..a6da1f2a9a 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/sync/PassiveSyncTest.java @@ -48,11 +48,11 @@ public class PassiveSyncTest { private static final String RESOURCE_CONFIG = - "" + "" + "" + "16" + "" + - "\n"; + "\n"; @ClassRule public static Cluster CLUSTER = diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 2d9f8ccd96..d5683faea9 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -106,7 +106,6 @@ class EhcacheActiveEntity implements ActiveServerEntity offHeapResourceIdentifiers; /** * Tracks the state of a connected client. 
An entry is added to this map when the @@ -173,13 +172,7 @@ public Class getServiceType() { this.identity = ClusteredEhcacheIdentity.deserialize(config); this.responseFactory = new EhcacheEntityResponseFactory(); this.clientCommunicator = services.getService(new CommunicatorServiceConfiguration()); - OffHeapResources offHeapResources = services.getService(new OffHeapResourcesServiceConfiguration()); - if (offHeapResources == null) { - this.offHeapResourceIdentifiers = Collections.emptySet(); - } else { - this.offHeapResourceIdentifiers = offHeapResources.getAllIdentifiers(); - } - ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, this.offHeapResourceIdentifiers, mapper)); + ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, mapper)); if (ehcacheStateService == null) { throw new AssertionError("Server failed to retrieve EhcacheStateService."); } @@ -277,11 +270,6 @@ public void disconnected(ClientDescriptor clientDescriptor) { @Override public EhcacheEntityResponse invoke(ClientDescriptor clientDescriptor, EhcacheEntityMessage message) { try { - if (this.offHeapResourceIdentifiers.isEmpty()) { - throw new ServerMisconfigurationException("Server started without any offheap resources defined." + - " Check your server configuration and define at least one offheap resource."); - } - clearClientTrackedAtReconnectComplete(); if (message instanceof EhcacheOperationMessage) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index e20573791e..68e7616db0 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -66,7 +66,6 @@ class EhcachePassiveEntity implements PassiveServerEntity offHeapResourceIdentifiers; private final EhcacheStateService ehcacheStateService; private final Management management; @@ -74,11 +73,6 @@ class EhcachePassiveEntity implements PassiveServerEntity(OffHeapResources.class)); - if (offHeapResources == null) { - this.offHeapResourceIdentifiers = Collections.emptySet(); - } else { - this.offHeapResourceIdentifiers = offHeapResources.getAllIdentifiers(); - } - ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, this.offHeapResourceIdentifiers, mapper)); + ehcacheStateService = services.getService(new EhcacheStateServiceConfig(services, mapper)); if (ehcacheStateService == null) { throw new AssertionError("Server failed to retrieve EhcacheStateService."); } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index ffc15c3035..ab2ed61e55 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -35,9 +35,9 @@ import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.exceptions.ResourceConfigurationException; import org.terracotta.context.TreeNode; -import org.terracotta.entity.ServiceRegistry; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; +import org.terracotta.offheapresource.OffHeapResources; import 
org.terracotta.offheapstore.paging.PageSource; import org.terracotta.statistics.StatisticsManager; @@ -53,6 +53,7 @@ import java.util.function.Function; import static java.util.stream.Collectors.toMap; +import static org.terracotta.offheapresource.OffHeapResourceIdentifier.identifier; public class EhcacheStateServiceImpl implements EhcacheStateService { @@ -84,8 +85,7 @@ public class EhcacheStateServiceImpl implements EhcacheStateService { STAT_POOL_METHOD_REFERENCES.put("allocatedSize", ResourcePageSource::getAllocatedSize); } - private final ServiceRegistry services; - private final Set offHeapResourceIdentifiers; + private final OffHeapResources offHeapResources; private volatile boolean configured = false; /** @@ -118,9 +118,8 @@ public class EhcacheStateServiceImpl implements EhcacheStateService { private final KeySegmentMapper mapper; - public EhcacheStateServiceImpl(ServiceRegistry services, Set offHeapResourceIdentifiers, final KeySegmentMapper mapper) { - this.services = services; - this.offHeapResourceIdentifiers = offHeapResourceIdentifiers; + public EhcacheStateServiceImpl(OffHeapResources offHeapResources, final KeySegmentMapper mapper) { + this.offHeapResources = offHeapResources; this.mapper = mapper; this.stateRepositoryManager = new StateRepositoryManager(); } @@ -227,9 +226,9 @@ public void configure(ServerSideConfiguration configuration) throws ClusterExcep this.defaultServerResource = configuration.getDefaultServerResource(); if (this.defaultServerResource != null) { - if (!offHeapResourceIdentifiers.contains(this.defaultServerResource)) { + if (!offHeapResources.getAllIdentifiers().contains(identifier(this.defaultServerResource))) { throw new ResourceConfigurationException("Default server resource '" + this.defaultServerResource - + "' is not defined. Available resources are: " + offHeapResourceIdentifiers); + + "' is not defined. 
Available resources are: " + offHeapResources.getAllIdentifiers()); } } @@ -270,10 +269,10 @@ private Map createPools(Map resou private void releasePool(String poolType, String poolName, ResourcePageSource resourcePageSource) { ServerSideConfiguration.Pool pool = resourcePageSource.getPool(); - OffHeapResource source = services.getService(OffHeapResourceIdentifier.identifier(pool.getServerResource())); + OffHeapResource source = offHeapResources.getOffHeapResource(identifier(pool.getServerResource())); if (source != null) { unRegisterPoolStatistics(resourcePageSource); source.release(pool.getSize()); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java index c0b63d5d0b..5ff4f636e9 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateServiceProvider.java @@ -18,16 +18,20 @@ import org.ehcache.clustered.server.EhcacheStateServiceImpl; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.terracotta.entity.PlatformConfiguration; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceProvider; import org.terracotta.entity.ServiceProviderCleanupException; import org.terracotta.entity.ServiceProviderConfiguration; +import org.terracotta.offheapresource.OffHeapResources; import com.tc.classloader.BuiltinService; import java.util.ArrayList; import java.util.Collection; +import java.util.Iterator; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -38,19 +42,40 @@ @BuiltinService public class EhcacheStateServiceProvider implements ServiceProvider { + private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheStateServiceProvider.class); + private ConcurrentMap serviceMap = new ConcurrentHashMap<>(); + private OffHeapResources offHeapResourcesProvider; @Override public boolean initialize(ServiceProviderConfiguration configuration, PlatformConfiguration platformConfiguration) { + Collection extendedConfiguration = platformConfiguration.getExtendedConfiguration(OffHeapResources.class); + if (extendedConfiguration.size() > 1) { + throw new UnsupportedOperationException("There are " + extendedConfiguration.size() + " OffHeapResourcesProvider, this is not supported. " + + "There must be only one!"); + } + Iterator iterator = extendedConfiguration.iterator(); + if (iterator.hasNext()) { + offHeapResourcesProvider = iterator.next(); + if (offHeapResourcesProvider.getAllIdentifiers().isEmpty()) { + throw new UnsupportedOperationException("There are no offheap-resource defined, this is not supported. 
There must be at least one!"); + } + } else { + LOGGER.warn("No offheap-resource defined - this will prevent provider from offering any EhcacheStateService."); + } return true; } @Override public T getService(long consumerID, ServiceConfiguration configuration) { if (configuration != null && configuration.getServiceType().equals(EhcacheStateService.class)) { + if (offHeapResourcesProvider == null) { + LOGGER.warn("EhcacheStateService requested but no offheap-resource was defined - returning null"); + return null; + } EhcacheStateServiceConfig stateServiceConfig = (EhcacheStateServiceConfig) configuration; EhcacheStateService storeManagerService = new EhcacheStateServiceImpl( - stateServiceConfig.getServiceRegistry(), stateServiceConfig.getOffHeapResourceIdentifiers(), stateServiceConfig.getMapper()); + offHeapResourcesProvider, stateServiceConfig.getMapper()); EhcacheStateService result = serviceMap.putIfAbsent(consumerID, storeManagerService); if (result == null) { result = storeManagerService; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java index fef84320a9..c3a36acd30 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/config/EhcacheStateServiceConfig.java @@ -29,14 +29,11 @@ public class EhcacheStateServiceConfig implements ServiceConfiguration { private final ServiceRegistry serviceRegistry; - private final Set offHeapResourceIdentifiers; private final KeySegmentMapper mapper; - public EhcacheStateServiceConfig(ServiceRegistry serviceRegistry, Set offHeapResourceIdentifiers, - final KeySegmentMapper mapper) { + public EhcacheStateServiceConfig(ServiceRegistry serviceRegistry, final KeySegmentMapper mapper) { this.serviceRegistry = serviceRegistry; - this.offHeapResourceIdentifiers = offHeapResourceIdentifiers; this.mapper = mapper; } @@ -49,10 +46,6 @@ public ServiceRegistry getServiceRegistry() { return this.serviceRegistry; } - public Set getOffHeapResourceIdentifiers() { - return this.offHeapResourceIdentifiers; - } - public KeySegmentMapper getMapper() { return mapper; } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index e7e9e8f57c..372d88b958 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -236,20 +236,6 @@ public void testDisconnectedSecond() throws Exception { assertThat(activeEntity.getInUseStores().isEmpty(), is(true)); } - @Test - public void testInteractionWithServerWithoutResources() throws Exception { - OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); - EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); - ClientDescriptor client = new TestClientDescriptor(); - activeEntity.connected(client); - - String expectedErrorMessage = "Server started without any offheap resources defined."; - assertFailure( - activeEntity.invoke(client, mock(EhcacheEntityMessage.class)), - ServerMisconfigurationException.class, expectedErrorMessage - ); - } - /** * Ensures basic shared resource pool configuration. 
*/ @@ -3153,24 +3139,24 @@ private static Set getIdentifiers(Set pools) @SuppressWarnings("unchecked") @Override public T getService(ServiceConfiguration serviceConfiguration) { - if (serviceConfiguration instanceof OffHeapResourceIdentifier) { - final OffHeapResourceIdentifier resourceIdentifier = (OffHeapResourceIdentifier) serviceConfiguration; - return (T) this.pools.get(resourceIdentifier); - } else if (serviceConfiguration.getServiceType().equals(ClientCommunicator.class)) { + if (serviceConfiguration.getServiceType().equals(ClientCommunicator.class)) { if (this.clientCommunicator == null) { this.clientCommunicator = mock(ClientCommunicator.class); } return (T) this.clientCommunicator; - } else if(serviceConfiguration.getServiceType().equals(OffHeapResources.class)) { - return (T) new OffHeapResources() { - @Override - public Set getAllIdentifiers() { - return getIdentifiers(pools.keySet()); - } - }; } else if (serviceConfiguration.getServiceType().equals(EhcacheStateService.class)) { if (storeManagerService == null) { - this.storeManagerService = new EhcacheStateServiceImpl(this, getIdentifiers(pools.keySet()), DEFAULT_MAPPER); + this.storeManagerService = new EhcacheStateServiceImpl(new OffHeapResources() { + @Override + public Set getAllIdentifiers() { + return pools.keySet(); + } + + @Override + public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) { + return pools.get(identifier); + } + }, DEFAULT_MAPPER); } return (T) (this.storeManagerService); } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index 92cea74fe8..b2319ae035 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -37,6 +37,8 @@ import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; +import org.terracotta.offheapresource.OffHeapResourcesProvider; +import org.terracotta.offheapresource.config.OffheapResourcesType; import org.terracotta.offheapstore.util.MemoryUnit; import java.util.Collections; @@ -55,6 +57,7 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class EhcachePassiveEntityTest { @@ -670,19 +673,19 @@ private static Set getIdentifiers(Set pools) @SuppressWarnings("unchecked") @Override public T getService(ServiceConfiguration serviceConfiguration) { - if (serviceConfiguration instanceof OffHeapResourceIdentifier) { - final OffHeapResourceIdentifier resourceIdentifier = (OffHeapResourceIdentifier) serviceConfiguration; - return (T) this.pools.get(resourceIdentifier); - } else if(serviceConfiguration.getServiceType().equals(OffHeapResources.class)) { - return (T) new OffHeapResources() { - @Override - public Set getAllIdentifiers() { - return getIdentifiers(pools.keySet()); - } - }; - } else if (serviceConfiguration.getServiceType().equals(EhcacheStateService.class)) { + if (serviceConfiguration.getServiceType().equals(EhcacheStateService.class)) { if (storeManagerService == null) { - this.storeManagerService = new EhcacheStateServiceImpl(this, getIdentifiers(pools.keySet()), 
DEFAULT_MAPPER); + this.storeManagerService = new EhcacheStateServiceImpl(new OffHeapResources() { + @Override + public Set getAllIdentifiers() { + return pools.keySet(); + } + + @Override + public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) { + return pools.get(identifier); + } + }, DEFAULT_MAPPER); } return (T) (this.storeManagerService); } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java index 1e35b83b46..8a49f88c54 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/state/EhcacheStateServiceProviderTest.java @@ -18,9 +18,20 @@ import org.ehcache.clustered.server.KeySegmentMapper; import org.ehcache.clustered.server.state.config.EhcacheStateServiceConfig; +import org.junit.Before; import org.junit.Test; +import org.terracotta.entity.PlatformConfiguration; import org.terracotta.entity.ServiceProviderCleanupException; import org.terracotta.entity.ServiceProviderConfiguration; +import org.terracotta.offheapresource.OffHeapResources; +import org.terracotta.offheapresource.OffHeapResourcesProvider; +import org.terracotta.offheapresource.config.MemoryUnit; +import org.terracotta.offheapresource.config.OffheapResourcesType; +import org.terracotta.offheapresource.config.ResourceType; + +import java.math.BigInteger; +import java.util.Collection; +import java.util.Collections; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; @@ -32,28 +43,57 @@ public class EhcacheStateServiceProviderTest { private static final KeySegmentMapper DEFAULT_MAPPER = new KeySegmentMapper(16); + private PlatformConfiguration platformConfiguration; + private ServiceProviderConfiguration serviceProviderConfiguration; + + @Before + public void setUp() { + ResourceType resource = new ResourceType(); + resource.setName("primary"); + resource.setUnit(MemoryUnit.MB); + resource.setValue(BigInteger.valueOf(4L)); + OffheapResourcesType configuration = new OffheapResourcesType(); + configuration.getResource().add(resource); + OffHeapResources offheapResources = new OffHeapResourcesProvider(configuration); + + platformConfiguration = new PlatformConfiguration() { + @Override + public String getServerName() { + return "Server1"; + } + + @Override + public Collection getExtendedConfiguration(Class type) { + if (OffHeapResources.class.isAssignableFrom(type)) { + return Collections.singletonList(type.cast(offheapResources)); + } + throw new UnsupportedOperationException("TODO Implement me!"); + } + }; + + serviceProviderConfiguration = mock(ServiceProviderConfiguration.class); + } + @Test public void testInitialize() { EhcacheStateServiceProvider serviceProvider = new EhcacheStateServiceProvider(); - - ServiceProviderConfiguration serviceProviderConfiguration = mock(ServiceProviderConfiguration.class); - - assertTrue(serviceProvider.initialize(serviceProviderConfiguration, null)); + assertTrue(serviceProvider.initialize(serviceProviderConfiguration, platformConfiguration)); } @Test public void testGetService() { EhcacheStateServiceProvider serviceProvider = new EhcacheStateServiceProvider(); + serviceProvider.initialize(serviceProviderConfiguration, platformConfiguration); - 
EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); + EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); assertNotNull(ehcacheStateService); - EhcacheStateService sameStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); + EhcacheStateService sameStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); assertSame(ehcacheStateService, sameStateService); - EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); + EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); assertNotNull(anotherStateService); assertNotSame(ehcacheStateService, anotherStateService); @@ -63,14 +103,15 @@ public void testGetService() { @Test public void testClear() throws ServiceProviderCleanupException { EhcacheStateServiceProvider serviceProvider = new EhcacheStateServiceProvider(); + serviceProvider.initialize(serviceProviderConfiguration, platformConfiguration); - EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); - EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); + EhcacheStateService ehcacheStateService = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); + EhcacheStateService anotherStateService = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); serviceProvider.prepareForSynchronization(); - EhcacheStateService ehcacheStateServiceAfterClear = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); - EhcacheStateService anotherStateServiceAfterClear = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, null, DEFAULT_MAPPER)); + EhcacheStateService ehcacheStateServiceAfterClear = serviceProvider.getService(1L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); + EhcacheStateService anotherStateServiceAfterClear = serviceProvider.getService(2L, new EhcacheStateServiceConfig(null, DEFAULT_MAPPER)); assertNotSame(ehcacheStateService, ehcacheStateServiceAfterClear); assertNotSame(anotherStateService, anotherStateServiceAfterClear); From 336e9d4daed5b429104b121898d8e6ce6d5a8cab Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Wed, 23 Nov 2016 12:37:47 -0500 Subject: [PATCH 175/218] :art: Removed code (moved at upper level) --- .../providers/CacheBindingManagementProvider.java | 14 -------------- .../statistics/StandardEhcacheStatistics.java | 2 +- 2 files changed, 1 insertion(+), 15 deletions(-) diff --git a/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java b/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java index 6b559d19d1..02d53495e5 100644 --- a/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java +++ b/management/src/main/java/org/ehcache/management/providers/CacheBindingManagementProvider.java @@ -16,12 +16,7 @@ package org.ehcache.management.providers; import org.ehcache.management.ManagementRegistryServiceConfiguration; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; import 
org.terracotta.management.registry.AbstractManagementProvider; -import org.terracotta.management.registry.action.ExposedObject; - -import java.util.Collection; -import java.util.LinkedHashSet; public abstract class CacheBindingManagementProvider extends AbstractManagementProvider { @@ -35,13 +30,4 @@ public CacheBindingManagementProvider(ManagementRegistryServiceConfiguration reg @Override protected abstract ExposedCacheBinding wrap(CacheBinding managedObject); - @Override - public Collection getDescriptors() { - Collection capabilities = new LinkedHashSet(); - for (ExposedObject o : getExposedObjects()) { - capabilities.addAll(((ExposedCacheBinding) o).getDescriptors()); - } - return capabilities; - } - } diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java index 162ed2d8fe..c8e04fea29 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -76,7 +76,7 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { statisticsRegistry.registerSize("OccupiedByteSize", descriptor("occupiedMemory", singleton("tier"))); } - public Statistic queryStatistic(String fullStatisticName, long since) { + Statistic queryStatistic(String fullStatisticName, long since) { return statisticsRegistryMetadata.queryStatistic(fullStatisticName, since); } From 5b9b9e10c8335b4d23ff3751543e6ca467623b3d Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Wed, 23 Nov 2016 15:03:08 -0500 Subject: [PATCH 176/218] :bug: bad capability names were reported --- .../server/management/PoolSettingsManagementProvider.java | 2 +- .../management/ServerStoreSettingsManagementProvider.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java index b10cbc1ed2..3fc2627a98 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java @@ -43,7 +43,7 @@ class PoolSettingsManagementProvider extends AliasBindingManagementProvider getDescriptors() { Collection descriptors = super.getDescriptors(); descriptors.add(new Settings() - .set("type", "PoolSettingsManagementProvider") + .set("type", getCapabilityName()) .set("defaultServerResource", ehcacheStateService.getDefaultServerResource())); return descriptors; } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java index 0b4c66be94..34c5fc1820 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java @@ -38,7 +38,7 @@ class ServerStoreSettingsManagementProvider extends AliasBindingManagementProvid public Collection getDescriptors() { Collection descriptors = super.getDescriptors(); 
descriptors.add(new Settings() - .set("type", "ServerStoreSettingsManagementProvider") + .set("type", getCapabilityName()) .set("time", System.currentTimeMillis())); return descriptors; } From 8e25aacd40c7dc09437e3af9c02548cfcc4a2630 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Wed, 23 Nov 2016 15:03:41 -0500 Subject: [PATCH 177/218] :arrow_up: Upgrade to new tc-platform --- build.gradle | 2 +- .../AbstractClusteringManagementTest.java | 137 +++++------------- .../ClusteredStatisticsCountTest.java | 3 +- .../ClusteringManagementServiceTest.java | 38 ++--- .../clustered/server/EhcacheActiveEntity.java | 2 +- .../server/EhcachePassiveEntity.java | 2 +- .../server/management/Management.java | 45 ++++-- .../PoolSettingsManagementProvider.java | 5 +- .../PoolStatisticsManagementProvider.java | 9 +- ...ServerStoreSettingsManagementProvider.java | 3 +- ...rverStoreStatisticsManagementProvider.java | 5 +- .../server/EhcacheActiveEntityTest.java | 7 +- .../server/EhcachePassiveEntityTest.java | 13 +- .../management/SharedManagementService.java | 2 +- .../settings/EhcacheSettingsProvider.java | 5 +- .../statistics/StandardEhcacheStatistics.java | 2 +- .../DefaultManagementRegistryService.java | 5 +- .../DefaultSharedManagementService.java | 19 ++- .../java/org/ehcache/docs/ManagementTest.java | 4 +- .../actions/EhcacheActionProviderTest.java | 2 +- .../settings/EhcacheSettingsProviderTest.java | 2 +- .../EhcacheStatisticsProviderTest.java | 6 +- .../DefaultManagementRegistryServiceTest.java | 30 ++-- .../DefaultSharedManagementServiceTest.java | 16 +- 24 files changed, 169 insertions(+), 195 deletions(-) diff --git a/build.gradle b/build.gradle index fe34a46eb4..e94aafbe7c 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.13.beta' + terracottaPlatformVersion = '5.0.13.beta2' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.13.beta' terracottaCoreVersion = '5.0.13-beta' diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java index 872a46e467..9a4a5b6a86 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/AbstractClusteringManagementTest.java @@ -30,14 +30,10 @@ import org.junit.Rule; import org.junit.rules.Timeout; import org.terracotta.connection.Connection; -import org.terracotta.management.entity.management.ManagementAgentConfig; -import org.terracotta.management.entity.management.client.ManagementAgentEntityFactory; -import org.terracotta.management.entity.management.client.ManagementAgentService; import org.terracotta.management.entity.tms.TmsAgentConfig; import org.terracotta.management.entity.tms.client.TmsAgentEntity; import org.terracotta.management.entity.tms.client.TmsAgentEntityFactory; -import org.terracotta.management.model.call.ContextualReturn; -import org.terracotta.management.model.call.Parameter; +import org.terracotta.management.entity.tms.client.TmsAgentService; import org.terracotta.management.model.cluster.Client; import org.terracotta.management.model.cluster.ClientIdentifier; import org.terracotta.management.model.cluster.ServerEntityIdentifier; @@ -45,23 +41,22 @@ import org.terracotta.management.model.message.Message; 
import org.terracotta.management.model.notification.ContextualNotification; import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.registry.collect.StatisticConfiguration; import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; import java.io.File; import java.io.FileNotFoundException; -import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Scanner; -import java.util.concurrent.Exchanger; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.Stream; import static java.util.Arrays.asList; +import static java.util.concurrent.TimeUnit.SECONDS; import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredShared; import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; @@ -69,11 +64,8 @@ import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; public abstract class AbstractClusteringManagementTest { @@ -90,7 +82,7 @@ public abstract class AbstractClusteringManagementTest { protected static ServerEntityIdentifier ehcacheServerEntityIdentifier; protected static ObjectMapper mapper = new ObjectMapper(); - protected static TmsAgentEntity tmsAgentEntity; + protected static TmsAgentService tmsAgentService; protected static ServerEntityIdentifier tmsServerEntityIdentifier; private static final List MANAGEMENT_PLUGINS = System.getProperty("managementPlugins") == null ? 
@@ -111,7 +103,15 @@ public static void beforeClass() throws Exception { // simulate a TMS client Connection managementConnection = CLUSTER.newConnection(); TmsAgentEntityFactory entityFactory = new TmsAgentEntityFactory(managementConnection, AbstractClusteringManagementTest.class.getName()); - tmsAgentEntity = entityFactory.retrieveOrCreate(new TmsAgentConfig()); + TmsAgentEntity tmsAgentEntity = entityFactory.retrieveOrCreate(new TmsAgentConfig() + .setStatisticConfiguration(new StatisticConfiguration( + 60, SECONDS, + 100, 1, SECONDS, + 10, SECONDS + ))); + tmsAgentService = new TmsAgentService(tmsAgentEntity); + tmsAgentService.setOperationTimeout(5, TimeUnit.SECONDS); + tmsServerEntityIdentifier = readTopology() .activeServerEntityStream() .filter(serverEntity -> serverEntity.getType().equals(TmsAgentConfig.ENTITY_TYPE)) @@ -210,53 +210,23 @@ public static void afterClass() throws Exception { @Before public void init() throws Exception { - if (tmsAgentEntity != null) { + if (tmsAgentService != null) { readMessages(); } } protected static org.terracotta.management.model.cluster.Cluster readTopology() throws Exception { - return tmsAgentEntity.readTopology().get(); + return tmsAgentService.readTopology(); } protected static List readMessages() throws Exception { - return tmsAgentEntity.readMessages().get(); + return tmsAgentService.readMessages(); } - protected static ContextualReturn sendManagementCallOnClientToCollectStats(String... statNames) throws Exception { - Connection managementConnection = CLUSTER.newConnection(); - try { - ManagementAgentService agent = new ManagementAgentService(new ManagementAgentEntityFactory(managementConnection).retrieveOrCreate(new ManagementAgentConfig())); - - final AtomicReference managementCallId = new AtomicReference<>(); - final Exchanger> exchanger = new Exchanger<>(); - - agent.setContextualReturnListener((from, id, aReturn) -> { - try { - assertEquals(ehcacheClientIdentifier, from); - assertEquals(managementCallId.get(), id); - exchanger.exchange(aReturn); - } catch (InterruptedException e) { - fail("interrupted"); - } - }); - - managementCallId.set(agent.call( - ehcacheClientIdentifier, - Context.create("cacheManagerName", "my-super-cache-manager"), - "StatisticCollectorCapability", - "updateCollectedStatistics", - Void.TYPE, - new Parameter("StatisticsCapability"), - new Parameter(asList(statNames), Collection.class.getName()))); - - ContextualReturn contextualReturn = exchanger.exchange(null); - assertThat(contextualReturn.hasExecuted(), is(true)); - - return contextualReturn; - } finally { - managementConnection.close(); - } + protected static void sendManagementCallOnClientToCollectStats(String... 
statNames) throws Exception { + Context ehcacheClient = readTopology().getClient(ehcacheClientIdentifier).get().getContext() + .with("cacheManagerName", "my-super-cache-manager"); + tmsAgentService.updateCollectedStatistics(ehcacheClient, "StatisticsCapability", asList(statNames)).waitForReturn(); } protected static List waitForNextStats() throws Exception { @@ -303,58 +273,23 @@ protected static String normalizeForLineEndings(String stringToNormalize) { } private static void sendManagementCallOnEntityToCollectStats() throws Exception { - Context context = readTopology().getSingleStripe().getActiveServerEntity(tmsServerEntityIdentifier).get().getContext(); - - ContextualReturn result = tmsAgentEntity.call( - context, - "StatisticCollectorCapability", - "updateCollectedStatistics", - Void.TYPE, - new Parameter("PoolStatistics"), - new Parameter(asList( - "Pool:AllocatedSize" - ), Collection.class.getName()) - ).get(); - - assertThat(result.hasExecuted(), is(true)); - - result = tmsAgentEntity.call( - context, - "StatisticCollectorCapability", - "updateCollectedStatistics", - Void.TYPE, - new Parameter("ServerStoreStatistics"), - new Parameter(asList( - "Store:AllocatedMemory", - "Store:DataAllocatedMemory", - "Store:OccupiedMemory", - "Store:DataOccupiedMemory", - "Store:Entries", - "Store:UsedSlotCount", - "Store:DataVitalMemory", - "Store:VitalMemory", - "Store:ReprobeLength", - "Store:RemovedSlotCount", - "Store:DataSize", - "Store:TableCapacity" - ), Collection.class.getName()) - ).get(); - - assertThat(result.hasExecuted(), is(true)); - - result = tmsAgentEntity.call( - context, - "StatisticCollectorCapability", - "updateCollectedStatistics", - Void.TYPE, - new Parameter("OffHeapResourceStatistics"), - new Parameter(asList( - "OffHeapResource:AllocatedMemory" - ), Collection.class.getName()) - ).get(); - - assertThat(result.hasExecuted(), is(true)); + tmsAgentService.updateCollectedStatistics(context, "PoolStatistics", asList("Pool:AllocatedSize")).waitForReturn(); + tmsAgentService.updateCollectedStatistics(context, "ServerStoreStatistics", asList( + "Store:AllocatedMemory", + "Store:DataAllocatedMemory", + "Store:OccupiedMemory", + "Store:DataOccupiedMemory", + "Store:Entries", + "Store:UsedSlotCount", + "Store:DataVitalMemory", + "Store:VitalMemory", + "Store:ReprobeLength", + "Store:RemovedSlotCount", + "Store:DataSize", + "Store:TableCapacity" + )).waitForReturn(); + tmsAgentService.updateCollectedStatistics(context, "OffHeapResourceStatistics", asList("OffHeapResource:AllocatedMemory")).waitForReturn(); } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java index 8d617067b0..d495e429a1 100755 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsCountTest.java @@ -16,7 +16,6 @@ package org.ehcache.clustered.management; import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; import java.util.List; import org.ehcache.Cache; @@ -60,7 +59,7 @@ public void countTest() throws Exception { // get the stats (we are getting the primitive counter, not the sample history) List stats = waitForNextStats(); for (ContextualStatistics stat : stats) { - if (stat.getContext().get("cacheName").equals("dedicated-cache-1")) { + if 
(stat.getContext().contains("cacheName") && stat.getContext().get("cacheName").equals("dedicated-cache-1")) { Sample[] samplesCacheHitCount = stat.getStatistic(CounterHistory.class, "Cache:HitCount").getValue(); if(samplesCacheHitCount.length > 0) { diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index e6361eeba6..6ef1f06b0d 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -66,7 +66,7 @@ public class ClusteringManagementServiceTest extends AbstractClusteringManagemen @Test @Ignore("This is not a test, but something useful to show a json print of a cluster topology with all management metadata inside") public void test_A_topology() throws Exception { - Cluster cluster = tmsAgentEntity.readTopology().get(); + Cluster cluster = tmsAgentService.readTopology(); String json = mapper.writeValueAsString(cluster.toMap()); System.out.println(json); } @@ -93,13 +93,14 @@ public void test_C_client_capabilities_exposed() throws Exception { Capability[] capabilities = readTopology().getClient(ehcacheClientIdentifier).get().getManagementRegistry().get().getCapabilities().toArray(new Capability[0]); assertThat(capabilities.length, equalTo(5)); assertThat(capabilities[0].getName(), equalTo("ActionsCapability")); - assertThat(capabilities[1].getName(), equalTo("StatisticsCapability")); - assertThat(capabilities[2].getName(), equalTo("StatisticCollectorCapability")); - assertThat(capabilities[3].getName(), equalTo("SettingsCapability")); - assertThat(capabilities[4].getName(), equalTo("ManagementAgentService")); + assertThat(capabilities[1].getName(), equalTo("ManagementAgentService")); + assertThat(capabilities[2].getName(), equalTo("SettingsCapability")); + assertThat(capabilities[3].getName(), equalTo("StatisticCollectorCapability")); + assertThat(capabilities[4].getName(), equalTo("StatisticsCapability")); + assertThat(capabilities[0].getDescriptors(), hasSize(4)); - Collection descriptors = capabilities[1].getDescriptors(); + Collection descriptors = capabilities[4].getDescriptors(); Collection allDescriptors = new ArrayList<>(); allDescriptors.addAll(CACHE_DESCRIPTORS); allDescriptors.addAll(ONHEAP_DESCRIPTORS); @@ -117,20 +118,19 @@ public void test_D_server_capabilities_exposed() throws Exception { assertThat(capabilities.length, equalTo(5)); assertThat(capabilities[0].getName(), equalTo("ClientStateSettings")); - assertThat(capabilities[1].getName(), equalTo("ServerStoreSettings")); - assertThat(capabilities[2].getName(), equalTo("PoolSettings")); - assertThat(capabilities[3].getName(), equalTo("ServerStoreStatistics")); - assertThat(capabilities[4].getName(), equalTo("PoolStatistics")); - + assertThat(capabilities[1].getName(), equalTo("PoolSettings")); + assertThat(capabilities[2].getName(), equalTo("PoolStatistics")); + assertThat(capabilities[3].getName(), equalTo("ServerStoreSettings")); + assertThat(capabilities[4].getName(), equalTo("ServerStoreStatistics")); - assertThat(capabilities[1].getDescriptors(), hasSize(4)); // time descriptor + 3 dedicated store + assertThat(capabilities[3].getDescriptors(), hasSize(4)); // time descriptor + 3 dedicated store // stats - assertThat(capabilities[3].getDescriptors(), 
containsInAnyOrder(SERVER_STORE_DESCRIPTORS.toArray())); - assertThat(capabilities[3].getDescriptors(), hasSize(SERVER_STORE_DESCRIPTORS.size())); - assertThat(capabilities[4].getDescriptors(), containsInAnyOrder(POOL_DESCRIPTORS.toArray())); - assertThat(capabilities[4].getDescriptors(), hasSize(POOL_DESCRIPTORS.size())); + assertThat(capabilities[4].getDescriptors(), containsInAnyOrder(SERVER_STORE_DESCRIPTORS.toArray())); + assertThat(capabilities[4].getDescriptors(), hasSize(SERVER_STORE_DESCRIPTORS.size())); + assertThat(capabilities[2].getDescriptors(), containsInAnyOrder(POOL_DESCRIPTORS.toArray())); + assertThat(capabilities[2].getDescriptors(), hasSize(POOL_DESCRIPTORS.size())); // ClientStateSettings @@ -138,9 +138,9 @@ public void test_D_server_capabilities_exposed() throws Exception { Settings settings = (Settings) capabilities[0].getDescriptors().iterator().next(); assertThat(settings.get("attachedStores"), equalTo(new String[]{"dedicated-cache-1", "shared-cache-2", "shared-cache-3"})); - // EhcacheStateServiceSettings + // ServerStoreSettings - List descriptors = new ArrayList<>(capabilities[2].getDescriptors()); + List descriptors = new ArrayList<>(capabilities[1].getDescriptors()); assertThat(descriptors, hasSize(4)); settings = (Settings) descriptors.get(0); @@ -165,7 +165,7 @@ public void test_D_server_capabilities_exposed() throws Exception { assertThat(settings.get("allocationType"), equalTo("dedicated")); settings = (Settings) descriptors.get(3); - assertThat(settings.get("type"), equalTo("PoolSettingsManagementProvider")); + assertThat(settings.get("type"), equalTo("PoolSettings")); assertThat(settings.get("defaultServerResource"), equalTo("primary-server-resource")); // tms entity diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index d5683faea9..c70237cabb 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -180,7 +180,7 @@ public Class getServiceType() { if (entityMessenger == null) { throw new AssertionError("Server failed to retrieve IEntityMessenger service."); } - this.management = new Management(services, ehcacheStateService); + this.management = new Management(services, ehcacheStateService, true); } /** diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 68e7616db0..2d5bfbe0ea 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -105,7 +105,7 @@ public void invoke(EhcacheEntityMessage message) { if (ehcacheStateService == null) { throw new AssertionError("Server failed to retrieve EhcacheStateService."); } - management = new Management(services, ehcacheStateService); + management = new Management(services, ehcacheStateService, false); } private void invokeRetirementMessages(PassiveReplicationMessage message) throws ClusterException { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java index fd0d242621..d9c19dc0fd 100644 --- 
a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -21,13 +21,18 @@ import org.ehcache.clustered.server.state.EhcacheStateService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.terracotta.entity.BasicServiceConfiguration; import org.terracotta.entity.ClientDescriptor; import org.terracotta.entity.ServiceRegistry; import org.terracotta.management.model.context.Context; import org.terracotta.management.registry.collect.StatisticConfiguration; +import org.terracotta.management.service.monitoring.ActiveEntityMonitoringServiceConfiguration; import org.terracotta.management.service.monitoring.ConsumerManagementRegistry; import org.terracotta.management.service.monitoring.ConsumerManagementRegistryConfiguration; +import org.terracotta.management.service.monitoring.EntityMonitoringService; +import org.terracotta.management.service.monitoring.PassiveEntityMonitoringServiceConfiguration; import org.terracotta.management.service.monitoring.registry.provider.ClientBinding; +import org.terracotta.monitoring.IMonitoringProducer; import static java.util.concurrent.TimeUnit.SECONDS; @@ -35,22 +40,36 @@ public class Management { private static final Logger LOGGER = LoggerFactory.getLogger(Management.class); - // TODO: if a day we want to make that configurable, we can, and per provider, or globally as it is now - private final StatisticConfiguration statisticConfiguration = new StatisticConfiguration( - 60, SECONDS, - 100, 1, SECONDS, - 30, SECONDS - ); - private final ConsumerManagementRegistry managementRegistry; private final EhcacheStateService ehcacheStateService; - public Management(ServiceRegistry services, EhcacheStateService ehcacheStateService) { - managementRegistry = services.getService(new ConsumerManagementRegistryConfiguration(services)); + public Management(ServiceRegistry services, EhcacheStateService ehcacheStateService, boolean active) { this.ehcacheStateService = ehcacheStateService; + + // create an entity monitoring service that allows this entity to push some management information into voltron monitoring service + EntityMonitoringService entityMonitoringService; + if (active) { + entityMonitoringService = services.getService(new ActiveEntityMonitoringServiceConfiguration()); + } else { + IMonitoringProducer monitoringProducer = services.getService(new BasicServiceConfiguration<>(IMonitoringProducer.class)); + entityMonitoringService = monitoringProducer == null ? null : services.getService(new PassiveEntityMonitoringServiceConfiguration(monitoringProducer)); + } + + // create a management registry for this entity to handle exposed objects and stats + // if management-server distribution is on the classpath + managementRegistry = entityMonitoringService == null ? 
null : services.getService(new ConsumerManagementRegistryConfiguration(entityMonitoringService) + .setStatisticConfiguration(new StatisticConfiguration( + 60, SECONDS, + 100, 1, SECONDS, + 30, SECONDS + ))); + if (managementRegistry != null) { - // expose settings about attached stores - managementRegistry.addManagementProvider(new ClientStateSettingsManagementProvider()); + + if (active) { + // expose settings about attached stores + managementRegistry.addManagementProvider(new ClientStateSettingsManagementProvider()); + } // expose settings about server stores managementRegistry.addManagementProvider(new ServerStoreSettingsManagementProvider()); @@ -58,9 +77,9 @@ public Management(ServiceRegistry services, EhcacheStateService ehcacheStateServ managementRegistry.addManagementProvider(new PoolSettingsManagementProvider(ehcacheStateService)); // expose stats about server stores - managementRegistry.addManagementProvider(new ServerStoreStatisticsManagementProvider(statisticConfiguration)); + managementRegistry.addManagementProvider(new ServerStoreStatisticsManagementProvider()); // expose stats about pools - managementRegistry.addManagementProvider(new PoolStatisticsManagementProvider(ehcacheStateService, statisticConfiguration)); + managementRegistry.addManagementProvider(new PoolStatisticsManagementProvider(ehcacheStateService)); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java index 3fc2627a98..77bc499d1f 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolSettingsManagementProvider.java @@ -24,6 +24,7 @@ import org.terracotta.management.registry.action.RequiredContext; import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.stream.Collectors; @@ -40,8 +41,8 @@ class PoolSettingsManagementProvider extends AliasBindingManagementProvider getDescriptors() { - Collection descriptors = super.getDescriptors(); + public Collection getDescriptors() { + Collection descriptors = new ArrayList<>(super.getDescriptors()); descriptors.add(new Settings() .set("type", getCapabilityName()) .set("defaultServerResource", ehcacheStateService.getDefaultServerResource())); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java index 724ec913bf..cf57004aaa 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/PoolStatisticsManagementProvider.java @@ -22,7 +22,6 @@ import org.terracotta.management.registry.action.ExposedObject; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; -import org.terracotta.management.registry.collect.StatisticConfiguration; import org.terracotta.management.service.monitoring.registry.provider.AbstractExposedStatistics; import 
org.terracotta.management.service.monitoring.registry.provider.AbstractStatisticsManagementProvider; @@ -41,8 +40,8 @@ class PoolStatisticsManagementProvider extends AbstractStatisticsManagementProvi private final EhcacheStateService ehcacheStateService; - PoolStatisticsManagementProvider(EhcacheStateService ehcacheStateService, StatisticConfiguration statisticConfiguration) { - super(PoolBinding.class, statisticConfiguration); + PoolStatisticsManagementProvider(EhcacheStateService ehcacheStateService) { + super(PoolBinding.class); this.ehcacheStateService = ehcacheStateService; } @@ -62,11 +61,11 @@ protected StatisticsRegistry createStatisticsRegistry(PoolBinding managedObject) if (allocationType == PoolBinding.AllocationType.DEDICATED) { ResourcePageSource resourcePageSource = Objects.requireNonNull(ehcacheStateService.getDedicatedResourcePageSource(poolName)); - return getStatisticsService().createStatisticsRegistry(getStatisticConfiguration(), resourcePageSource); + return getStatisticsService().createStatisticsRegistry(resourcePageSource); } else { ResourcePageSource resourcePageSource = Objects.requireNonNull(ehcacheStateService.getSharedResourcePageSource(poolName)); - return getStatisticsService().createStatisticsRegistry(getStatisticConfiguration(), resourcePageSource); + return getStatisticsService().createStatisticsRegistry(resourcePageSource); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java index 34c5fc1820..e77d2543dd 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreSettingsManagementProvider.java @@ -23,6 +23,7 @@ import org.terracotta.management.registry.action.RequiredContext; import org.terracotta.management.service.monitoring.registry.provider.AliasBindingManagementProvider; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -36,7 +37,7 @@ class ServerStoreSettingsManagementProvider extends AliasBindingManagementProvid @Override public Collection getDescriptors() { - Collection descriptors = super.getDescriptors(); + Collection descriptors = new ArrayList<>(super.getDescriptors()); descriptors.add(new Settings() .set("type", getCapabilityName()) .set("time", System.currentTimeMillis())); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java index ec5faeac57..3d45382b95 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreStatisticsManagementProvider.java @@ -19,7 +19,6 @@ import org.terracotta.management.model.context.Context; import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; -import org.terracotta.management.registry.collect.StatisticConfiguration; import org.terracotta.management.service.monitoring.registry.provider.AbstractExposedStatistics; import org.terracotta.management.service.monitoring.registry.provider.AbstractStatisticsManagementProvider; @@ 
-33,8 +32,8 @@ @RequiredContext({@Named("consumerId"), @Named("type"), @Named("alias")}) class ServerStoreStatisticsManagementProvider extends AbstractStatisticsManagementProvider { - ServerStoreStatisticsManagementProvider(StatisticConfiguration statisticConfiguration) { - super(ServerStoreBinding.class, statisticConfiguration); + ServerStoreStatisticsManagementProvider() { + super(ServerStoreBinding.class); } @Override diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 372d88b958..7448fae1ee 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -29,7 +29,6 @@ import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.exceptions.ResourceBusyException; import org.ehcache.clustered.common.internal.exceptions.ResourceConfigurationException; -import org.ehcache.clustered.common.internal.exceptions.ServerMisconfigurationException; import org.ehcache.clustered.common.internal.messages.ConcurrentEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; @@ -57,7 +56,7 @@ import org.terracotta.entity.PassiveSynchronizationChannel; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; -import org.terracotta.management.service.monitoring.ConsumerManagementRegistry; +import org.terracotta.management.service.monitoring.ActiveEntityMonitoringServiceConfiguration; import org.terracotta.management.service.monitoring.ConsumerManagementRegistryConfiguration; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; @@ -3165,7 +3164,9 @@ public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) } return (T) this.entityMessenger; } else if(serviceConfiguration instanceof ConsumerManagementRegistryConfiguration) { - return (T) mock(ConsumerManagementRegistry.class); + return null; + } else if(serviceConfiguration instanceof ActiveEntityMonitoringServiceConfiguration) { + return null; } throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index b2319ae035..ff902db19e 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -29,16 +29,16 @@ import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; +import org.terracotta.entity.BasicServiceConfiguration; import org.terracotta.entity.IEntityMessenger; import org.terracotta.entity.ServiceConfiguration; import org.terracotta.entity.ServiceRegistry; -import org.terracotta.management.service.monitoring.ConsumerManagementRegistry; import org.terracotta.management.service.monitoring.ConsumerManagementRegistryConfiguration; +import org.terracotta.management.service.monitoring.PassiveEntityMonitoringServiceConfiguration; +import 
org.terracotta.monitoring.IMonitoringProducer; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; import org.terracotta.offheapresource.OffHeapResources; -import org.terracotta.offheapresource.OffHeapResourcesProvider; -import org.terracotta.offheapresource.config.OffheapResourcesType; import org.terracotta.offheapstore.util.MemoryUnit; import java.util.Collections; @@ -57,7 +57,6 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class EhcachePassiveEntityTest { @@ -691,7 +690,11 @@ public OffHeapResource getOffHeapResource(OffHeapResourceIdentifier identifier) } else if (serviceConfiguration.getServiceType().equals(IEntityMessenger.class)) { return (T) mock(IEntityMessenger.class); } else if(serviceConfiguration instanceof ConsumerManagementRegistryConfiguration) { - return (T) mock(ConsumerManagementRegistry.class); + return null; + } else if(serviceConfiguration instanceof PassiveEntityMonitoringServiceConfiguration) { + return null; + } else if(serviceConfiguration instanceof BasicServiceConfiguration && serviceConfiguration.getServiceType() == IMonitoringProducer.class) { + return null; } throw new UnsupportedOperationException("Registry.getService does not support " + serviceConfiguration.getClass().getName()); diff --git a/management/src/main/java/org/ehcache/management/SharedManagementService.java b/management/src/main/java/org/ehcache/management/SharedManagementService.java index f2a76eb007..aad2eef48d 100644 --- a/management/src/main/java/org/ehcache/management/SharedManagementService.java +++ b/management/src/main/java/org/ehcache/management/SharedManagementService.java @@ -44,6 +44,6 @@ public interface SharedManagementService extends CapabilityManagementSupport, Se * * @return a map of capabilities, where the key is the alias of the cache manager */ - Map> getCapabilities(); + Map> getCapabilitiesByContext(); } diff --git a/management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java b/management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java index 569db9f222..2be41b1c37 100644 --- a/management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java +++ b/management/src/main/java/org/ehcache/management/providers/settings/EhcacheSettingsProvider.java @@ -25,6 +25,7 @@ import org.terracotta.management.registry.action.Named; import org.terracotta.management.registry.action.RequiredContext; +import java.util.ArrayList; import java.util.Collection; @Named("SettingsCapability") @@ -44,8 +45,8 @@ protected ExposedCacheSettings wrap(CacheBinding cacheBinding) { } @Override - public Collection getDescriptors() { - Collection descriptors = super.getDescriptors(); + public Collection getDescriptors() { + Collection descriptors = new ArrayList(super.getDescriptors()); descriptors.add(cacheManagerSettings()); return descriptors; } diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java index c8e04fea29..babc41e9b5 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -81,7 +81,7 @@ 
class StandardEhcacheStatistics extends ExposedCacheBinding { } @Override - public Collection getDescriptors() { + public Collection getDescriptors() { return statisticsRegistryMetadata.getDescriptors(); } diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java b/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java index 927640ca46..c61d52ef85 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java +++ b/management/src/main/java/org/ehcache/management/registry/DefaultManagementRegistryService.java @@ -36,7 +36,7 @@ import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; import org.terracotta.management.model.context.ContextContainer; -import org.terracotta.management.registry.AbstractManagementRegistry; +import org.terracotta.management.registry.DefaultManagementRegistry; import org.terracotta.management.registry.ManagementProvider; import org.terracotta.statistics.StatisticsManager; @@ -48,7 +48,7 @@ import static org.ehcache.impl.internal.executor.ExecutorUtil.shutdownNow; @ServiceDependencies({CacheManagerProviderService.class, ExecutionService.class}) -public class DefaultManagementRegistryService extends AbstractManagementRegistry implements ManagementRegistryService, CacheManagerListener { +public class DefaultManagementRegistryService extends DefaultManagementRegistry implements ManagementRegistryService, CacheManagerListener { private final ManagementRegistryServiceConfiguration configuration; private volatile ScheduledExecutorService statisticsExecutor; @@ -60,6 +60,7 @@ public DefaultManagementRegistryService() { } public DefaultManagementRegistryService(ManagementRegistryServiceConfiguration configuration) { + super(null); // context container creation is overriden here this.configuration = configuration == null ? 
new DefaultManagementRegistryConfiguration() : configuration; } diff --git a/management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java b/management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java index bc2abc8a64..168de755e1 100644 --- a/management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java +++ b/management/src/main/java/org/ehcache/management/registry/DefaultSharedManagementService.java @@ -18,14 +18,14 @@ import org.ehcache.Cache; import org.ehcache.Status; import org.ehcache.core.events.CacheManagerListener; +import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.spi.store.InternalCacheManager; import org.ehcache.impl.internal.concurrent.ConcurrentHashMap; import org.ehcache.management.ManagementRegistryService; import org.ehcache.management.SharedManagementService; -import org.ehcache.spi.service.ServiceProvider; -import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceDependencies; +import org.ehcache.spi.service.ServiceProvider; import org.terracotta.management.model.capabilities.Capability; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.context.ContextContainer; @@ -53,7 +53,7 @@ public void start(final ServiceProvider serviceProvider) { final ManagementRegistryService managementRegistry = serviceProvider.getService(ManagementRegistryService.class); final Context cmContext = managementRegistry.getConfiguration().getContext(); final InternalCacheManager cacheManager = - serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); + serviceProvider.getService(CacheManagerProviderService.class).getCacheManager(); cacheManager.registerListener(new CacheManagerListener() { @Override @@ -102,8 +102,17 @@ public Map getContextContainers() { } @Override - public Map> getCapabilities() { - Map> capabilities = new HashMap>(); + public Collection getCapabilities() { + Collection capabilities = new ArrayList(); + for (ManagementRegistryService registryService : delegates.values()) { + capabilities.addAll(registryService.getCapabilities()); + } + return capabilities; + } + + @Override + public Map> getCapabilitiesByContext() { + Map> capabilities = new HashMap>(); for (Map.Entry entry : delegates.entrySet()) { capabilities.put(entry.getKey(), entry.getValue().getCapabilities()); } diff --git a/management/src/test/java/org/ehcache/docs/ManagementTest.java b/management/src/test/java/org/ehcache/docs/ManagementTest.java index 8b6a8a0b11..6a92f52ace 100644 --- a/management/src/test/java/org/ehcache/docs/ManagementTest.java +++ b/management/src/test/java/org/ehcache/docs/ManagementTest.java @@ -132,11 +132,11 @@ public void capabilitiesAndContexts() throws Exception { .build(true); - Collection capabilities = managementRegistry.getCapabilities(); // <1> + Collection capabilities = managementRegistry.getCapabilities(); // <1> Assert.assertThat(capabilities.isEmpty(), Matchers.is(false)); Capability capability = capabilities.iterator().next(); String capabilityName = capability.getName(); // <2> - Collection capabilityDescriptions = capability.getDescriptors(); // <3> + Collection capabilityDescriptions = capability.getDescriptors(); // <3> Assert.assertThat(capabilityDescriptions.isEmpty(), Matchers.is(false)); CapabilityContext capabilityContext = capability.getCapabilityContext(); Collection attributes = 
capabilityContext.getAttributes(); // <4> diff --git a/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java b/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java index e00af73e16..e7d311c08e 100644 --- a/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java +++ b/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java @@ -58,7 +58,7 @@ public void testDescriptions() throws Exception { ehcacheActionProvider.register(new CacheBinding("myCacheName1", mock(EhcacheWithLoaderWriter.class))); ehcacheActionProvider.register(new CacheBinding("myCacheName2", mock(EhcacheWithLoaderWriter.class))); - Collection descriptions = ehcacheActionProvider.getDescriptors(); + Collection descriptions = ehcacheActionProvider.getDescriptors(); assertThat(descriptions.size(), is(4)); assertThat(descriptions, (Matcher) containsInAnyOrder( new CallDescriptor("remove", "void", Collections.singletonList(new CallDescriptor.Parameter("key", "java.lang.Object"))), diff --git a/management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java b/management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java index fc628dbdaa..a6cb1630a8 100644 --- a/management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java +++ b/management/src/test/java/org/ehcache/management/providers/settings/EhcacheSettingsProviderTest.java @@ -118,7 +118,7 @@ public void test_standalone_ehcache() throws IOException { } private Capability getSettingsCapability() { - for (Capability capability : sharedManagementService.getCapabilities().values().iterator().next()) { + for (Capability capability : sharedManagementService.getCapabilitiesByContext().values().iterator().next()) { if (capability.getName().equals("SettingsCapability")) { return capability; } diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java index 9873687473..05731ecb9d 100644 --- a/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java @@ -65,18 +65,18 @@ public void testDescriptions() throws Exception { @Override protected ExposedCacheBinding wrap(CacheBinding cacheBinding) { StandardEhcacheStatistics mock = mock(StandardEhcacheStatistics.class); - Set descriptors = new HashSet(); + Collection descriptors = new HashSet(); descriptors.add(new StatisticDescriptor("aCounter", StatisticType.COUNTER)); descriptors.add(new StatisticDescriptor("aDuration", StatisticType.DURATION)); descriptors.add(new StatisticDescriptor("aSampledRate", StatisticType.RATE_HISTORY)); - when(mock.getDescriptors()).thenReturn(descriptors); + when(mock.getDescriptors()).thenReturn((Collection) descriptors); return mock; } }; ehcacheStatisticsProvider.register(new CacheBinding("cache-0", mock(EhcacheWithLoaderWriter.class))); - Collection descriptions = ehcacheStatisticsProvider.getDescriptors(); + Collection descriptions = ehcacheStatisticsProvider.getDescriptors(); assertThat(descriptions.size(), is(3)); assertThat(descriptions, (Matcher) containsInAnyOrder( new StatisticDescriptor("aCounter", 
StatisticType.COUNTER), diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index a2d6c75e63..f1edc56457 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -122,12 +122,13 @@ public void descriptorOnHeapTest() { assertThat(managementRegistry.getCapabilities(), hasSize(4)); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("SettingsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); - Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(); + Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(); Collection allDescriptors = new ArrayList(); allDescriptors.addAll(ONHEAP_DESCRIPTORS); allDescriptors.addAll(CACHE_DESCRIPTORS); @@ -157,12 +158,13 @@ public void descriptorOffHeapTest() { assertThat(managementRegistry.getCapabilities(), hasSize(4)); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("SettingsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); - Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(); + Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(); Collection allDescriptors = new ArrayList(); allDescriptors.addAll(ONHEAP_DESCRIPTORS); allDescriptors.addAll(OFFHEAP_DESCRIPTORS); @@ -195,12 +197,14 @@ public void descriptorDiskStoreTest() throws URISyntaxException { assertThat(managementRegistry.getCapabilities(), hasSize(4)); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("SettingsCapability")); assertThat(new 
ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("StatisticsCapability")); + + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); - Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(); + Collection descriptors = new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(); Collection allDescriptors = new ArrayList(); allDescriptors.addAll(ONHEAP_DESCRIPTORS); allDescriptors.addAll(DISK_DESCRIPTORS); @@ -233,15 +237,15 @@ public void testCanGetCapabilities() { assertThat(managementRegistry.getCapabilities(), hasSize(4)); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getName(), equalTo("ActionsCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("StatisticsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getName(), equalTo("SettingsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(2).getName(), equalTo("StatisticCollectorCapability")); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("SettingsCapability")); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getName(), equalTo("StatisticsCapability")); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getDescriptors(), hasSize(4)); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getDescriptors(), hasSize(ONHEAP_DESCRIPTORS.size() + CACHE_DESCRIPTORS.size())); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getDescriptors(), hasSize(ONHEAP_DESCRIPTORS.size() + CACHE_DESCRIPTORS.size())); assertThat(new ArrayList(managementRegistry.getCapabilities()).get(0).getCapabilityContext().getAttributes(), hasSize(2)); - assertThat(new ArrayList(managementRegistry.getCapabilities()).get(1).getCapabilityContext().getAttributes(), hasSize(2)); + assertThat(new ArrayList(managementRegistry.getCapabilities()).get(3).getCapabilityContext().getAttributes(), hasSize(2)); cacheManager1.close(); } diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java index d32fc46720..3d2bed7c98 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultSharedManagementServiceTest.java @@ -138,22 +138,24 @@ public void testSharedContexts() { @Test public void testSharedCapabilities() { - assertEquals(2, service.getCapabilities().size()); + assertEquals(2, service.getCapabilitiesByContext().size()); - Collection capabilities1 = service.getCapabilities().get(config1.getContext()); - Collection capabilities2 = service.getCapabilities().get(config2.getContext()); + Collection capabilities1 = service.getCapabilitiesByContext().get(config1.getContext()); + Collection capabilities2 = service.getCapabilitiesByContext().get(config2.getContext()); assertThat(capabilities1, hasSize(4)); assertThat(new ArrayList(capabilities1).get(0).getName(), equalTo("ActionsCapability")); - 
assertThat(new ArrayList(capabilities1).get(1).getName(), equalTo("StatisticsCapability"));
+    assertThat(new ArrayList(capabilities1).get(1).getName(), equalTo("SettingsCapability"));
     assertThat(new ArrayList(capabilities1).get(2).getName(), equalTo("StatisticCollectorCapability"));
-    assertThat(new ArrayList(capabilities1).get(3).getName(), equalTo("SettingsCapability"));
+    assertThat(new ArrayList(capabilities1).get(3).getName(), equalTo("StatisticsCapability"));
+
+    assertThat(capabilities2, hasSize(4));
     assertThat(new ArrayList(capabilities2).get(0).getName(), equalTo("ActionsCapability"));
-    assertThat(new ArrayList(capabilities2).get(1).getName(), equalTo("StatisticsCapability"));
+    assertThat(new ArrayList(capabilities2).get(1).getName(), equalTo("SettingsCapability"));
     assertThat(new ArrayList(capabilities2).get(2).getName(), equalTo("StatisticCollectorCapability"));
-    assertThat(new ArrayList(capabilities2).get(3).getName(), equalTo("SettingsCapability"));
+    assertThat(new ArrayList(capabilities2).get(3).getName(), equalTo("StatisticsCapability"));
   }

   @Test

From 55b24411e8dedcf575d9e437e3486f416ab33739 Mon Sep 17 00:00:00 2001
From: geoff gibson
Date: Wed, 23 Nov 2016 11:09:32 -0800
Subject: [PATCH 178/218] Close #1634

adds Hit and Miss tests for standalone and clustered
moves all static imports to top
adds checks for NaN
fixes typo in comment
---
 .../ClusteredStatisticsLatencyTest.java       | 139 +++++++++++++++
 .../providers/statistics/HitLatencyTest.java  | 163 ++++++++++++++++++
 .../providers/statistics/MissLatencyTest.java | 156 +++++++++++++++++
 .../providers/statistics/StatsUtil.java       |  73 +++++++-
 4 files changed, 529 insertions(+), 2 deletions(-)
 create mode 100755 clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java
 create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java
 create mode 100755 management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java

diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java
new file mode 100755
index 0000000000..6bb95ed9ea
--- /dev/null
+++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright Terracotta, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.ehcache.clustered.management; + + +import java.util.List; +import java.util.stream.Collectors; +import org.ehcache.Cache; +import org.hamcrest.Matchers; +import org.junit.Test; +import org.junit.Assert; +import org.terracotta.management.model.stats.ContextualStatistics; +import org.terracotta.management.model.stats.Sample; +import org.terracotta.management.model.stats.history.AverageHistory; +import org.terracotta.management.model.stats.history.DurationHistory; + +public class ClusteredStatisticsLatencyTest extends AbstractClusteringManagementTest { + + @Test + public void test() throws Exception { + + long cacheHitLatencyMin=0L; + long cacheHitLatencyMax=0L; + double cacheHitLatencyAvg=Double.NaN; + + long clusteredHitLatencyMin=0L; + long clusteredHitLatencyMax=0L; + double clusteredHitLatencyAvg=Double.NaN; + + long cacheMissLatencyMin=0L; + long cacheMissLatencyMax=0L; + double cacheMissLatencyAvg=Double.NaN; + + long clusteredMissLatencyMin=0L; + long clusteredMissLatencyMax=0L; + double clusteredMissLatencyAvg=Double.NaN; + + sendManagementCallOnClientToCollectStats("Cache:HitLatencyMinimum","Cache:HitLatencyMaximum","Cache:HitLatencyAverage", + "Clustered:HitLatencyMinimum","Clustered:HitLatencyMaximum","Clustered:HitLatencyAverage", + "Cache:MissLatencyMinimum","Cache:MissLatencyMaximum","Cache:MissLatencyAverage", + "Clustered:MissLatencyMinimum","Clustered:MissLatencyMaximum","Clustered:MissLatencyAverage"); + Thread.sleep(25000); + + Cache cache = cacheManager.getCache("dedicated-cache-1", String.class, String.class); + cache.put("one", "val1"); + cache.put("two", "val2"); + + cache.get("one"); //hit + cache.get("two"); //hit + + cache.get("three"); //miss + cache.get("four"); //miss + + + //It may take several seconds for the sampled stat values to be available and correct. + //In the meantime the default values will be available. + //Thus let's loop until the correct value we are expecting is available. 
+ do { + + // get the stats (we are getting the primitive counter, not the sample history) + List stats = waitForNextStats() + .stream() + .filter(statistics -> "dedicated-cache-1".equals(statistics.getContext().get("cacheName"))) + .collect(Collectors.toList());; + + for (ContextualStatistics stat : stats) { + if (stat.getContext().get("cacheName") != null && stat.getContext().get("cacheName").equals("dedicated-cache-1")) { + + //Cache HIT stats + cacheHitLatencyMin = getDurationHistorySampleValue(stat, "Cache:HitLatencyMinimum"); + cacheHitLatencyMax = getDurationHistorySampleValue(stat, "Cache:HitLatencyMaximum"); + cacheHitLatencyAvg = getAverageHistorySampleValue(stat, "Cache:HitLatencyAverage"); + + //Clustered HIT stats + clusteredHitLatencyMin = getDurationHistorySampleValue(stat, "Clustered:HitLatencyMinimum"); + clusteredHitLatencyMax = getDurationHistorySampleValue(stat, "Clustered:HitLatencyMaximum"); + clusteredHitLatencyAvg = getAverageHistorySampleValue(stat, "Clustered:HitLatencyAverage"); + + //Cache MISS stats + cacheMissLatencyMin = getDurationHistorySampleValue(stat, "Cache:MissLatencyMinimum"); + cacheMissLatencyMax = getDurationHistorySampleValue(stat, "Cache:MissLatencyMaximum"); + cacheMissLatencyAvg = getAverageHistorySampleValue(stat, "Cache:MissLatencyAverage"); + + //Clustered MISS stats + clusteredMissLatencyMin = getDurationHistorySampleValue(stat, "Clustered:MissLatencyMinimum"); + clusteredMissLatencyMax = getDurationHistorySampleValue(stat, "Clustered:MissLatencyMaximum"); + clusteredMissLatencyAvg = getAverageHistorySampleValue(stat, "Clustered:MissLatencyAverage"); + + } + } + } while(!Thread.currentThread().isInterrupted() && + ((cacheHitLatencyMin == 0L) || (cacheHitLatencyMax == 0L) || Double.isNaN(cacheHitLatencyAvg) || + (clusteredHitLatencyMin == 0L) || (clusteredHitLatencyMax == 0L) || Double.isNaN(clusteredHitLatencyAvg) || + (cacheMissLatencyMin == 0L) || (cacheMissLatencyMax == 0L) || Double.isNaN(cacheMissLatencyAvg) || + (clusteredMissLatencyMin == 0L) || (clusteredMissLatencyMax == 0L) || Double.isNaN(clusteredMissLatencyAvg))); + + + Assert.assertThat((double)cacheHitLatencyMin,Matchers.lessThanOrEqualTo(cacheHitLatencyAvg)); + Assert.assertThat((double)cacheHitLatencyMax,Matchers.greaterThanOrEqualTo(cacheHitLatencyAvg)); + Assert.assertThat((double)clusteredHitLatencyMin,Matchers.lessThanOrEqualTo(clusteredHitLatencyAvg)); + Assert.assertThat((double)clusteredHitLatencyMax,Matchers.greaterThanOrEqualTo(clusteredHitLatencyAvg)); + Assert.assertThat((double)cacheMissLatencyMin,Matchers.lessThanOrEqualTo(cacheMissLatencyAvg)); + Assert.assertThat((double)cacheMissLatencyMax,Matchers.greaterThanOrEqualTo(cacheMissLatencyAvg)); + Assert.assertThat((double)clusteredMissLatencyMin,Matchers.lessThanOrEqualTo(clusteredMissLatencyAvg)); + Assert.assertThat((double)clusteredMissLatencyMax,Matchers.greaterThanOrEqualTo(clusteredMissLatencyAvg)); + } + + private static long getDurationHistorySampleValue(ContextualStatistics stat, String statName) { + Sample[] samplesCacheHitLatencyMin = stat.getStatistic(DurationHistory.class, statName).getValue(); + if(samplesCacheHitLatencyMin.length > 0 && samplesCacheHitLatencyMin[samplesCacheHitLatencyMin.length - 1].getValue() != null) { + return samplesCacheHitLatencyMin[samplesCacheHitLatencyMin.length - 1].getValue(); + } + + return 0L; + } + + private static double getAverageHistorySampleValue(ContextualStatistics stat, String statName) { + Sample[] samplesCacheHitLatencyAvg = 
stat.getStatistic(AverageHistory.class, statName).getValue(); + if(samplesCacheHitLatencyAvg.length > 0 && !Double.isNaN(samplesCacheHitLatencyAvg[samplesCacheHitLatencyAvg.length - 1].getValue())) { + return samplesCacheHitLatencyAvg[samplesCacheHitLatencyAvg.length - 1].getValue(); + } + + return Double.NaN; + } + +} \ No newline at end of file diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java new file mode 100755 index 0000000000..db53482308 --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java @@ -0,0 +1,163 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; + +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.EntryUnit.ENTRIES; +import static org.ehcache.config.units.MemoryUnit.MB; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +@RunWith(Parameterized.class) +public class HitLatencyTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(10); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private static final EhcacheStatisticsProviderConfiguration EHCACHE_STATISTICS_PROVIDER_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES); + + private static final Long ITERATIONS = 10L; + private static final List HIT_LATENCY_MIN_STATS = Arrays.asList("OnHeap:HitLatencyMinimum","OffHeap:HitLatencyMinimum","Disk:HitLatencyMinimum"); + private static final List HIT_LATENCY_MAX_STATS = Arrays.asList("OnHeap:HitLatencyMaximum","OffHeap:HitLatencyMaximum","Disk:HitLatencyMaximum"); + private static final List 
HIT_LATENCY_AVG_STATS = Arrays.asList("OnHeap:HitLatencyAverage","OffHeap:HitLatencyAverage","Disk:HitLatencyAverage"); + + private final ResourcePools resources; + private final List hitLatencyMinStatNames; + private final List hitLatencyMaxStatNames; + private final List hitLatencyAvgStatNames; + + @Parameterized.Parameters + public static Collection data() { + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), HIT_LATENCY_MIN_STATS.subList(0,1), HIT_LATENCY_MAX_STATS.subList(0,1), HIT_LATENCY_AVG_STATS.subList(0,1)}, + { newResourcePoolsBuilder().offheap(1, MB), HIT_LATENCY_MIN_STATS.subList(1,2), HIT_LATENCY_MAX_STATS.subList(1,2), HIT_LATENCY_AVG_STATS.subList(1,2)}, + { newResourcePoolsBuilder().disk(1, MB), HIT_LATENCY_MIN_STATS.subList(2,3), HIT_LATENCY_MAX_STATS.subList(2,3), HIT_LATENCY_AVG_STATS.subList(2,3)}, + + //2 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), HIT_LATENCY_MIN_STATS.subList(0,2), HIT_LATENCY_MAX_STATS.subList(0,2), HIT_LATENCY_AVG_STATS.subList(0,2)}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList(HIT_LATENCY_MIN_STATS.get(0),HIT_LATENCY_MIN_STATS.get(2)), Arrays.asList(HIT_LATENCY_MAX_STATS.get(0),HIT_LATENCY_MAX_STATS.get(2)), Arrays.asList(HIT_LATENCY_AVG_STATS.get(0),HIT_LATENCY_AVG_STATS.get(2))}, + //offheap and disk configuration is not valid. Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tier + { newResourcePoolsBuilder().heap(1, ENTRIES).offheap(2, MB).disk(3, MB), HIT_LATENCY_MIN_STATS, HIT_LATENCY_MAX_STATS, HIT_LATENCY_AVG_STATS} + }); + } + + public HitLatencyTest(Builder resources, List hitLatencyMinStatNames, List hitLatencyMaxStatNames, List hitLatencyAvgStatNames) { + this.resources = resources.build(); + this.hitLatencyMinStatNames = hitLatencyMinStatNames; + this.hitLatencyMaxStatNames = hitLatencyMaxStatNames; + this.hitLatencyAvgStatNames = hitLatencyAvgStatNames; + } + + + @Test + public void test() throws IOException, InterruptedException { + CacheManager cacheManager = null; + + try { + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(EHCACHE_STATISTICS_PROVIDER_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "Cache:HitLatencyMinimum","Cache:HitLatencyMaximum","Cache:HitLatencyAverage", + "OnHeap:HitLatencyMinimum","OnHeap:HitLatencyMaximum","OnHeap:HitLatencyAverage", + "OffHeap:HitLatencyMinimum","OffHeap:HitLatencyMaximum","OffHeap:HitLatencyAverage", + "Disk:HitLatencyMinimum","Disk:HitLatencyMaximum","Disk:HitLatencyAverage"); + + for (Long i = 0L; i < ITERATIONS; i++) { + cache.put(i, String.valueOf(i)); + } + + //HITS to lowest tier + for (Long i = 0L; i < ITERATIONS; i++) { + cache.get(i); + } + + //HITS, depends on 
tiering so can be any tier + for (Long i = ITERATIONS-1; i >= 0; i--) { + cache.get(i); + } + + + for (int i = 0; i < hitLatencyMinStatNames.size(); i++) { + double tierHitLatencyMin = StatsUtil.assertExpectedValueFromDurationHistory(hitLatencyMinStatNames.get(i), context, managementRegistry, 0L); + double tierHitLatencyMax = StatsUtil.assertExpectedValueFromDurationHistory(hitLatencyMaxStatNames.get(i), context, managementRegistry, 0L); + double tierHitLatencyAverage = StatsUtil.assertExpectedValueFromAverageHistory(hitLatencyAvgStatNames.get(i), context, managementRegistry); + Assert.assertThat(tierHitLatencyMin, Matchers.lessThanOrEqualTo(tierHitLatencyAverage)); + Assert.assertThat(tierHitLatencyMax, Matchers.greaterThanOrEqualTo(tierHitLatencyAverage)); + + } + + double cacheHitLatencyMinimum = StatsUtil.assertExpectedValueFromDurationHistory("Cache:HitLatencyMinimum", context, managementRegistry, 0L); + double cacheHitLatencyMaximum = StatsUtil.assertExpectedValueFromDurationHistory("Cache:HitLatencyMaximum", context, managementRegistry, 0L); + double cacheHitLatencyAverage = StatsUtil.assertExpectedValueFromAverageHistory("Cache:HitLatencyAverage", context, managementRegistry); + + Assert.assertThat(cacheHitLatencyMinimum, Matchers.lessThanOrEqualTo(cacheHitLatencyAverage)); + Assert.assertThat(cacheHitLatencyMaximum, Matchers.greaterThanOrEqualTo(cacheHitLatencyAverage)); + } + finally { + if(cacheManager != null) { + cacheManager.close(); + } + } + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java new file mode 100755 index 0000000000..a2d31af6ed --- /dev/null +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java @@ -0,0 +1,156 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ehcache.management.providers.statistics; + +import static java.util.Arrays.asList; +import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder; +import static org.ehcache.config.units.MemoryUnit.MB; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.ehcache.Cache; +import org.ehcache.CacheManager; +import org.ehcache.config.Builder; +import org.ehcache.config.CacheConfiguration; +import org.ehcache.config.ResourcePools; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.impl.config.persistence.DefaultPersistenceConfiguration; +import org.ehcache.management.ManagementRegistryService; +import org.ehcache.management.config.EhcacheStatisticsProviderConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryConfiguration; +import org.ehcache.management.registry.DefaultManagementRegistryService; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.Timeout; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.terracotta.management.model.context.Context; + +@RunWith(Parameterized.class) +public class MissLatencyTest { + + @Rule + public final Timeout globalTimeout = Timeout.seconds(60); + + @Rule + public final TemporaryFolder diskPath = new TemporaryFolder(); + + private static final EhcacheStatisticsProviderConfiguration EHCACHE_STATISTICS_PROVIDER_CONFIG = new EhcacheStatisticsProviderConfiguration(1,TimeUnit.MINUTES,100,1,TimeUnit.SECONDS,10,TimeUnit.MINUTES); + + private static final Long ITERATIONS = 10L; + private static final List MISS_LATENCY_MIN_STATS = Arrays.asList("OnHeap:MissLatencyMinimum","OffHeap:MissLatencyMinimum","Disk:MissLatencyMinimum"); + private static final List MISS_LATENCY_MAX_STATS = Arrays.asList("OnHeap:MissLatencyMaximum","OffHeap:MissLatencyMaximum","Disk:MissLatencyMaximum"); + private static final List MISS_LATENCY_AVG_STATS = Arrays.asList("OnHeap:MissLatencyAverage","OffHeap:MissLatencyAverage","Disk:MissLatencyAverage"); + + private final ResourcePools resources; + private final List missLatencyMinStatNames; + private final List missLatencyMaxStatNames; + private final List missLatencyAvgStatNames; + + @Parameterized.Parameters + public static Collection data() { + + return asList(new Object[][] { + //1 tier + { newResourcePoolsBuilder().heap(1, MB), MISS_LATENCY_MIN_STATS.subList(0,1), MISS_LATENCY_MAX_STATS.subList(0,1), MISS_LATENCY_AVG_STATS.subList(0,1)}, + { newResourcePoolsBuilder().offheap(1, MB), MISS_LATENCY_MIN_STATS.subList(1,2), MISS_LATENCY_MAX_STATS.subList(1,2), MISS_LATENCY_AVG_STATS.subList(1,2)}, + { newResourcePoolsBuilder().disk(1, MB), MISS_LATENCY_MIN_STATS.subList(2,3), MISS_LATENCY_MAX_STATS.subList(2,3), MISS_LATENCY_AVG_STATS.subList(2,3)}, + + //2 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB), MISS_LATENCY_MIN_STATS.subList(0,2), MISS_LATENCY_MAX_STATS.subList(0,2), MISS_LATENCY_AVG_STATS.subList(0,2)}, + { newResourcePoolsBuilder().heap(1, MB).disk(2, MB), Arrays.asList(MISS_LATENCY_MIN_STATS.get(0),MISS_LATENCY_MIN_STATS.get(2)), Arrays.asList(MISS_LATENCY_MAX_STATS.get(0),MISS_LATENCY_MAX_STATS.get(2)), Arrays.asList(MISS_LATENCY_AVG_STATS.get(0),MISS_LATENCY_AVG_STATS.get(2))}, + //offheap and disk 
configuration is not valid. Throws IllegalStateException no Store.Provider found to handle configured resource types [offheap,disk] + + //3 tier + { newResourcePoolsBuilder().heap(1, MB).offheap(2, MB).disk(3, MB), MISS_LATENCY_MIN_STATS, MISS_LATENCY_MAX_STATS, MISS_LATENCY_AVG_STATS} + }); + } + + public MissLatencyTest(Builder resources, List missLatencyMinStatNames, List missLatencyMaxStatNames, List missLatencyAvgStatNames) { + this.resources = resources.build(); + this.missLatencyMinStatNames = missLatencyMinStatNames; + this.missLatencyMaxStatNames = missLatencyMaxStatNames; + this.missLatencyAvgStatNames = missLatencyAvgStatNames; + } + + @Test + public void test() throws InterruptedException, IOException { + CacheManager cacheManager = null; + + try { + + DefaultManagementRegistryConfiguration registryConfiguration = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCacheManager"); + registryConfiguration.addConfiguration(EHCACHE_STATISTICS_PROVIDER_CONFIG); + ManagementRegistryService managementRegistry = new DefaultManagementRegistryService(registryConfiguration); + + CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, resources).build(); + + cacheManager = CacheManagerBuilder.newCacheManagerBuilder() + .withCache("myCache", cacheConfiguration) + .using(managementRegistry) + .using(new DefaultPersistenceConfiguration(diskPath.newFolder())) + .build(true); + + + Cache cache = cacheManager.getCache("myCache", Long.class, String.class); + + Context context = StatsUtil.createContext(managementRegistry); + + StatsUtil.triggerStatComputation(managementRegistry, context, "Cache:MissLatencyMinimum","Cache:MissLatencyMaximum","Cache:MissLatencyAverage", + "OnHeap:MissLatencyMinimum","OnHeap:MissLatencyMaximum","OnHeap:MissLatencyAverage", + "OffHeap:MissLatencyMinimum","OffHeap:MissLatencyMaximum","OffHeap:MissLatencyAverage", + "Disk:MissLatencyMinimum","Disk:MissLatencyMaximum","Disk:MissLatencyAverage"); + + for (Long i = 0L; i < ITERATIONS; i++) { + cache.put(i, String.valueOf(i)); + } + + //MISS + for (Long i = ITERATIONS; i < (2*ITERATIONS); i++) { + cache.get(i); + } + + for (int i = 0; i < missLatencyMinStatNames.size(); i++) { + double tierMissLatencyMin = StatsUtil.assertExpectedValueFromDurationHistory(missLatencyMinStatNames.get(i), context, managementRegistry, 0L); + double tierMissLatencyMax = StatsUtil.assertExpectedValueFromDurationHistory(missLatencyMaxStatNames.get(i), context, managementRegistry, 0L); + double tierMissLatencyAverage = StatsUtil.assertExpectedValueFromAverageHistory(missLatencyAvgStatNames.get(i), context, managementRegistry); + Assert.assertThat(tierMissLatencyMin, Matchers.lessThanOrEqualTo(tierMissLatencyAverage)); + Assert.assertThat(tierMissLatencyMax, Matchers.greaterThanOrEqualTo(tierMissLatencyAverage)); + + } + + double cacheMissLatencyMinimum = StatsUtil.assertExpectedValueFromDurationHistory("Cache:MissLatencyMinimum", context, managementRegistry, 0L); + double cacheMissLatencyMaximum = StatsUtil.assertExpectedValueFromDurationHistory("Cache:MissLatencyMaximum", context, managementRegistry, 0L); + double cacheMissLatencyAverage = StatsUtil.assertExpectedValueFromAverageHistory("Cache:MissLatencyAverage", context, managementRegistry); + + Assert.assertThat(cacheMissLatencyMinimum, Matchers.lessThanOrEqualTo(cacheMissLatencyAverage)); + Assert.assertThat(cacheMissLatencyMaximum, Matchers.greaterThanOrEqualTo(cacheMissLatencyAverage)); + + } + finally { + 
if(cacheManager != null) { + cacheManager.close(); + } + } + } +} diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java index 9d846cd26d..ba5c6bdd6a 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/StatsUtil.java @@ -16,6 +16,7 @@ package org.ehcache.management.providers.statistics; import static java.util.Collections.singletonList; +import static org.junit.Assert.assertThat; import org.ehcache.management.ManagementRegistryService; import org.hamcrest.Matchers; @@ -32,10 +33,9 @@ import org.terracotta.management.model.stats.history.RatioHistory; import org.terracotta.management.registry.ResultSet; import org.terracotta.management.registry.StatisticQuery; +import org.junit.Assert; import java.util.Arrays; import java.util.Map; -import org.junit.Assert; -import static org.junit.Assert.assertThat; public class StatsUtil { @@ -129,6 +129,75 @@ public static void assertExpectedValueFromRatioHistory(String statName, Context assertThat(value, Matchers.is(expectedResult)); } + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your minExpectedValue, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong minExpectedValue. + */ + public static long assertExpectedValueFromDurationHistory(String statName, Context context, ManagementRegistryService managementRegistry, Long minExpectedValue) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + Long value = null; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + assertThat(counters.size(), Matchers.is(1)); + + DurationHistory durationHistory = statisticsContext.getStatistic(DurationHistory.class, statName); + + if (durationHistory.getValue().length > 0) { + int mostRecentIndex = durationHistory.getValue().length - 1; + value = durationHistory.getValue()[mostRecentIndex].getValue(); + } + + }while(!Thread.currentThread().isInterrupted() && value == null); + + assertThat(value, Matchers.greaterThan(minExpectedValue)); + + return value; + } + + /* + NOTE: When using this method in other unit tests, make sure to declare a timeout as it is possible to get an infinite loop. + This should only occur if the stats value is different from your minExpectedValue, which may happen if the stats calculations + change, the stats value isn't accessible or if you enter the wrong minExpectedValue. 
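+    For example, the latency tests in this module pair these polling helpers with a JUnit
+    Timeout rule ("@Rule public final Timeout globalTimeout = Timeout.seconds(30);") so that
+    a statistic that never becomes available fails the test instead of spinning forever.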
+ */ + public static double assertExpectedValueFromAverageHistory(String statName, Context context, ManagementRegistryService managementRegistry) { + + StatisticQuery query = managementRegistry.withCapability("StatisticsCapability") + .queryStatistics(Arrays.asList(statName)) + .on(context) + .build(); + + double value = Double.NaN; + do { + ResultSet counters = query.execute(); + + ContextualStatistics statisticsContext = counters.getResult(context); + + assertThat(counters.size(), Matchers.is(1)); + + + AverageHistory avgHistory = statisticsContext.getStatistic(AverageHistory.class, statName);//returns type DurationHistory but it was AverageHistory + + if (avgHistory.getValue().length > 0) { + int mostRecentIndex = avgHistory.getValue().length - 1; + value = avgHistory.getValue()[mostRecentIndex].getValue(); + } + + }while(!Thread.currentThread().isInterrupted() && Double.isNaN(value)); + + assertThat(value, Matchers.greaterThan(0d)); + + return value; + } + // When testing ratios, we need to wait for the first computation (we do not have any choice) to happen because ratio depends on 2 other sampled statistics. // If you do not wait, then you'll always get some NaN because the hits will be done within the 1st second, and the hits won't be done in the right "window". // A ratio is computed by dividing a rate with another rate. See CompoundOperationImpl.ratioOf(). From 4b80e9f4b24e5a89f3c38407e7880521a03cd1d5 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Tue, 6 Dec 2016 00:30:58 -0500 Subject: [PATCH 179/218] A 10 seconds timeout is too tight for the CI --- .../ehcache/management/providers/statistics/HitLatencyTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java index db53482308..492367c0be 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java @@ -52,7 +52,7 @@ public class HitLatencyTest { @Rule - public final Timeout globalTimeout = Timeout.seconds(10); + public final Timeout globalTimeout = Timeout.seconds(30); @Rule public final TemporaryFolder diskPath = new TemporaryFolder(); From e7e92cae3eeaa320ec6378585bcfdceb339696c2 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 2 Dec 2016 14:31:13 +0100 Subject: [PATCH 180/218] :construction: #1666 Destroy lock entity less often The lock entity now is only destroyed when the client was holding a write lock. This should make the occurence of Terracotta-OSS/terracotta-core#379 much less frequent. This commit introduces a TODO that is expected to be removed once the root cause is fixed. 
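A minimal sketch of the intended behaviour, using only the API visible in the diff below; the
Connection value is a placeholder, everything else (VoltronReadWriteLock, Hold, tryReadLock,
tryWriteLock, unlock, tryDestroy) appears in the patch:

    VoltronReadWriteLock lock = new VoltronReadWriteLock(connection, "my-lock");

    VoltronReadWriteLock.Hold readHold = lock.tryReadLock();
    readHold.unlock();     // read release: the client entity is closed, tryDestroy() is not called

    VoltronReadWriteLock.Hold writeHold = lock.tryWriteLock();
    writeHold.unlock();    // write release: tryDestroy() is attempted on the lock entity

In other words, the read path now only closes its client entity; the destroy attempt (and the
debug trace added below) happens solely when a write hold is released, which is what makes the
referenced terracotta-core issue less likely to be hit.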
--- .../internal/lock/VoltronReadWriteLock.java | 21 ++++++-- .../lock/VoltronReadWriteLockTest.java | 5 ++ .../BasicLifeCyclePassiveReplicationTest.java | 48 +++++++++++++++++++ 3 files changed, 69 insertions(+), 5 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java index 153bc111a5..311345a508 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java @@ -19,6 +19,8 @@ import java.io.Closeable; import org.ehcache.clustered.common.internal.lock.LockMessaging.HoldType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.terracotta.connection.Connection; import org.terracotta.connection.entity.EntityRef; import org.terracotta.exception.EntityAlreadyExistsException; @@ -28,6 +30,8 @@ public class VoltronReadWriteLock { + private static final Logger LOGGER = LoggerFactory.getLogger(VoltronReadWriteLock.class); + private final EntityRef reference; public VoltronReadWriteLock(Connection connection, String id) { @@ -66,18 +70,22 @@ private Hold tryLock(final HoldType type) { return new HoldImpl(client, type); } else { client.close(); - tryDestroy(); + //TODO Restore this clean up operation once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed +// tryDestroy(); return null; } } - private boolean tryDestroy() { + private void tryDestroy() { try { - return reference.destroy(); + boolean destroyed = reference.destroy(); + if (destroyed) { + LOGGER.debug("Destroyed lock entity " + reference.getName()); + } } catch (EntityNotProvidedException e) { throw new AssertionError(e); } catch (EntityNotFoundException e) { - return false; + // Nothing to do } } @@ -109,7 +117,9 @@ public void close() { public void unlock() { client.unlock(type); client.close(); - tryDestroy(); + if (type == HoldType.WRITE) { + tryDestroy(); + } } } @@ -118,6 +128,7 @@ private VoltronReadWriteLockClient createClientEntity() { while (true) { try { reference.create(null); + LOGGER.debug("Created lock entity " + reference.getName()); } catch (EntityAlreadyExistsException f) { //ignore } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java index 136d6c984d..762a5fc320 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLockTest.java @@ -17,6 +17,7 @@ package org.ehcache.clustered.client.internal.lock; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -158,6 +159,7 @@ public void testWriteUnlockDestroysEntity() throws Exception { } @Test + @Ignore("Enable once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed and TODO removed") public void testReadUnlockDestroysEntity() throws Exception { when(entityRef.fetchEntity()).thenReturn(client); @@ -266,6 +268,7 @@ public void testTryWriteUnlockDestroysEntity() throws Exception { } @Test + @Ignore("Enable once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed and TODO 
removed") public void testTryReadUnlockDestroysEntity() throws Exception { when(client.tryLock(READ)).thenReturn(true); @@ -306,6 +309,7 @@ public void testTryReadLockFailingClosesEntity() throws Exception { } @Test + @Ignore("Enable once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed and TODO removed") public void testTryWriteLockFailingDestroysEntity() throws Exception { when(client.tryLock(WRITE)).thenReturn(false); @@ -319,6 +323,7 @@ public void testTryWriteLockFailingDestroysEntity() throws Exception { } @Test + @Ignore("Enable once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed and TODO removed") public void testTryReadLockFailingDestroysEntity() throws Exception { when(client.tryLock(READ)).thenReturn(false); diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java index 0cf67d8d48..2c0a3d3550 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java @@ -17,11 +17,13 @@ package org.ehcache.clustered.replication; import org.ehcache.CachePersistenceException; +import org.ehcache.PersistentCacheManager; import org.ehcache.clustered.client.config.ClusteredResourcePool; import org.ehcache.clustered.client.config.ClusteringServiceConfiguration; import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; import org.ehcache.clustered.client.internal.EhcacheClientEntity; +import org.ehcache.clustered.client.internal.lock.VoltronReadWriteLock; import org.ehcache.clustered.client.internal.service.ClusteredTierCreationException; import org.ehcache.clustered.client.internal.service.ClusteredTierDestructionException; import org.ehcache.clustered.client.internal.service.ClusteredTierManagerConfigurationException; @@ -33,11 +35,14 @@ import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreManagerException; import org.ehcache.clustered.common.internal.exceptions.LifecycleException; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; import org.ehcache.impl.serialization.CompactJavaSerializer; import org.ehcache.spi.service.MaintainableService; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; +import org.junit.Ignore; import org.junit.Test; import org.terracotta.testing.rules.BasicExternalCluster; import org.terracotta.testing.rules.Cluster; @@ -46,10 +51,16 @@ import java.lang.reflect.Field; import java.util.Collections; +import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated; +import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster; +import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder; +import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder; +import static org.ehcache.config.builders.ResourcePoolsBuilder.heap; import static org.ehcache.config.units.MemoryUnit.MB; import static 
org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @@ -233,6 +244,43 @@ public void testDestroyServerStoreIsNotReplicatedIfFailsOnActive() throws Except cleanUpCluster(service1); } + @Test + public void testDestroyCacheManager() throws Exception { + CacheManagerBuilder configBuilder = newCacheManagerBuilder().with(cluster(CLUSTER.getConnectionURI().resolve("/destroy-CM")) + .autoCreate().defaultServerResource("primary-server-resource")); + PersistentCacheManager cacheManager1 = configBuilder.build(true); + PersistentCacheManager cacheManager2 = configBuilder.build(true); + + cacheManager2.close(); + + try { + cacheManager2.destroy(); + fail("Exception expected"); + } catch (Exception e) { + e.printStackTrace(); + } + + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().waitForActive(); + + cacheManager1.createCache("test", newCacheConfigurationBuilder(Long.class, String.class, heap(10).with(clusteredDedicated(10, MB)))); + } + + @Test +// @Ignore("enable back once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed") + public void testDestroyLockEntity() throws Exception { + VoltronReadWriteLock lock1 = new VoltronReadWriteLock(CLUSTER.newConnection(), "my-lock"); + VoltronReadWriteLock.Hold hold1 = lock1.tryReadLock(); + + VoltronReadWriteLock lock2 = new VoltronReadWriteLock(CLUSTER.newConnection(), "my-lock"); + assertThat(lock2.tryWriteLock(), nullValue()); + + CLUSTER.getClusterControl().terminateActive(); + CLUSTER.getClusterControl().waitForActive(); + + hold1.unlock(); + } + private static EhcacheClientEntity getEntity(ClusteringService clusteringService) throws NoSuchFieldException, IllegalAccessException { Field entity = clusteringService.getClass().getDeclaredField("entity"); entity.setAccessible(true); From fa7f78ec531ddc4d9551b4a3d63db046e0e0fbde Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Mon, 5 Dec 2016 17:01:04 -0500 Subject: [PATCH 181/218] :arrow_up: Upgrade to new tc-platform: fix management service lifecycle, closing entity requested services from the monitoring service when the entity is gone --- build.gradle | 2 +- .../org/ehcache/clustered/server/EhcacheActiveEntity.java | 3 --- .../org/ehcache/clustered/server/EhcachePassiveEntity.java | 1 - .../ehcache/clustered/server/management/Management.java | 7 ------- 4 files changed, 1 insertion(+), 12 deletions(-) diff --git a/build.gradle b/build.gradle index e94aafbe7c..face4837e8 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.13.beta2' + terracottaPlatformVersion = '5.0.13.beta3' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.13.beta' terracottaCoreVersion = '5.0.13-beta' diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index c70237cabb..84eea72ed0 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -674,9 +674,6 @@ private void clientInvalidated(ClientDescriptor clientDescriptor, int invalidati */ @Override public void destroy() { - - management.close(); - /* * Ensure the 
allocated stores are closed out. */ diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 2d5bfbe0ea..62cdb06c6e 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -303,7 +303,6 @@ public void createNew() { @Override public void destroy() { - management.close(); ehcacheStateService.destroy(); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java index d9c19dc0fd..53967217b8 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -97,13 +97,6 @@ public void init() { } } - public void close() { - if (managementRegistry != null) { - LOGGER.trace("close()"); - managementRegistry.close(); - } - } - public void clientConnected(ClientDescriptor clientDescriptor, ClientState clientState) { if (managementRegistry != null) { LOGGER.trace("clientConnected({})", clientDescriptor); From 71b0e450f3cd51babb7712d795e43157c055f98e Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Tue, 6 Dec 2016 10:30:08 -0500 Subject: [PATCH 182/218] :memo: fixed tc-config sample --- .../client/src/test/resources/configs/docs/tc-config.xml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/clustered/client/src/test/resources/configs/docs/tc-config.xml b/clustered/client/src/test/resources/configs/docs/tc-config.xml index 8c46e1c8dc..57d1553f1c 100644 --- a/clustered/client/src/test/resources/configs/docs/tc-config.xml +++ b/clustered/client/src/test/resources/configs/docs/tc-config.xml @@ -1,13 +1,12 @@ - - - + + 128 96 - - + + From 4f71f48d7238b692e45ef51509c89ba5b5d47968 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Tue, 6 Dec 2016 16:51:17 +0530 Subject: [PATCH 183/218] Test that active chain gets corrupted due to unsafe access #1669 --- .../server/offheap/OffHeapChainMap.java | 33 +++++++++-------- .../offheap/OffHeapChainStorageEngine.java | 5 +++ .../server/offheap/ChainMapTest.java | 36 +++++++++++++++++++ 3 files changed, 59 insertions(+), 15 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java index c68824e16a..bf14c2fb4c 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java @@ -48,31 +48,34 @@ interface ChainMapEvictionListener { private final ReadWriteLockedOffHeapClockCache heads; private final OffHeapChainStorageEngine chainStorage; - private volatile ChainMapEvictionListener evictionListener;; + private volatile ChainMapEvictionListener evictionListener; public OffHeapChainMap(PageSource source, Portability keyPortability, int minPageSize, int maxPageSize, boolean shareByThieving) { - this.chainStorage = new OffHeapChainStorageEngine(source, keyPortability, minPageSize, maxPageSize, shareByThieving, shareByThieving); - EvictionListener listener = new EvictionListener() { - @Override - public void evicting(Callable> callable) { + 
this.chainStorage = new OffHeapChainStorageEngine<>(source, keyPortability, minPageSize, maxPageSize, shareByThieving, shareByThieving); + EvictionListener listener = callable -> { + try { + Map.Entry entry = callable.call(); try { - Map.Entry entry = callable.call(); - try { - if (evictionListener != null) { - evictionListener.onEviction(entry.getKey()); - } - } finally { - entry.getValue().close(); + if (evictionListener != null) { + evictionListener.onEviction(entry.getKey()); } - } catch (Exception e) { - throw new AssertionError(e); + } finally { + entry.getValue().close(); } + } catch (Exception e) { + throw new AssertionError(e); } }; //TODO: EvictionListeningReadWriteLockedOffHeapClockCache lacks ctor that takes shareByThieving // this.heads = new ReadWriteLockedOffHeapClockCache(source, shareByThieving, chainStorage); - this.heads = new EvictionListeningReadWriteLockedOffHeapClockCache(listener, source, chainStorage); + this.heads = new EvictionListeningReadWriteLockedOffHeapClockCache<>(listener, source, chainStorage); + } + + //For tests + OffHeapChainMap(ReadWriteLockedOffHeapClockCache heads, OffHeapChainStorageEngine chainStorage) { + this.chainStorage = chainStorage; + this.heads = heads; } void setEvictionListener(ChainMapEvictionListener listener) { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java index 0e79350ca1..c540d6a598 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java @@ -58,6 +58,11 @@ public OffHeapChainStorageEngine(PageSource source, Portability keyPo this.keyPortability = keyPortability; } + //For tests + Set getActiveChains() { + return this.activeChains; + } + InternalChain newChain(ByteBuffer element) { return new PrimordialChain(element); } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java index 25fbb33faf..00d285d149 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java @@ -16,6 +16,12 @@ package org.ehcache.clustered.server.offheap; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; import org.ehcache.clustered.common.internal.store.Element; @@ -29,7 +35,9 @@ import org.junit.runners.Parameterized.Parameters; import org.junit.Test; +import org.terracotta.offheapstore.ReadWriteLockedOffHeapClockCache; import org.terracotta.offheapstore.buffersource.OffHeapBufferSource; +import org.terracotta.offheapstore.eviction.EvictionListeningReadWriteLockedOffHeapClockCache; import org.terracotta.offheapstore.paging.UnlimitedPageSource; import org.terracotta.offheapstore.paging.UpfrontAllocatingPageSource; import org.terracotta.offheapstore.storage.portability.StringPortability; @@ -340,6 +348,34 @@ public void testPutWhenKeyIsNull() { assertThat(map.get("key"), contains(element(1), element(2))); } + @Test + public void testActiveChainsThreadSafety() throws ExecutionException, 
InterruptedException { + UnlimitedPageSource source = new UnlimitedPageSource(new OffHeapBufferSource()); + OffHeapChainStorageEngine chainStorage = new OffHeapChainStorageEngine<>(source, StringPortability.INSTANCE, minPageSize, maxPageSize, steal, steal); + + ReadWriteLockedOffHeapClockCache heads = new EvictionListeningReadWriteLockedOffHeapClockCache<>(callable -> {}, source, chainStorage); + + OffHeapChainMap map = new OffHeapChainMap<>(heads, chainStorage); + + map.put("key", chain(buffer(1), buffer(2))); + + int nThreads = 10; + ExecutorService executorService = Executors.newFixedThreadPool(nThreads); + + List futures = new ArrayList<>(); + + for (int i = 0; i < nThreads ; i++) { + futures.add(executorService.submit(() -> map.get("key"))); + } + + for (Future f : futures) { + f.get(); + } + + assertThat(chainStorage.getActiveChains().size(), is(0)); + + } + private static ByteBuffer buffer(int i) { ByteBuffer buffer = ByteBuffer.allocate(i); while (buffer.hasRemaining()) { From 598f41f779ee4269c336df7c127f4db3b17a3612 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Tue, 6 Dec 2016 16:59:17 +0530 Subject: [PATCH 184/218] Activechain gets updated in thread safe way #1669 --- .../clustered/server/offheap/OffHeapChainStorageEngine.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java index c540d6a598..25ec0c40aa 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainStorageEngine.java @@ -17,10 +17,12 @@ import java.nio.ByteBuffer; import java.util.ArrayList; +import java.util.Collections; import java.util.Iterator; import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.locks.Lock; import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.common.internal.store.Element; @@ -48,7 +50,7 @@ class OffHeapChainStorageEngine implements StorageEngine { private final OffHeapStorageArea storage; private final Portability keyPortability; - private final Set activeChains = new HashSet(); + private final Set activeChains = Collections.newSetFromMap(new ConcurrentHashMap()); private StorageEngine.Owner owner; private long nextSequenceNumber = 0; From 28722b808933f4bb6148f31b5287652e8d604240 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Tue, 6 Dec 2016 17:44:34 +0530 Subject: [PATCH 185/218] Put does no leak when mapping is not null #1669 --- .../server/offheap/OffHeapChainMap.java | 27 ++++++++++++------- .../server/offheap/ChainMapTest.java | 16 +++++++++++ 2 files changed, 33 insertions(+), 10 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java index bf14c2fb4c..c03f06909d 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/offheap/OffHeapChainMap.java @@ -52,18 +52,21 @@ interface ChainMapEvictionListener { public OffHeapChainMap(PageSource source, Portability keyPortability, int minPageSize, int maxPageSize, boolean shareByThieving) { this.chainStorage = new 
OffHeapChainStorageEngine<>(source, keyPortability, minPageSize, maxPageSize, shareByThieving, shareByThieving); - EvictionListener listener = callable -> { - try { - Map.Entry entry = callable.call(); + EvictionListener listener = new EvictionListener() { + @Override + public void evicting(Callable> callable) { try { - if (evictionListener != null) { - evictionListener.onEviction(entry.getKey()); + Map.Entry entry = callable.call(); + try { + if (evictionListener != null) { + evictionListener.onEviction(entry.getKey()); + } + } finally { + entry.getValue().close(); } - } finally { - entry.getValue().close(); + } catch (Exception e) { + throw new AssertionError(e); } - } catch (Exception e) { - throw new AssertionError(e); } }; @@ -190,7 +193,11 @@ public void put(K key, Chain chain) { try { InternalChain current = heads.get(key); if (current != null) { - replaceAtHead(key, current.detach(), chain); + try { + replaceAtHead(key, current.detach(), chain); + } finally { + current.close(); + } } else { for (Element x : chain) { append(key, x.getPayload()); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java index 00d285d149..82fe871e1e 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/offheap/ChainMapTest.java @@ -376,6 +376,22 @@ public void testActiveChainsThreadSafety() throws ExecutionException, Interrupte } + @Test + public void testPutDoesNotLeakWhenMappingIsNotNull() { + UnlimitedPageSource source = new UnlimitedPageSource(new OffHeapBufferSource()); + OffHeapChainStorageEngine chainStorage = new OffHeapChainStorageEngine<>(source, StringPortability.INSTANCE, minPageSize, maxPageSize, steal, steal); + + ReadWriteLockedOffHeapClockCache heads = new EvictionListeningReadWriteLockedOffHeapClockCache<>(callable -> {}, source, chainStorage); + + OffHeapChainMap map = new OffHeapChainMap<>(heads, chainStorage); + + map.put("key", chain(buffer(1))); + map.put("key", chain(buffer(2))); + + assertThat(chainStorage.getActiveChains().size(), is(0)); + + } + private static ByteBuffer buffer(int i) { ByteBuffer buffer = ByteBuffer.allocate(i); while (buffer.hasRemaining()) { From 139acd807a83d2b04eb141986d8cba9c6a461b2f Mon Sep 17 00:00:00 2001 From: Ramesh Kavanappillil Date: Wed, 7 Dec 2016 13:41:50 +0530 Subject: [PATCH 186/218] Closes #1689 Remove hardcoded dependency of EhcacheStateService interface with OSS version of ServerStoreImpl --- .../clustered/server/EhcacheActiveEntity.java | 64 ++++++++++++++----- .../server/EhcachePassiveEntity.java | 11 +++- .../server/ServerSideServerStore.java | 34 ++++++++++ .../clustered/server/ServerStoreImpl.java | 2 +- .../server/management/Management.java | 5 +- .../server/management/ServerStoreBinding.java | 8 +-- .../server/state/EhcacheStateService.java | 6 +- 7 files changed, 101 insertions(+), 29 deletions(-) create mode 100644 clustered/server/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 84eea72ed0..f2a66cf446 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ 
b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -27,6 +27,7 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -318,7 +319,7 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe addClientId(clientDescriptor, reconnectMessage.getClientId()); Set cacheIds = reconnectMessage.getAllCaches(); for (final String cacheId : cacheIds) { - ServerStoreImpl serverStore = ehcacheStateService.getStore(cacheId); + ServerSideServerStore serverStore = ehcacheStateService.getStore(cacheId); if (serverStore == null) { //Client removes the cache's reference only when destroy has successfully completed //This happens only when client thinks destroy is still not complete @@ -335,7 +336,7 @@ public void handleReconnect(ClientDescriptor clientDescriptor, byte[] extendedRe management.clientReconnected(clientDescriptor, clientState); } - private void addInflightInvalidationsForStrongCache(ClientDescriptor clientDescriptor, ReconnectMessage reconnectMessage, String cacheId, ServerStoreImpl serverStore) { + private void addInflightInvalidationsForStrongCache(ClientDescriptor clientDescriptor, ReconnectMessage reconnectMessage, String cacheId, ServerSideServerStore serverStore) { if (serverStore.getStoreConfiguration().getConsistency().equals(Consistency.STRONG)) { Set invalidationsInProgress = reconnectMessage.getInvalidationsInProgress(cacheId); LOGGER.debug("Number of Inflight Invalidations from client ID {} for cache {} is {}.", reconnectMessage.getClientId(), cacheId, invalidationsInProgress @@ -363,7 +364,7 @@ public void synchronizeKeyToPassive(PassiveSynchronizationChannel storeConfigs = new HashMap<>(); for (String storeName : ehcacheStateService.getStores()) { - ServerStoreImpl store = ehcacheStateService.getStore(storeName); + ServerSideServerStore store = ehcacheStateService.getStore(storeName); storeConfigs.put(storeName, store.getStoreConfiguration()); } @@ -371,10 +372,16 @@ public void synchronizeKeyToPassive(PassiveSynchronizationChannel { - ServerStoreImpl store = ehcacheStateService.getStore(name); + ServerSideServerStore store = ehcacheStateService.getStore(name); store.getSegments().get(concurrencyKey - DATA_CONCURRENCY_KEY_OFFSET).keySet().stream() .forEach(key -> { - syncChannel.synchronizeToPassive(new EhcacheDataSyncMessage(name, key, store.get(key))); + final Chain chain; + try { + chain = store.get(key); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } + syncChannel.synchronizeToPassive(new EhcacheDataSyncMessage(name, key, chain)); }); }); } @@ -467,7 +474,7 @@ private EhcacheEntityResponse invokeLifeCycleOperation(ClientDescriptor clientDe private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor clientDescriptor, ServerStoreOpMessage message) throws ClusterException { validateClusteredTierManagerConfigured(clientDescriptor); - ServerStoreImpl cacheStore = ehcacheStateService.getStore(message.getCacheId()); + ServerSideServerStore cacheStore = ehcacheStateService.getStore(message.getCacheId()); if (cacheStore == null) { // An operation on a non-existent store should never get out of the client throw new LifecycleException("Clustered tier does not exist : '" + message.getCacheId() + "'"); @@ -497,13 +504,23 @@ 
private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client switch (message.getMessageType()) { case GET_STORE: { ServerStoreOpMessage.GetMessage getMessage = (ServerStoreOpMessage.GetMessage) message; - return responseFactory.response(cacheStore.get(getMessage.getKey())); + try { + return responseFactory.response(cacheStore.get(getMessage.getKey())); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } } case APPEND: { if (!isMessageDuplicate(message)) { ServerStoreOpMessage.AppendMessage appendMessage = (ServerStoreOpMessage.AppendMessage)message; - cacheStore.getAndAppend(appendMessage.getKey(), appendMessage.getPayload()); - sendMessageToSelfAndDeferRetirement(appendMessage, cacheStore.get(appendMessage.getKey())); + final Chain newChain; + try { + cacheStore.getAndAppend(appendMessage.getKey(), appendMessage.getPayload()); + newChain = cacheStore.get(appendMessage.getKey()); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } + sendMessageToSelfAndDeferRetirement(appendMessage, newChain); invalidateHashForClient(clientDescriptor, appendMessage.getCacheId(), appendMessage.getKey()); } return responseFactory.success(); @@ -513,14 +530,25 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client LOGGER.trace("Message {} : GET_AND_APPEND on key {} from client {}", message, getAndAppendMessage.getKey(), getAndAppendMessage.getClientId()); if (!isMessageDuplicate(message)) { LOGGER.trace("Message {} : is not duplicate", message); - Chain result = cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); - sendMessageToSelfAndDeferRetirement(getAndAppendMessage, cacheStore.get(getAndAppendMessage.getKey())); + final Chain result; + final Chain newChain; + try { + result = cacheStore.getAndAppend(getAndAppendMessage.getKey(), getAndAppendMessage.getPayload()); + newChain = cacheStore.get(getAndAppendMessage.getKey()); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } + sendMessageToSelfAndDeferRetirement(getAndAppendMessage, newChain); EhcacheEntityResponse response = responseFactory.response(result); LOGGER.debug("Send invalidations for key {}", getAndAppendMessage.getKey()); invalidateHashForClient(clientDescriptor, getAndAppendMessage.getCacheId(), getAndAppendMessage.getKey()); return response; } - return responseFactory.response(cacheStore.get(getAndAppendMessage.getKey())); + try { + return responseFactory.response(cacheStore.get(getAndAppendMessage.getKey())); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } } case REPLACE: { ServerStoreOpMessage.ReplaceAtHeadMessage replaceAtHeadMessage = (ServerStoreOpMessage.ReplaceAtHeadMessage) message; @@ -538,7 +566,11 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client case CLEAR: { if (!isMessageDuplicate(message)) { String cacheId = message.getCacheId(); - cacheStore.clear(); + try { + cacheStore.clear(); + } catch (TimeoutException e) { + throw new AssertionError("Server side store is not expected to throw timeout exception"); + } invalidateAll(clientDescriptor, cacheId); } return responseFactory.success(); @@ -761,7 +793,7 @@ private void createServerStore(ClientDescriptor clientDescriptor, CreateServerSt } boolean isDuplicate 
= isMessageDuplicate(createServerStore); final String name = createServerStore.getName(); // client cache identifier/name - ServerStoreImpl serverStore; + ServerSideServerStore serverStore; if (!isDuplicate) { LOGGER.info("Client {} creating new clustered tier '{}'", clientDescriptor, name); @@ -802,7 +834,7 @@ private void validateServerStore(ClientDescriptor clientDescriptor, ValidateServ ServerStoreConfiguration clientConfiguration = validateServerStore.getStoreConfiguration(); LOGGER.info("Client {} validating clustered tier '{}'", clientDescriptor, name); - ServerStoreImpl store = ehcacheStateService.getStore(name); + ServerSideServerStore store = ehcacheStateService.getStore(name); if (store != null) { storeCompatibility.verify(store.getStoreConfiguration(), clientConfiguration); attachStore(clientDescriptor, name); @@ -826,7 +858,7 @@ private void releaseServerStore(ClientDescriptor clientDescriptor, ReleaseServer String name = releaseServerStore.getName(); LOGGER.info("Client {} releasing clustered tier '{}'", clientDescriptor, name); - ServerStoreImpl store = ehcacheStateService.getStore(name); + ServerSideServerStore store = ehcacheStateService.getStore(name); if (store != null) { boolean removedFromClient = clientState.removeStore(name); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 62cdb06c6e..7fa04a2ac5 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -55,6 +55,7 @@ import java.util.Map; import java.util.Set; import java.util.UUID; +import java.util.concurrent.TimeoutException; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isLifecycleMessage; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.isPassiveReplicationMessage; @@ -114,7 +115,7 @@ private void invokeRetirementMessages(PassiveReplicationMessage message) throws case CHAIN_REPLICATION_OP: LOGGER.debug("Chain Replication message for msgId {} & client Id {}", message.getId(), message.getClientId()); ChainReplicationMessage retirementMessage = (ChainReplicationMessage)message; - ServerStoreImpl cacheStore = ehcacheStateService.getStore(retirementMessage.getCacheId()); + ServerSideServerStore cacheStore = ehcacheStateService.getStore(retirementMessage.getCacheId()); if (cacheStore == null) { // An operation on a non-existent store should never get out of the client throw new LifecycleException("Clustered tier does not exist : '" + retirementMessage.getCacheId() + "'"); @@ -170,7 +171,7 @@ private void trackHashInvalidationForEventualCache(ChainReplicationMessage retir } private void invokeServerStoreOperation(ServerStoreOpMessage message) throws ClusterException { - ServerStoreImpl cacheStore = ehcacheStateService.getStore(message.getCacheId()); + ServerSideServerStore cacheStore = ehcacheStateService.getStore(message.getCacheId()); if (cacheStore == null) { // An operation on a non-existent store should never get out of the client throw new LifecycleException("Clustered tier does not exist : '" + message.getCacheId() + "'"); @@ -183,7 +184,11 @@ private void invokeServerStoreOperation(ServerStoreOpMessage message) throws Clu break; } case CLEAR: { - cacheStore.clear(); + try { + cacheStore.clear(); + } catch (TimeoutException e) { + throw new AssertionError("Server side 
store is not expected to throw timeout exception"); + } InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(message.getCacheId()); if (invalidationTracker != null) { invalidationTracker.setClearInProgress(true); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java new file mode 100644 index 0000000000..cdafcff88e --- /dev/null +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerSideServerStore.java @@ -0,0 +1,34 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.server; + +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.store.Chain; +import org.ehcache.clustered.common.internal.store.ServerStore; +import org.ehcache.clustered.server.offheap.OffHeapChainMap; +import org.terracotta.offheapstore.MapInternals; + +import com.tc.classloader.CommonComponent; + +import java.util.List; + +@CommonComponent +public interface ServerSideServerStore extends ServerStore, MapInternals { + void setEvictionListener(ServerStoreEvictionListener listener); + ServerStoreConfiguration getStoreConfiguration(); + List> getSegments(); + void put(long key, Chain chain); +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java index b302fcbb69..0851bed60e 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/ServerStoreImpl.java @@ -30,7 +30,7 @@ import java.util.List; @CommonComponent -public class ServerStoreImpl implements ServerStore, MapInternals { +public class ServerStoreImpl implements ServerSideServerStore { private final ServerStoreConfiguration storeConfiguration; private final PageSource pageSource; diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java index 53967217b8..27c5cdf5c5 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/Management.java @@ -17,6 +17,7 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.server.ClientState; +import org.ehcache.clustered.server.ServerSideServerStore; import org.ehcache.clustered.server.ServerStoreImpl; import org.ehcache.clustered.server.state.EhcacheStateService; import org.slf4j.Logger; @@ -142,7 +143,7 @@ public void clientValidated(ClientDescriptor clientDescriptor, ClientState clien public void serverStoreCreated(String name) { if (managementRegistry != null) { LOGGER.trace("serverStoreCreated({})", name); - 
ServerStoreImpl serverStore = ehcacheStateService.getStore(name); + ServerSideServerStore serverStore = ehcacheStateService.getStore(name); ServerStoreBinding serverStoreBinding = new ServerStoreBinding(name, serverStore); managementRegistry.register(serverStoreBinding); ServerSideConfiguration.Pool pool = ehcacheStateService.getDedicatedResourcePool(name); @@ -171,7 +172,7 @@ public void storeReleased(ClientDescriptor clientDescriptor, ClientState clientS } public void serverStoreDestroyed(String name) { - ServerStoreImpl serverStore = ehcacheStateService.getStore(name); + ServerSideServerStore serverStore = ehcacheStateService.getStore(name); if (managementRegistry != null && serverStore != null) { LOGGER.trace("serverStoreDestroyed({})", name); ServerStoreBinding managedObject = new ServerStoreBinding(name, serverStore); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java index 2734cf60ec..5411c609a7 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/management/ServerStoreBinding.java @@ -15,18 +15,18 @@ */ package org.ehcache.clustered.server.management; -import org.ehcache.clustered.server.ServerStoreImpl; +import org.ehcache.clustered.server.ServerSideServerStore; import org.terracotta.management.service.monitoring.registry.provider.AliasBinding; class ServerStoreBinding extends AliasBinding { - ServerStoreBinding(String identifier, ServerStoreImpl serverStore) { + ServerStoreBinding(String identifier, ServerSideServerStore serverStore) { super(identifier, serverStore); } @Override - public ServerStoreImpl getValue() { - return (ServerStoreImpl) super.getValue(); + public ServerSideServerStore getValue() { + return (ServerSideServerStore) super.getValue(); } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index 19eec35f8a..4ef7dbb79a 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -19,7 +19,7 @@ import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.exceptions.ClusterException; -import org.ehcache.clustered.server.ServerStoreImpl; +import org.ehcache.clustered.server.ServerSideServerStore; import org.ehcache.clustered.server.repo.StateRepositoryManager; import com.tc.classloader.CommonComponent; @@ -40,7 +40,7 @@ public interface EhcacheStateService { ResourcePageSource getDedicatedResourcePageSource(String name); - ServerStoreImpl getStore(String name); + ServerSideServerStore getStore(String name); Set getStores(); @@ -50,7 +50,7 @@ public interface EhcacheStateService { void configure(ServerSideConfiguration configuration) throws ClusterException; - ServerStoreImpl createStore(String name, ServerStoreConfiguration serverStoreConfiguration) throws ClusterException; + ServerSideServerStore createStore(String name, ServerStoreConfiguration serverStoreConfiguration) throws ClusterException; void destroyServerStore(String name) throws ClusterException; From 
bcdde0aeba755cc4056c21fcd736a20d1dd1f2e9 Mon Sep 17 00:00:00 2001 From: Mathieu Carbou Date: Wed, 7 Dec 2016 09:43:53 -0500 Subject: [PATCH 187/218] :construction: #1684: Disabling Latency statistics --- build.gradle | 2 +- .../ClusteredStatisticsLatencyTest.java | 4 +- .../ClusteringManagementServiceTest.java | 46 +------------------ .../statistics/EhcacheStatisticsProvider.java | 25 ++++++++++ .../statistics/StandardEhcacheStatistics.java | 3 +- .../providers/statistics/HitLatencyTest.java | 2 + .../providers/statistics/MissLatencyTest.java | 2 + .../DefaultManagementRegistryServiceTest.java | 36 --------------- 8 files changed, 36 insertions(+), 84 deletions(-) diff --git a/build.gradle b/build.gradle index face4837e8..bf52ec5ca1 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.13.beta3' + terracottaPlatformVersion = '5.0.13.beta4' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.13.beta' terracottaCoreVersion = '5.0.13-beta' diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java index 6bb95ed9ea..de7eea3f68 100755 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteredStatisticsLatencyTest.java @@ -20,6 +20,7 @@ import java.util.stream.Collectors; import org.ehcache.Cache; import org.hamcrest.Matchers; +import org.junit.Ignore; import org.junit.Test; import org.junit.Assert; import org.terracotta.management.model.stats.ContextualStatistics; @@ -27,6 +28,7 @@ import org.terracotta.management.model.stats.history.AverageHistory; import org.terracotta.management.model.stats.history.DurationHistory; +@Ignore public class ClusteredStatisticsLatencyTest extends AbstractClusteringManagementTest { @Test @@ -136,4 +138,4 @@ private static double getAverageHistorySampleValue(ContextualStatistics stat, St return Double.NaN; } -} \ No newline at end of file +} diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java index 6ef1f06b0d..6113181093 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/management/ClusteringManagementServiceTest.java @@ -336,63 +336,37 @@ public void test_G_stats_collection() throws Exception { @BeforeClass public static void initDescriptors() throws ClassNotFoundException { - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMinimum" , StatisticType.DURATION_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyMinimum" , StatisticType.DURATION_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , StatisticType.COUNTER_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMaximum" , StatisticType.DURATION_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionRate" , StatisticType.RATE_HISTORY)); 
ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatio" , StatisticType.RATIO_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRatio" , StatisticType.RATIO_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyAverage" , StatisticType.AVERAGE_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMinimum" , StatisticType.DURATION_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedByteSize", StatisticType.SIZE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRate" , StatisticType.RATE_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyAverage" , StatisticType.AVERAGE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRate" , StatisticType.RATE_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyAverage" , StatisticType.AVERAGE_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyMaximum" , StatisticType.DURATION_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMaximum" , StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionRate", StatisticType.RATE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRate", StatisticType.RATE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); + OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedByteSize", StatisticType.SIZE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedByteSize", StatisticType.SIZE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatio", StatisticType.RATIO_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRatio", StatisticType.RATIO_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", StatisticType.COUNTER_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MaxMappingCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitCount", StatisticType.COUNTER_HISTORY)); - 
OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MaxMappingCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRate", StatisticType.RATE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedByteSize", StatisticType.SIZE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionRate", StatisticType.RATE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:AllocatedByteSize", StatisticType.SIZE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", StatisticType.COUNTER_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatio", StatisticType.RATIO_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRatio", StatisticType.RATIO_HISTORY)); @@ -401,42 +375,24 @@ public static void initDescriptors() throws ClassNotFoundException { DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRate", StatisticType.RATE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissCount", StatisticType.COUNTER_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitCount", StatisticType.COUNTER_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MaxMappingCount", StatisticType.COUNTER_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitRate", StatisticType.RATE_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionCount", StatisticType.COUNTER_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissRate", StatisticType.RATE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:OccupiedByteSize", StatisticType.SIZE_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new 
StatisticDescriptor("Clustered:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitRatio", StatisticType.RATIO_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MissRatio", StatisticType.RATIO_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:AllocatedByteSize", StatisticType.SIZE_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:MappingCount", StatisticType.COUNTER_HISTORY)); CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:EvictionRate", StatisticType.RATE_HISTORY)); - CLUSTERED_DESCRIPTORS.add(new StatisticDescriptor("Clustered:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRate", StatisticType.RATE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", StatisticType.COUNTER_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRatio", StatisticType.RATIO_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyAverage", StatisticType.AVERAGE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearRate", StatisticType.RATE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyMaximum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRate", StatisticType.RATE_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearCount", StatisticType.COUNTER_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyMinimum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", StatisticType.COUNTER_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatio", StatisticType.RATIO_HISTORY)); diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java index 83d0bcc0c2..68c53280dd 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProvider.java @@ -22,19 +22,33 @@ import org.ehcache.management.providers.ExposedCacheBinding; import org.terracotta.management.model.capabilities.Capability; import org.terracotta.management.model.capabilities.StatisticsCapability; +import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import 
org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; import org.terracotta.management.model.context.Context; import org.terracotta.management.model.stats.Statistic; import org.terracotta.management.registry.action.ExposedObject; import org.terracotta.management.registry.action.Named; +import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; @Named("StatisticsCapability") public class EhcacheStatisticsProvider extends CacheBindingManagementProvider { + private static final Comparator STATISTIC_DESCRIPTOR_COMPARATOR = new Comparator() { + @Override + public int compare(StatisticDescriptor o1, StatisticDescriptor o2) { + return o1.getName().compareTo(o2.getName()); + } + }; + private final StatisticsProviderConfiguration statisticsProviderConfiguration; private final ScheduledExecutorService executor; @@ -54,6 +68,17 @@ protected void dispose(ExposedObject exposedObject) { ((StandardEhcacheStatistics) exposedObject).dispose(); } + @Override + public final Collection getDescriptors() { + Collection capabilities = new HashSet(); + for (ExposedObject o : getExposedObjects()) { + capabilities.addAll(((StandardEhcacheStatistics) o).getDescriptors()); + } + List list = new ArrayList(capabilities); + Collections.sort(list, STATISTIC_DESCRIPTOR_COMPARATOR); + return list; + } + @Override public Capability getCapability() { StatisticsCapability.Properties properties = new StatisticsCapability.Properties(statisticsProviderConfiguration.averageWindowDuration(), diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java index babc41e9b5..8a4323c54c 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -24,6 +24,7 @@ import org.terracotta.context.extended.OperationStatisticDescriptor; import org.terracotta.context.extended.StatisticsRegistry; import org.terracotta.management.model.capabilities.descriptors.Descriptor; +import org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; import org.terracotta.management.model.stats.Statistic; import org.terracotta.management.registry.collect.StatisticsRegistryMetadata; @@ -81,7 +82,7 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { } @Override - public Collection getDescriptors() { + public Collection getDescriptors() { return statisticsRegistryMetadata.getDescriptors(); } diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java index 492367c0be..d03f2d6164 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/HitLatencyTest.java @@ -40,6 +40,7 @@ import org.ehcache.management.registry.DefaultManagementRegistryService; import org.hamcrest.Matchers; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -48,6 +49,7 @@ import 
org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; +@Ignore @RunWith(Parameterized.class) public class HitLatencyTest { diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java index a2d31af6ed..ecfb7ec7ef 100755 --- a/management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/MissLatencyTest.java @@ -38,6 +38,7 @@ import org.ehcache.management.registry.DefaultManagementRegistryService; import org.hamcrest.Matchers; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -46,6 +47,7 @@ import org.junit.runners.Parameterized; import org.terracotta.management.model.context.Context; +@Ignore @RunWith(Parameterized.class) public class MissLatencyTest { diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index f1edc56457..5397cb36cf 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -528,63 +528,36 @@ public void testCallOnInexistignContext() { @BeforeClass public static void loadStatsUtil() throws ClassNotFoundException { - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMinimum" , StatisticType.DURATION_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyMinimum" , StatisticType.DURATION_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissCount" , StatisticType.COUNTER_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyMaximum" , StatisticType.DURATION_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionRate" , StatisticType.RATE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRatio" , StatisticType.RATIO_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRatio" , StatisticType.RATIO_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MappingCount" , StatisticType.COUNTER_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyAverage" , StatisticType.AVERAGE_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMinimum" , StatisticType.DURATION_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:OccupiedByteSize" , StatisticType.SIZE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissRate" , StatisticType.RATE_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyAverage" , StatisticType.AVERAGE_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitCount" , StatisticType.COUNTER_HISTORY)); ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitRate" , StatisticType.RATE_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:MissLatencyAverage" , StatisticType.AVERAGE_HISTORY)); - ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:EvictionLatencyMaximum" , StatisticType.DURATION_HISTORY)); - 
ONHEAP_DESCRIPTORS.add(new StatisticDescriptor("OnHeap:HitLatencyMaximum" , StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionRate", StatisticType.RATE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRate", StatisticType.RATE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:OccupiedByteSize", StatisticType.SIZE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:AllocatedByteSize", StatisticType.SIZE_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MappingCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRate", StatisticType.RATE_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitRatio", StatisticType.RATIO_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissRatio", StatisticType.RATIO_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:EvictionCount", StatisticType.COUNTER_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MaxMappingCount", StatisticType.COUNTER_HISTORY)); OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:HitCount", StatisticType.COUNTER_HISTORY)); - OFFHEAP_DESCRIPTORS.add(new StatisticDescriptor("OffHeap:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MaxMappingCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRate", StatisticType.RATE_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:OccupiedByteSize", StatisticType.SIZE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyAverage", StatisticType.AVERAGE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionRate", StatisticType.RATE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionLatencyMaximum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new 
StatisticDescriptor("Disk:AllocatedByteSize", StatisticType.SIZE_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitCount", StatisticType.COUNTER_HISTORY)); - DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:EvictionCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:HitRatio", StatisticType.RATIO_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRatio", StatisticType.RATIO_HISTORY)); @@ -592,21 +565,12 @@ public static void loadStatsUtil() throws ClassNotFoundException { DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MappingCount", StatisticType.COUNTER_HISTORY)); DISK_DESCRIPTORS.add(new StatisticDescriptor("Disk:MissRate", StatisticType.RATE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyMaximum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRate", StatisticType.RATE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMinimum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitCount", StatisticType.COUNTER_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitRatio", StatisticType.RATIO_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyMinimum", StatisticType.DURATION_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyAverage", StatisticType.AVERAGE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyMaximum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearRate", StatisticType.RATE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissLatencyAverage", StatisticType.AVERAGE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:HitLatencyAverage", StatisticType.AVERAGE_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyMaximum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRate", StatisticType.RATE_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearCount", StatisticType.COUNTER_HISTORY)); - CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:ClearLatencyMinimum", StatisticType.DURATION_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissCount", StatisticType.COUNTER_HISTORY)); CACHE_DESCRIPTORS.add(new StatisticDescriptor("Cache:MissRatio", StatisticType.RATIO_HISTORY)); From edd7f583def17418b6195e072c63d85240038656 Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Thu, 24 Nov 2016 14:06:00 -0500 Subject: [PATCH 188/218] =?UTF-8?q?Make=20sure=20we=20don=E2=80=99t=20supp?= =?UTF-8?q?ress=20exceptions.=20Close=20#1651?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../internal/EhcacheClientEntityFactory.java | 171 +++++++++++------- 1 file changed, 106 insertions(+), 65 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java index 3391af2f1f..b3758887fe 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java +++ 
b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java @@ -91,7 +91,7 @@ public void abandonLeadership(String entityIdentifier) { * lifecycle operation timeout */ public void create(final String identifier, final ServerSideConfiguration config) - throws EntityAlreadyExistsException, EhcacheEntityCreationException, EntityBusyException, TimeoutException { + throws EntityAlreadyExistsException, EhcacheEntityCreationException, EntityBusyException, TimeoutException { Hold existingMaintenance = maintenanceHolds.get(identifier); Hold localMaintenance = null; if (existingMaintenance == null) { @@ -99,43 +99,54 @@ public void create(final String identifier, final ServerSideConfiguration config } if (existingMaintenance == null && localMaintenance == null) { throw new EntityBusyException("Unable to create clustered tier manager for id " - + identifier + ": another client owns the maintenance lease"); - } else { + + identifier + ": another client owns the maintenance lease"); + } + + boolean finished = false; + + try { + EntityRef ref = getEntityRef(identifier); try { - EntityRef ref = getEntityRef(identifier); - try { - while (true) { - ref.create(UUID.randomUUID()); + while (true) { + ref.create(UUID.randomUUID()); + try { + EhcacheClientEntity entity = ref.fetchEntity(); try { - EhcacheClientEntity entity = ref.fetchEntity(); - try { - entity.setTimeouts(entityTimeouts); - entity.configure(config); - return; - } finally { + entity.setTimeouts(entityTimeouts); + entity.configure(config); + finished = true; + return; + } finally { + if (finished) { entity.close(); + } else { + silentlyClose(entity, identifier); } - } catch (ClusteredTierManagerConfigurationException e) { - try { - ref.destroy(); - } catch (EntityNotFoundException f) { - //ignore - } - throw new EhcacheEntityCreationException("Unable to configure clustered tier manager for id " + identifier, e); - } catch (EntityNotFoundException e) { - //continue; } + } catch (ClusteredTierManagerConfigurationException e) { + try { + ref.destroy(); + } catch (EntityNotFoundException f) { + //ignore + } + throw new EhcacheEntityCreationException("Unable to configure clustered tier manager for id " + identifier, e); + } catch (EntityNotFoundException e) { + //continue; } - } catch (EntityNotProvidedException e) { - LOGGER.error("Unable to create clustered tier manager for id {}", identifier, e); - throw new AssertionError(e); - } catch (EntityVersionMismatchException e) { - LOGGER.error("Unable to create clustered tier manager for id {}", identifier, e); - throw new AssertionError(e); } - } finally { - if (localMaintenance != null) { + } catch (EntityNotProvidedException e) { + LOGGER.error("Unable to create clustered tier manager for id {}", identifier, e); + throw new AssertionError(e); + } catch (EntityVersionMismatchException e) { + LOGGER.error("Unable to create clustered tier manager for id {}", identifier, e); + throw new AssertionError(e); + } + } finally { + if (localMaintenance != null) { + if (finished) { localMaintenance.unlock(); + } else { + silentlyUnlock(localMaintenance, identifier); } } } @@ -156,64 +167,94 @@ public void create(final String identifier, final ServerSideConfiguration config * lifecycle operation timeout */ public EhcacheClientEntity retrieve(String identifier, ServerSideConfiguration config) - throws EntityNotFoundException, EhcacheEntityValidationException, TimeoutException { + throws EntityNotFoundException, EhcacheEntityValidationException, TimeoutException { + + Hold 
fetchHold = createAccessLockFor(identifier).readLock(); + + EhcacheClientEntity entity; try { - Hold fetchHold = createAccessLockFor(identifier).readLock(); - EhcacheClientEntity entity = getEntityRef(identifier).fetchEntity(); - /* - * Currently entities are never closed as doing so can stall the client - * when the server is dead. Instead the connection is forcibly closed, - * which suits our purposes since that will unlock the fetchHold too. - */ - boolean validated = false; - try { - entity.setTimeouts(entityTimeouts); - entity.validate(config); - validated = true; - return entity; - } catch (ClusteredTierManagerValidationException e) { - throw new EhcacheEntityValidationException("Unable to validate clustered tier manager for id " + identifier, e); - } finally { - if (!validated) { - entity.close(); - fetchHold.unlock(); - } - } + entity = getEntityRef(identifier).fetchEntity(); } catch (EntityVersionMismatchException e) { LOGGER.error("Unable to retrieve clustered tier manager for id {}", identifier, e); + silentlyUnlock(fetchHold, identifier); throw new AssertionError(e); } + + /* + * Currently entities are never closed as doing so can stall the client + * when the server is dead. Instead the connection is forcibly closed, + * which suits our purposes since that will unlock the fetchHold too. + */ + boolean validated = false; + try { + entity.setTimeouts(entityTimeouts); + entity.validate(config); + validated = true; + return entity; + } catch (ClusteredTierManagerValidationException e) { + throw new EhcacheEntityValidationException("Unable to validate clustered tier manager for id " + identifier, e); + } finally { + if (!validated) { + silentlyClose(entity, identifier); + silentlyUnlock(fetchHold, identifier); + } + } } public void destroy(final String identifier) throws EhcacheEntityNotFoundException, EntityBusyException { Hold existingMaintenance = maintenanceHolds.get(identifier); Hold localMaintenance = null; + if (existingMaintenance == null) { localMaintenance = createAccessLockFor(identifier).tryWriteLock(); } + if (existingMaintenance == null && localMaintenance == null) { throw new EntityBusyException("Destroy operation failed; " + identifier + " clustered tier's maintenance lease held"); - } else { + } + + boolean finished = false; + + try { + EntityRef ref = getEntityRef(identifier); try { - EntityRef ref = getEntityRef(identifier); - try { - if (!ref.destroy()) { - throw new EntityBusyException("Destroy operation failed; " + identifier + " clustered tier in use by other clients"); - } - } catch (EntityNotProvidedException e) { - LOGGER.error("Unable to delete clustered tier manager for id {}", identifier, e); - throw new AssertionError(e); - } catch (EntityNotFoundException e) { - throw new EhcacheEntityNotFoundException(e); + if (!ref.destroy()) { + throw new EntityBusyException("Destroy operation failed; " + identifier + " clustered tier in use by other clients"); } - } finally { - if (localMaintenance != null) { + finished = true; + } catch (EntityNotProvidedException e) { + LOGGER.error("Unable to delete clustered tier manager for id {}", identifier, e); + throw new AssertionError(e); + } catch (EntityNotFoundException e) { + throw new EhcacheEntityNotFoundException(e); + } + } finally { + if (localMaintenance != null) { + if (finished) { localMaintenance.unlock(); + } else { + silentlyUnlock(localMaintenance, identifier); } } } } + private void silentlyClose(EhcacheClientEntity entity, String identifier) { + try { + entity.close(); + } catch (Exception e) { + 
LOGGER.error("Failed to close entity {}", identifier, e); + } + } + + private void silentlyUnlock(Hold localMaintenance, String identifier) { + try { + localMaintenance.unlock(); + } catch(Exception e) { + LOGGER.error("Failed to unlock for id {}", identifier, e); + } + } + private VoltronReadWriteLock createAccessLockFor(String entityIdentifier) { return new VoltronReadWriteLock(connection, "EhcacheClientEntityFactory-AccessLock-" + entityIdentifier); } From a0a5e98731bda0b653e366308eb6134e7fe3417e Mon Sep 17 00:00:00 2001 From: Abhilash Date: Fri, 9 Dec 2016 16:53:03 +0530 Subject: [PATCH 189/218] Bump up to latest terracotta core/api release --- build.gradle | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/build.gradle b/build.gradle index bf52ec5ca1..bb4d2aed31 100644 --- a/build.gradle +++ b/build.gradle @@ -30,13 +30,13 @@ ext { // Clustered terracottaPlatformVersion = '5.0.13.beta4' managementVersion = terracottaPlatformVersion - terracottaApisVersion = '1.0.13.beta' - terracottaCoreVersion = '5.0.13-beta' + terracottaApisVersion = '1.0.14.beta' + terracottaCoreVersion = '5.0.14-beta2' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.13.beta' + terracottaPassthroughTestingVersion = '1.0.14.beta' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.13-beta' + galvanVersion = '1.0.14-beta2' // Tools findbugsVersion = '3.0.1' From c03c9c764f6fd5de0bc848e85cfeb8046e2470a3 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Fri, 9 Dec 2016 17:15:12 +0530 Subject: [PATCH 190/218] Bump up terracotta platform version for runnel changes --- build.gradle | 2 +- .../common/internal/messages/ChainCodec.java | 12 ++++---- .../messages/LifeCycleMessageCodec.java | 30 +++++++++---------- .../internal/messages/MessageCodecUtils.java | 4 +-- .../internal/messages/ResponseCodec.java | 6 ++-- .../internal/messages/ServerStoreOpCodec.java | 12 ++++---- .../messages/StateRepositoryOpCodec.java | 12 ++++---- .../messages/EhcacheSyncMessageCodec.java | 14 ++++----- .../PassiveReplicationMessageCodec.java | 24 +++++++-------- 9 files changed, 58 insertions(+), 58 deletions(-) diff --git a/build.gradle b/build.gradle index bb4d2aed31..bf77edc387 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.13.beta4' + terracottaPlatformVersion = '5.0.14.beta' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.0.14.beta' terracottaCoreVersion = '5.0.14-beta2' diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java index 04365460a0..e5b907272a 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ChainCodec.java @@ -43,7 +43,7 @@ public class ChainCodec { .build(); public byte[] encode(Chain chain) { - StructEncoder encoder = CHAIN_STRUCT.encoder(); + StructEncoder encoder = CHAIN_STRUCT.encoder(); encode(encoder, chain); @@ -51,8 +51,8 @@ public byte[] encode(Chain chain) { return byteBuffer.array(); } - public void encode(StructEncoder encoder, Chain chain) { - StructArrayEncoder elementsEncoder = encoder.structs("elements"); + public void encode(StructEncoder encoder, Chain chain) { + 
StructArrayEncoder> elementsEncoder = encoder.structs("elements"); for (Element element : chain) { if (element instanceof SequencedElement) { elementsEncoder.int64("sequence", ((SequencedElement) element).getSequenceNumber()); @@ -63,12 +63,12 @@ public void encode(StructEncoder encoder, Chain chain) { } public Chain decode(byte[] payload) { - StructDecoder decoder = CHAIN_STRUCT.decoder(ByteBuffer.wrap(payload)); + StructDecoder decoder = CHAIN_STRUCT.decoder(ByteBuffer.wrap(payload)); return decode(decoder); } - public Chain decode(StructDecoder decoder) { - StructArrayDecoder elementsDecoder = decoder.structs("elements"); + public Chain decode(StructDecoder decoder) { + StructArrayDecoder> elementsDecoder = decoder.structs("elements"); final List elements = new ArrayList(); for (int i = 0; i < elementsDecoder.length(); i++) { diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java index 50cd52ca32..d80b329a4d 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodec.java @@ -124,7 +124,7 @@ public byte[] encode(LifecycleMessage message) { } private byte[] encodeReleaseStoreMessage(LifecycleMessage.ReleaseServerStore message) { - StructEncoder encoder = RELEASE_STORE_MESSAGE_STRUCTU.encoder(); + StructEncoder encoder = RELEASE_STORE_MESSAGE_STRUCTU.encoder(); messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); @@ -132,7 +132,7 @@ private byte[] encodeReleaseStoreMessage(LifecycleMessage.ReleaseServerStore mes } private byte[] encodeDestroyStoreMessage(LifecycleMessage.DestroyServerStore message) { - StructEncoder encoder = DESTROY_STORE_MESSAGE_STRUCT.encoder(); + StructEncoder encoder = DESTROY_STORE_MESSAGE_STRUCT.encoder(); messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); @@ -140,7 +140,7 @@ private byte[] encodeDestroyStoreMessage(LifecycleMessage.DestroyServerStore mes } private byte[] encodeCreateStoreMessage(LifecycleMessage.CreateServerStore message) { - StructEncoder encoder = CREATE_STORE_MESSAGE_STRUCT.encoder(); + StructEncoder encoder = CREATE_STORE_MESSAGE_STRUCT.encoder(); return encodeBaseServerStoreMessage(message, encoder); } @@ -148,7 +148,7 @@ private byte[] encodeValidateStoreMessage(LifecycleMessage.ValidateServerStore m return encodeBaseServerStoreMessage(message, VALIDATE_STORE_MESSAGE_STRUCT.encoder()); } - private byte[] encodeBaseServerStoreMessage(LifecycleMessage.BaseServerStore message, StructEncoder encoder) { + private byte[] encodeBaseServerStoreMessage(LifecycleMessage.BaseServerStore message, StructEncoder encoder) { messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); @@ -164,13 +164,13 @@ private byte[] encodeTierManagerValidateMessage(LifecycleMessage.ValidateStoreMa return encodeTierManagerCreateOrValidate(message, message.getConfiguration(), VALIDATE_MESSAGE_STRUCT.encoder()); } - private byte[] encodeTierManagerCreateOrValidate(LifecycleMessage message, ServerSideConfiguration config, StructEncoder encoder) { + private byte[] encodeTierManagerCreateOrValidate(LifecycleMessage message, ServerSideConfiguration config, 
StructEncoder encoder) { messageCodecUtils.encodeMandatoryFields(encoder, message); encodeServerSideConfiguration(encoder, config); return encoder.encode().array(); } - private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration) { + private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration) { if (configuration == null) { encoder.bool(CONFIG_PRESENT_FIELD, false); } else { @@ -180,7 +180,7 @@ private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConf } if (!configuration.getResourcePools().isEmpty()) { - StructArrayEncoder poolsEncoder = encoder.structs(POOLS_SUB_STRUCT); + StructArrayEncoder> poolsEncoder = encoder.structs(POOLS_SUB_STRUCT); for (Map.Entry poolEntry : configuration.getResourcePools().entrySet()) { poolsEncoder.string(POOL_NAME_FIELD, poolEntry.getKey()) .int64(POOL_SIZE_FIELD, poolEntry.getValue().getSize()); @@ -194,14 +194,14 @@ private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConf } } - private ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder) { + private ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder) { boolean configPresent = decoder.bool(CONFIG_PRESENT_FIELD); if (configPresent) { String defaultResource = decoder.string(DEFAULT_RESOURCE_FIELD); HashMap resourcePools = new HashMap(); - StructArrayDecoder poolStructs = decoder.structs(POOLS_SUB_STRUCT); + StructArrayDecoder> poolStructs = decoder.structs(POOLS_SUB_STRUCT); if (poolStructs != null) { for (int i = 0; i < poolStructs.length(); i++) { String poolName = poolStructs.string(POOL_NAME_FIELD); @@ -248,7 +248,7 @@ public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer me } private LifecycleMessage.ReleaseServerStore decodeReleaseServerStoreMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = RELEASE_STORE_MESSAGE_STRUCTU.decoder(messageBuffer); + StructDecoder decoder = RELEASE_STORE_MESSAGE_STRUCTU.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID cliendId = messageCodecUtils.decodeUUID(decoder); @@ -261,7 +261,7 @@ private LifecycleMessage.ReleaseServerStore decodeReleaseServerStoreMessage(Byte } private LifecycleMessage.DestroyServerStore decodeDestroyServerStoreMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = DESTROY_STORE_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = DESTROY_STORE_MESSAGE_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID cliendId = messageCodecUtils.decodeUUID(decoder); @@ -274,7 +274,7 @@ private LifecycleMessage.DestroyServerStore decodeDestroyServerStoreMessage(Byte } private LifecycleMessage.ValidateServerStore decodeValidateServerStoreMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = VALIDATE_STORE_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = VALIDATE_STORE_MESSAGE_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID cliendId = messageCodecUtils.decodeUUID(decoder); @@ -288,7 +288,7 @@ private LifecycleMessage.ValidateServerStore decodeValidateServerStoreMessage(By } private LifecycleMessage.CreateServerStore decodeCreateServerStoreMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = CREATE_STORE_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = CREATE_STORE_MESSAGE_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID cliendId = messageCodecUtils.decodeUUID(decoder); @@ -302,7 
+302,7 @@ private LifecycleMessage.CreateServerStore decodeCreateServerStoreMessage(ByteBu } private LifecycleMessage.ValidateStoreManager decodeValidateMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = VALIDATE_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = VALIDATE_MESSAGE_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID cliendId = messageCodecUtils.decodeUUID(decoder); @@ -317,7 +317,7 @@ private LifecycleMessage.ValidateStoreManager decodeValidateMessage(ByteBuffer m } private LifecycleMessage.ConfigureStoreManager decodeConfigureMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = CONFIGURE_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = CONFIGURE_MESSAGE_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID clientId = messageCodecUtils.decodeUUID(decoder); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java index e3c7132226..f4faa6476c 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java @@ -56,14 +56,14 @@ public class MessageCodecUtils { .mapping(Consistency.STRONG, 2) .build(); - public void encodeMandatoryFields(StructEncoder encoder, EhcacheOperationMessage message) { + public void encodeMandatoryFields(StructEncoder encoder, EhcacheOperationMessage message) { encoder.enm(EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) .int64(MSG_ID_FIELD, message.getId()) .int64(MSB_UUID_FIELD, message.getClientId().getMostSignificantBits()) .int64(LSB_UUID_FIELD, message.getClientId().getLeastSignificantBits()); } - public UUID decodeUUID(StructDecoder decoder) { + public UUID decodeUUID(StructDecoder decoder) { return new UUID(decoder.int64(MSB_UUID_FIELD), decoder.int64(LSB_UUID_FIELD)); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java index 4e1070a4aa..6aa1ffcc7b 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java @@ -112,9 +112,9 @@ public byte[] encode(EhcacheEntityResponse response) { final EhcacheEntityResponse.GetResponse getResponse = (EhcacheEntityResponse.GetResponse)response; return GET_RESPONSE_STRUCT.encoder() .enm(RESPONSE_TYPE_FIELD_NAME, getResponse.getResponseType()) - .struct(CHAIN_FIELD, new StructEncoderFunction() { + .struct(CHAIN_FIELD, new StructEncoderFunction>>() { @Override - public void encode(StructEncoder encoder) { + public void encode(StructEncoder> encoder) { chainCodec.encode(encoder, getResponse.getChain()); } }) @@ -174,7 +174,7 @@ public void encode(StructEncoder encoder) { public EhcacheEntityResponse decode(byte[] payload) { ByteBuffer buffer = wrap(payload); - StructDecoder decoder = SUCCESS_RESPONSE_STRUCT.decoder(buffer); + StructDecoder decoder = SUCCESS_RESPONSE_STRUCT.decoder(buffer); Enm opCodeEnm = decoder.enm(RESPONSE_TYPE_FIELD_NAME); if (!opCodeEnm.isFound()) { diff --git 
a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java index e4651fee7c..615da83c6c 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java @@ -105,7 +105,7 @@ class ServerStoreOpCodec { } public byte[] encode(ServerStoreOpMessage message) { - StructEncoder encoder = null; + StructEncoder encoder = null; switch (message.getMessageType()) { case GET_STORE: @@ -145,16 +145,16 @@ public byte[] encode(ServerStoreOpMessage message) { return encoder .string(SERVER_STORE_NAME_FIELD, replaceAtHeadMessage.getCacheId()) .int64(KEY_FIELD, replaceAtHeadMessage.getKey()) - .struct("expect", new StructEncoderFunction() { + .struct("expect", new StructEncoderFunction>>() { @Override - public void encode(StructEncoder encoder) { + public void encode(StructEncoder> encoder) { Chain expect = replaceAtHeadMessage.getExpect(); chainCodec.encode(encoder, expect); } }) - .struct("update", new StructEncoderFunction() { + .struct("update", new StructEncoderFunction>>() { @Override - public void encode(StructEncoder encoder) { + public void encode(StructEncoder> encoder) { Chain update = replaceAtHeadMessage.getUpdate(); chainCodec.encode(encoder, update); } @@ -185,7 +185,7 @@ public void encode(StructEncoder encoder) { } public EhcacheEntityMessage decode(EhcacheMessageType opCode, ByteBuffer messageBuffer) { - StructDecoder decoder; + StructDecoder decoder; switch (opCode) { case GET_STORE: { decoder = GET_MESSAGE_STRUCT.decoder(messageBuffer); diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java index 649243b9f4..5275004fdd 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java @@ -87,7 +87,7 @@ public byte[] encode(StateRepositoryOpMessage message) { } private byte[] encodeEntrySetMessage(StateRepositoryOpMessage.EntrySetMessage message) { - StructEncoder encoder = ENTRY_SET_MESSAGE_STRUCT.encoder(); + StructEncoder encoder = ENTRY_SET_MESSAGE_STRUCT.encoder(); messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); @@ -97,7 +97,7 @@ private byte[] encodeEntrySetMessage(StateRepositoryOpMessage.EntrySetMessage me } private byte[] encodePutIfAbsentMessage(StateRepositoryOpMessage.PutIfAbsentMessage message) { - StructEncoder encoder = PUT_IF_ABSENT_MESSAGE_STRUCT.encoder(); + StructEncoder encoder = PUT_IF_ABSENT_MESSAGE_STRUCT.encoder(); messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); @@ -110,7 +110,7 @@ private byte[] encodePutIfAbsentMessage(StateRepositoryOpMessage.PutIfAbsentMess } private byte[] encodeGetMessage(StateRepositoryOpMessage.GetMessage message) { - StructEncoder encoder = GET_MESSAGE_STRUCT.encoder(); + StructEncoder encoder = GET_MESSAGE_STRUCT.encoder(); messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getCacheId()); @@ -135,7 +135,7 @@ 
public StateRepositoryOpMessage decode(EhcacheMessageType messageType, ByteBuffe } private StateRepositoryOpMessage.EntrySetMessage decodeEntrySetMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = ENTRY_SET_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = ENTRY_SET_MESSAGE_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID clientId = messageCodecUtils.decodeUUID(decoder); @@ -149,7 +149,7 @@ private StateRepositoryOpMessage.EntrySetMessage decodeEntrySetMessage(ByteBuffe } private StateRepositoryOpMessage.PutIfAbsentMessage decodePutIfAbsentMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = PUT_IF_ABSENT_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = PUT_IF_ABSENT_MESSAGE_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID clientId = messageCodecUtils.decodeUUID(decoder); @@ -169,7 +169,7 @@ private StateRepositoryOpMessage.PutIfAbsentMessage decodePutIfAbsentMessage(Byt } private StateRepositoryOpMessage.GetMessage decodeGetMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = GET_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = GET_MESSAGE_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID clientId = messageCodecUtils.decodeUUID(decoder); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java index ee90fa114a..1f009b4e10 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java @@ -101,7 +101,7 @@ public class EhcacheSyncMessageCodec implements SyncMessageCodec encoder; switch (syncMessage.getMessageType()) { case STATE: encoder = STATE_SYNC_STRUCT.encoder(); @@ -129,7 +129,7 @@ public byte[] encode(final int concurrencyKey, final EhcacheEntityMessage messag } } - private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration) { + private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration) { if (configuration.getDefaultServerResource() != null) { encoder.string(DEFAULT_RESOURCE_FIELD, configuration.getDefaultServerResource()); } @@ -146,7 +146,7 @@ private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConf @Override public EhcacheSyncMessage decode(final int concurrencyKey, final byte[] payload) throws MessageCodecException { ByteBuffer message = ByteBuffer.wrap(payload); - StructDecoder decoder = STATE_SYNC_STRUCT.decoder(message); + StructDecoder decoder = STATE_SYNC_STRUCT.decoder(message); Enm enm = decoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME); if (!enm.isFound()) { throw new AssertionError("Invalid message format - misses the message type field"); @@ -173,10 +173,10 @@ public EhcacheSyncMessage decode(final int concurrencyKey, final byte[] payload) } } - private Map decodeStoreConfigurations(StructDecoder decoder) { + private Map decodeStoreConfigurations(StructDecoder decoder) { Map result = new HashMap<>(); - StructArrayDecoder storesDecoder = decoder.structs(STORES_SUB_STRUCT); + StructArrayDecoder> storesDecoder = decoder.structs(STORES_SUB_STRUCT); if (storesDecoder != null) { for (int i = 0; i < storesDecoder.length(); i++) { String storeName = 
storesDecoder.string(SERVER_STORE_NAME_FIELD); @@ -187,10 +187,10 @@ private Map decodeStoreConfigurations(StructDe return result; } - private ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder) { + private ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder) { String defaultResource = decoder.string(DEFAULT_RESOURCE_FIELD); Map pools = new HashMap<>(); - StructArrayDecoder poolsDecoder = decoder.structs(POOLS_SUB_STRUCT); + StructArrayDecoder> poolsDecoder = decoder.structs(POOLS_SUB_STRUCT); if (poolsDecoder != null) { for (int i = 0; i < poolsDecoder.length(); i++) { String poolName = poolsDecoder.string(POOL_NAME_FIELD); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java index 95f5dac747..eced67d4d4 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java @@ -125,7 +125,7 @@ public byte[] encode(PassiveReplicationMessage message) { } private byte[] encoreDestroyServerStoreReplicationMessage(PassiveReplicationMessage.DestroyServerStoreReplicationMessage message) { - StructEncoder encoder = DESTROY_SERVER_STORE_REPLICATION_STRUCT.encoder(); + StructEncoder encoder = DESTROY_SERVER_STORE_REPLICATION_STRUCT.encoder(); messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getStoreName()); @@ -134,7 +134,7 @@ private byte[] encoreDestroyServerStoreReplicationMessage(PassiveReplicationMess } private byte[] encodeCreateServerStoreReplicationMessage(PassiveReplicationMessage.CreateServerStoreReplicationMessage message) { - StructEncoder encoder = CREATE_SERVER_STORE_REPLICATION_STRUCT.encoder(); + StructEncoder encoder = CREATE_SERVER_STORE_REPLICATION_STRUCT.encoder(); messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getStoreName()); @@ -144,7 +144,7 @@ private byte[] encodeCreateServerStoreReplicationMessage(PassiveReplicationMessa } private byte[] encodeInvalidationCompleteMessage(PassiveReplicationMessage.InvalidationCompleteMessage message) { - StructEncoder encoder = INVALIDATION_COMPLETE_STRUCT.encoder(); + StructEncoder encoder = INVALIDATION_COMPLETE_STRUCT.encoder(); encoder.enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) .string(SERVER_STORE_NAME_FIELD, message.getCacheId()) @@ -154,7 +154,7 @@ private byte[] encodeInvalidationCompleteMessage(PassiveReplicationMessage.Inval } private byte[] encodeClearInvalidationCompleteMessage(PassiveReplicationMessage.ClearInvalidationCompleteMessage message) { - StructEncoder encoder = CLEAR_INVALIDATION_COMPLETE_STRUCT.encoder(); + StructEncoder encoder = CLEAR_INVALIDATION_COMPLETE_STRUCT.encoder(); encoder.enm(MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) .string(SERVER_STORE_NAME_FIELD, message.getCacheId()); @@ -163,7 +163,7 @@ private byte[] encodeClearInvalidationCompleteMessage(PassiveReplicationMessage. 
} private byte[] encodeChainReplicationMessage(PassiveReplicationMessage.ChainReplicationMessage message) { - StructEncoder encoder = CHAIN_REPLICATION_STRUCT.encoder(); + StructEncoder encoder = CHAIN_REPLICATION_STRUCT.encoder(); messageCodecUtils.encodeMandatoryFields(encoder, message); @@ -175,7 +175,7 @@ private byte[] encodeChainReplicationMessage(PassiveReplicationMessage.ChainRepl } private byte[] encodeClientIdTrackMessage(PassiveReplicationMessage.ClientIDTrackerMessage message) { - StructEncoder encoder = CLIENT_ID_TRACK_STRUCT.encoder(); + StructEncoder encoder = CLIENT_ID_TRACK_STRUCT.encoder(); encoder.enm(EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) .int64(MSB_UUID_FIELD, message.getClientId().getMostSignificantBits()) @@ -205,7 +205,7 @@ public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer me } private PassiveReplicationMessage.DestroyServerStoreReplicationMessage decodeDestroyServerStoreReplicationMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = DESTROY_SERVER_STORE_REPLICATION_STRUCT.decoder(messageBuffer); + StructDecoder decoder = DESTROY_SERVER_STORE_REPLICATION_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID clientId = messageCodecUtils.decodeUUID(decoder); @@ -216,7 +216,7 @@ private PassiveReplicationMessage.DestroyServerStoreReplicationMessage decodeDes } private PassiveReplicationMessage.CreateServerStoreReplicationMessage decodeCreateServerStoreReplicationMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = CREATE_SERVER_STORE_REPLICATION_STRUCT.decoder(messageBuffer); + StructDecoder decoder = CREATE_SERVER_STORE_REPLICATION_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID clientId = messageCodecUtils.decodeUUID(decoder); @@ -228,7 +228,7 @@ private PassiveReplicationMessage.CreateServerStoreReplicationMessage decodeCrea } private PassiveReplicationMessage.InvalidationCompleteMessage decodeInvalidationCompleteMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = INVALIDATION_COMPLETE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = INVALIDATION_COMPLETE_STRUCT.decoder(messageBuffer); String storeId = decoder.string(SERVER_STORE_NAME_FIELD); Long key = decoder.int64(KEY_FIELD); @@ -237,12 +237,12 @@ private PassiveReplicationMessage.InvalidationCompleteMessage decodeInvalidation } private PassiveReplicationMessage.ClearInvalidationCompleteMessage decodeClearInvalidationCompleteMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = CLEAR_INVALIDATION_COMPLETE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = CLEAR_INVALIDATION_COMPLETE_STRUCT.decoder(messageBuffer); return new PassiveReplicationMessage.ClearInvalidationCompleteMessage(decoder.string(SERVER_STORE_NAME_FIELD)); } private PassiveReplicationMessage.ChainReplicationMessage decodeChainReplicationMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = CHAIN_REPLICATION_STRUCT.decoder(messageBuffer); + StructDecoder decoder = CHAIN_REPLICATION_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID clientId = messageCodecUtils.decodeUUID(decoder); @@ -256,7 +256,7 @@ private PassiveReplicationMessage.ChainReplicationMessage decodeChainReplication } private PassiveReplicationMessage.ClientIDTrackerMessage decodeClientIdTrackMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = CLIENT_ID_TRACK_STRUCT.decoder(messageBuffer); + StructDecoder decoder = CLIENT_ID_TRACK_STRUCT.decoder(messageBuffer); UUID clientId = 
messageCodecUtils.decodeUUID(decoder); From 982c3555d4570cd9ff2094ebce6a2286b666a155 Mon Sep 17 00:00:00 2001 From: Abhilash Date: Thu, 8 Dec 2016 19:30:51 +0530 Subject: [PATCH 191/218] Marshalling/Unmarshalling exceptions using runnel #1645 --- .../exceptions/UnknownClusterException.java | 33 +++++ .../internal/messages/ExceptionCodec.java | 120 ++++++++++++++++++ .../internal/messages/ResponseCodec.java | 16 ++- 3 files changed, 164 insertions(+), 5 deletions(-) create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java new file mode 100644 index 0000000000..da21efeddb --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/UnknownClusterException.java @@ -0,0 +1,33 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.exceptions; + +public class UnknownClusterException extends ClusterException{ + + public UnknownClusterException(String message) { + super(message); + } + + private UnknownClusterException(Throwable throwable) { + super(throwable); + } + + @Override + public ClusterException withClientStackTrace() { + return new UnknownClusterException(this); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java new file mode 100644 index 0000000000..8618f6ef34 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ExceptionCodec.java @@ -0,0 +1,120 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.internal.exceptions.ClusterException; +import org.ehcache.clustered.common.internal.exceptions.UnknownClusterException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.StructArrayEncoder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; + +class ExceptionCodec { + + private static final Logger LOGGER = LoggerFactory.getLogger(ExceptionCodec.class); + + private static final String DECLARING_CLASS_FIELD = "declaringClass"; + private static final String METHOD_NAME_FIELD = "methodName"; + private static final String FILE_NAME_FIELD = "fileName"; + private static final String LINE_NUM_FIELD = "lineNumber"; + private static final String FQCN_FIELD = "fqcn"; + private static final String MESSAGE_FIELD = "message"; + private static final String STACKTRACE_ELEMENTS_FIELD = "stacktraceElements"; + + private static final Struct STE_STRUCT = StructBuilder.newStructBuilder() + .string(DECLARING_CLASS_FIELD, 10) + .string(METHOD_NAME_FIELD, 20) + .string(FILE_NAME_FIELD, 30) + .int32(LINE_NUM_FIELD, 40) + .build(); + + static final Struct EXCEPTION_STRUCT = StructBuilder.newStructBuilder() + .string(FQCN_FIELD, 10) + .string(MESSAGE_FIELD, 20) + .structs(STACKTRACE_ELEMENTS_FIELD, 30, STE_STRUCT) + .build(); + + public void encode(StructEncoder> encoder, ClusterException exception) { + encoder.string(FQCN_FIELD, exception.getClass().getCanonicalName()); + encoder.string(MESSAGE_FIELD, exception.getMessage()); + StructArrayEncoder>> arrayEncoder = encoder.structs(STACKTRACE_ELEMENTS_FIELD); + for (StackTraceElement stackTraceElement : exception.getStackTrace()) { + arrayEncoder.string(DECLARING_CLASS_FIELD, stackTraceElement.getClassName()); + arrayEncoder.string(METHOD_NAME_FIELD, stackTraceElement.getMethodName()); + if (stackTraceElement.getFileName() != null) { + arrayEncoder.string(FILE_NAME_FIELD, stackTraceElement.getFileName()); + } + arrayEncoder.int32(LINE_NUM_FIELD, stackTraceElement.getLineNumber()); + arrayEncoder.next(); + } + arrayEncoder.end(); + } + + public ClusterException decode(StructDecoder> decoder) { + String exceptionClassName = decoder.string(FQCN_FIELD); + String message = decoder.string(MESSAGE_FIELD); + StructArrayDecoder>> arrayDecoder = decoder.structs(STACKTRACE_ELEMENTS_FIELD); + StackTraceElement[] stackTraceElements = new StackTraceElement[arrayDecoder.length()]; + for (int i = 0; i < arrayDecoder.length(); i++) { + stackTraceElements[i] = new StackTraceElement(arrayDecoder.string(DECLARING_CLASS_FIELD), arrayDecoder.string(METHOD_NAME_FIELD), arrayDecoder + .string(FILE_NAME_FIELD), arrayDecoder.int32(LINE_NUM_FIELD)); + arrayDecoder.next(); + } + arrayDecoder.end(); + Class clazz = null; + ClusterException exception = null; + try { + clazz = Class.forName(exceptionClassName); + } catch (ClassNotFoundException e) { + LOGGER.error("Exception type not found", e); + } + exception = getClusterException(message, clazz); + if (exception == null) { + exception = new UnknownClusterException(message); + } + exception.setStackTrace(stackTraceElements); + return exception; + } + + @SuppressWarnings("unchecked") + private ClusterException 
getClusterException(String message, Class clazz) { + ClusterException exception = null; + if (clazz != null) { + try { + Constructor declaredConstructor = clazz.getDeclaredConstructor(String.class); + exception = (ClusterException)declaredConstructor.newInstance(message); + } catch (NoSuchMethodException e) { + LOGGER.error("Failed to instantiate exception object.", e); + } catch (IllegalAccessException e) { + LOGGER.error("Failed to instantiate exception object.", e); + } catch (InstantiationException e) { + LOGGER.error("Failed to instantiate exception object.", e); + } catch (InvocationTargetException e) { + LOGGER.error("Failed to instantiate exception object.", e); + } + } + return exception; + } + +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java index 6aa1ffcc7b..83153131f4 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java @@ -48,12 +48,14 @@ class ResponseCodec { private static final String CHAIN_FIELD = "chain"; private static final String MAP_VALUE_FIELD = "mapValue"; + private final ExceptionCodec exceptionCodec = new ExceptionCodec(); + private static final Struct SUCCESS_RESPONSE_STRUCT = StructBuilder.newStructBuilder() .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) .build(); private static final Struct FAILURE_RESPONSE_STRUCT = StructBuilder.newStructBuilder() .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) - .byteBuffer(EXCEPTION_FIELD, 20) + .struct(EXCEPTION_FIELD, 20, ExceptionCodec.EXCEPTION_STRUCT) .build(); private static final Struct GET_RESPONSE_STRUCT = StructBuilder.newStructBuilder() .enm(RESPONSE_TYPE_FIELD_NAME, RESPONSE_TYPE_FIELD_INDEX, EHCACHE_RESPONSE_TYPES_ENUM_MAPPING) @@ -98,11 +100,15 @@ class ResponseCodec { public byte[] encode(EhcacheEntityResponse response) { switch (response.getResponseType()) { case FAILURE: - EhcacheEntityResponse.Failure failure = (EhcacheEntityResponse.Failure)response; - byte[] failureMsg = Util.marshall(failure.getCause()); + final EhcacheEntityResponse.Failure failure = (EhcacheEntityResponse.Failure)response; return FAILURE_RESPONSE_STRUCT.encoder() .enm(RESPONSE_TYPE_FIELD_NAME, failure.getResponseType()) - .byteBuffer(EXCEPTION_FIELD, wrap(failureMsg)) + .struct(EXCEPTION_FIELD, new StructEncoderFunction>>() { + @Override + public void encode(StructEncoder> encoder) { + exceptionCodec.encode(encoder, failure.getCause()); + } + }) .encode().array(); case SUCCESS: return SUCCESS_RESPONSE_STRUCT.encoder() @@ -192,7 +198,7 @@ public EhcacheEntityResponse decode(byte[] payload) { return EhcacheEntityResponse.Success.INSTANCE; case FAILURE: decoder = FAILURE_RESPONSE_STRUCT.decoder(buffer); - ClusterException exception = (ClusterException)Util.unmarshall(decoder.byteBuffer(EXCEPTION_FIELD)); + ClusterException exception = exceptionCodec.decode(decoder.struct(EXCEPTION_FIELD)); return new EhcacheEntityResponse.Failure(exception.withClientStackTrace()); case GET_RESPONSE: decoder = GET_RESPONSE_STRUCT.decoder(buffer); From 3de79605afa8853ea610c7fbba8475e40a3bb316 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Fri, 9 Dec 2016 20:07:48 +0530 Subject: [PATCH 192/218] Closes #1685 Implement batching for data sync 
messages --- .../clustered/server/EhcacheActiveEntity.java | 31 ++++-- .../server/EhcachePassiveEntity.java | 6 +- .../messages/EhcacheDataSyncMessage.java | 20 ++-- .../messages/EhcacheSyncMessageCodec.java | 47 ++++++--- .../server/EhcacheActiveEntityTest.java | 96 +++++++++++++++++++ .../messages/EhcacheSyncMessageCodecTest.java | 22 +++-- 6 files changed, 185 insertions(+), 37 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index f2a66cf446..2ef99024db 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -30,6 +30,8 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.ServerSideConfiguration; @@ -37,13 +39,11 @@ import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.internal.exceptions.ClusterException; -import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; import org.ehcache.clustered.common.internal.exceptions.InvalidClientIdException; import org.ehcache.clustered.common.internal.exceptions.InvalidOperationException; import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; import org.ehcache.clustered.common.internal.exceptions.LifecycleException; import org.ehcache.clustered.common.internal.exceptions.ResourceBusyException; -import org.ehcache.clustered.common.internal.exceptions.ServerMisconfigurationException; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; @@ -51,6 +51,7 @@ import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; import org.ehcache.clustered.common.internal.messages.EhcacheOperationMessage; import org.ehcache.clustered.common.internal.messages.LifecycleMessage; +import org.ehcache.clustered.common.internal.store.Element; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClearInvalidationCompleteMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ClientIDTrackerMessage; @@ -105,6 +106,8 @@ class EhcacheActiveEntity implements ActiveServerEntity { private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheActiveEntity.class); + static final String SYNC_DATA_SIZE_PROP = "ehcache.sync.data.size.threshold"; + private static final long DEFAULT_SYNC_DATA_SIZE_THRESHOLD = 4 * 1024 * 1024; private final UUID identity; @@ -370,10 +373,13 @@ public void synchronizeKeyToPassive(PassiveSynchronizationChannel { ServerSideServerStore store = ehcacheStateService.getStore(name); - store.getSegments().get(concurrencyKey - DATA_CONCURRENCY_KEY_OFFSET).keySet().stream() + final AtomicReference> mappingsToSend = new AtomicReference<>(new HashMap<>()); + store.getSegments().get(concurrencyKey - DATA_CONCURRENCY_KEY_OFFSET).keySet() .forEach(key -> { final Chain chain; try { @@ -381,8 +387,21 @@ public 
void synchronizeKeyToPassive(PassiveSynchronizationChannel dataSizeThreshold) { + syncChannel.synchronizeToPassive(new EhcacheDataSyncMessage(name, mappingsToSend.get())); + mappingsToSend.set(new HashMap<>()); + size.set(0); + } }); + if (!mappingsToSend.get().isEmpty()) { + syncChannel.synchronizeToPassive(new EhcacheDataSyncMessage(name, mappingsToSend.get())); + mappingsToSend.set(new HashMap<>()); + size.set(0); + } }); } LOGGER.info("Sync complete for concurrency key {}.", concurrencyKey); @@ -515,7 +534,7 @@ private EhcacheEntityResponse invokeServerStoreOperation(ClientDescriptor client ServerStoreOpMessage.AppendMessage appendMessage = (ServerStoreOpMessage.AppendMessage)message; final Chain newChain; try { - cacheStore.getAndAppend(appendMessage.getKey(), appendMessage.getPayload()); + cacheStore.append(appendMessage.getKey(), appendMessage.getPayload()); newChain = cacheStore.get(appendMessage.getKey()); } catch (TimeoutException e) { throw new AssertionError("Server side store is not expected to throw timeout exception"); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index 7fa04a2ac5..ebcf40ba6b 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -218,7 +218,11 @@ private void invokeSyncOperation(EhcacheSyncMessage message) throws ClusterExcep break; case DATA: EhcacheDataSyncMessage dataSyncMessage = (EhcacheDataSyncMessage) message; - ehcacheStateService.getStore(dataSyncMessage.getCacheId()).put(dataSyncMessage.getKey(), dataSyncMessage.getChain()); + ServerSideServerStore store = ehcacheStateService.getStore(dataSyncMessage.getCacheId()); + dataSyncMessage.getChainMap().entrySet().forEach(entry -> { + store.put(entry.getKey(), entry.getValue()); + + }); break; default: throw new AssertionError("Unsupported Sync operation " + message.getMessageType()); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java index 603c470e1b..c5b10b826c 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheDataSyncMessage.java @@ -20,17 +20,19 @@ import com.tc.classloader.CommonComponent; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + @CommonComponent public class EhcacheDataSyncMessage extends EhcacheSyncMessage { private final String cacheId; - private final long key; - private final Chain chain; + private final Map chainMap; - public EhcacheDataSyncMessage(final String cacheId, final long key, final Chain chain) { + public EhcacheDataSyncMessage(final String cacheId, final Map chainMap) { this.cacheId = cacheId; - this.key = key; - this.chain = chain; + this.chainMap = Collections.unmodifiableMap(chainMap); } @Override @@ -42,11 +44,7 @@ public String getCacheId() { return cacheId; } - public long getKey() { - return key; - } - - public Chain getChain() { - return chain; + public Map getChainMap() { + return chainMap; } } diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java 
b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java index 1f009b4e10..f41939c1ce 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodec.java @@ -63,6 +63,7 @@ public class EhcacheSyncMessageCodec implements SyncMessageCodec encoder; switch (syncMessage.getMessageType()) { - case STATE: + case STATE: { encoder = STATE_SYNC_STRUCT.encoder(); - EhcacheStateSyncMessage stateSyncMessage = (EhcacheStateSyncMessage) syncMessage; + EhcacheStateSyncMessage stateSyncMessage = (EhcacheStateSyncMessage)syncMessage; encoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME, STATE); encodeServerSideConfiguration(encoder, stateSyncMessage.getConfiguration()); encoder.structs(STORES_SUB_STRUCT, stateSyncMessage.getStoreConfigs().entrySet(), (storeEncoder, storeEntry) -> { @@ -113,14 +118,19 @@ public byte[] encode(final int concurrencyKey, final EhcacheEntityMessage messag codecUtils.encodeServerStoreConfiguration(storeEncoder, storeEntry.getValue()); }); return encoder.encode().array(); - case DATA: + } + case DATA: { encoder = DATA_SYNC_STRUCT.encoder(); - EhcacheDataSyncMessage dataSyncMessage = (EhcacheDataSyncMessage) syncMessage; + EhcacheDataSyncMessage dataSyncMessage = (EhcacheDataSyncMessage)syncMessage; encoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME, DATA); encoder.string(SERVER_STORE_NAME_FIELD, dataSyncMessage.getCacheId()); - encoder.int64(KEY_FIELD, dataSyncMessage.getKey()); - encoder.struct(CHAIN_FIELD, (chainEncoder) -> chainCodec.encode(chainEncoder, dataSyncMessage.getChain())); + encoder.structs(CHAIN_MAP_ENTRIES_SUB_STRUCT, + dataSyncMessage.getChainMap().entrySet(), (entryEncoder, entry) -> { + entryEncoder.int64(KEY_FIELD, entry.getKey()); + chainCodec.encode(entryEncoder.struct(CHAIN_FIELD), entry.getValue()); + }); return encoder.encode().array(); + } default: throw new IllegalArgumentException("Sync message codec can not encode " + syncMessage.getMessageType()); } @@ -165,14 +175,29 @@ public EhcacheSyncMessage decode(final int concurrencyKey, final byte[] payload) message.rewind(); decoder = DATA_SYNC_STRUCT.decoder(message); String storeName = decoder.string(SERVER_STORE_NAME_FIELD); - Long key = decoder.int64(KEY_FIELD); - Chain chain = chainCodec.decode(decoder.struct(CHAIN_FIELD)); - return new EhcacheDataSyncMessage(storeName, key, chain); + Map chainMap = decodeChainMapEntries(decoder); + return new EhcacheDataSyncMessage(storeName, chainMap); default: throw new AssertionError("Cannot happen given earlier checks"); } } + private Map decodeChainMapEntries(StructDecoder decoder) { + Map chainMap = new HashMap<>(); + + StructArrayDecoder> entriesDecoder = decoder.structs(CHAIN_MAP_ENTRIES_SUB_STRUCT); + if (entriesDecoder != null) { + for (int i = 0; i < entriesDecoder.length(); i++) { + Long key = entriesDecoder.int64(KEY_FIELD); + StructDecoder>> chainDecoder = entriesDecoder.struct(CHAIN_FIELD); + Chain chain = chainCodec.decode(chainDecoder); + chainMap.put(key, chain); + entriesDecoder.next(); + } + } + return chainMap; + } + private Map decodeStoreConfigurations(StructDecoder decoder) { Map result = new HashMap<>(); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java index 7448fae1ee..689fac54cc 100644 --- 
a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcacheActiveEntityTest.java @@ -39,6 +39,7 @@ import org.ehcache.clustered.common.internal.messages.LifeCycleMessageFactory; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.CreateServerStore; import org.ehcache.clustered.common.internal.messages.LifecycleMessage.DestroyServerStore; +import org.ehcache.clustered.server.internal.messages.EhcacheDataSyncMessage; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage; import org.ehcache.clustered.common.internal.messages.ServerStoreMessageFactory; import org.ehcache.clustered.server.internal.messages.EhcacheStateSyncMessage; @@ -63,6 +64,7 @@ import org.terracotta.offheapresource.OffHeapResources; import org.terracotta.offheapstore.util.MemoryUnit; +import java.nio.ByteBuffer; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -75,6 +77,7 @@ import org.ehcache.clustered.common.PoolAllocation.Dedicated; import static org.ehcache.clustered.common.internal.store.Util.createPayload; +import static org.ehcache.clustered.server.EhcacheActiveEntity.SYNC_DATA_SIZE_PROP; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -85,6 +88,7 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; +import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.times; @@ -2635,6 +2639,98 @@ public void testSyncToPassive() throws Exception { } + @Test + public void testDataSyncToPassiveBatchedByDefault() throws Exception { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("myCache", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("myCache", UUID.randomUUID()); + + ByteBuffer payload = ByteBuffer.allocate(512); + // Put keys that maps to the same concurrency key + activeEntity.invoke(client, messageFactory.appendOperation(1L, payload)); + activeEntity.invoke(client, messageFactory.appendOperation(-2L, payload)); + activeEntity.invoke(client, messageFactory.appendOperation(17L, payload)); + + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, 3); + + 
verify(syncChannel).synchronizeToPassive(any(EhcacheDataSyncMessage.class)); + } + + @Test + public void testDataSyncToPassiveCustomBatchSize() throws Exception { + final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); + registry.addResource("serverResource1", 32, MemoryUnit.MEGABYTES); + registry.addResource("serverResource2", 32, MemoryUnit.MEGABYTES); + + final EhcacheActiveEntity activeEntity = new EhcacheActiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); + ClientDescriptor client = new TestClientDescriptor(); + activeEntity.connected(client); + + ServerSideConfiguration serverSideConfiguration = new ServerSideConfigBuilder() + .defaultResource("serverResource1") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build(); + + activeEntity.invoke(client, + MESSAGE_FACTORY.configureStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.validateStoreManager(serverSideConfiguration)); + + activeEntity.invoke(client, + MESSAGE_FACTORY.createServerStore("myCache", + new ServerStoreConfigBuilder() + .shared("primary") + .build())); + + ServerStoreMessageFactory messageFactory = new ServerStoreMessageFactory("myCache", UUID.randomUUID()); + + ByteBuffer payload = ByteBuffer.allocate(512); + // Put keys that maps to the same concurrency key + activeEntity.invoke(client, messageFactory.appendOperation(1L, payload)); + activeEntity.invoke(client, messageFactory.appendOperation(-2L, payload)); + activeEntity.invoke(client, messageFactory.appendOperation(17L, payload)); + activeEntity.invoke(client, messageFactory.appendOperation(33L, payload)); + + System.setProperty(SYNC_DATA_SIZE_PROP, "512"); + try { + @SuppressWarnings("unchecked") + PassiveSynchronizationChannel syncChannel = mock(PassiveSynchronizationChannel.class); + activeEntity.synchronizeKeyToPassive(syncChannel, 3); + + verify(syncChannel, atLeast(2)).synchronizeToPassive(any(EhcacheDataSyncMessage.class)); + } finally { + System.clearProperty(SYNC_DATA_SIZE_PROP); + } + } + @Test public void testSyncToPassiveWithoutDefaultServerResource() throws Exception { final OffHeapIdentifierRegistry registry = new OffHeapIdentifierRegistry(); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java index f582621bbc..84a64c3950 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java @@ -19,13 +19,11 @@ import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.store.Chain; import org.junit.Test; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; -import java.util.Set; -import java.util.UUID; import static org.ehcache.clustered.common.internal.store.Util.chainsEqual; import static org.ehcache.clustered.common.internal.store.Util.createPayload; @@ -93,11 +91,19 @@ public void testStateSyncMessageEncodeDecode() throws Exception { @Test public void testDataSyncMessageEncodeDecode() throws Exception { EhcacheSyncMessageCodec codec = new 
EhcacheSyncMessageCodec(); - EhcacheDataSyncMessage message = new EhcacheDataSyncMessage("foo", 123L, - getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L))); - EhcacheDataSyncMessage decoded = (EhcacheDataSyncMessage) codec.decode(0, codec.encode(0, message)); + Map chainMap = new HashMap<>(); + Chain chain = getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L)); + chainMap.put(1L, chain); + chainMap.put(2L, chain); + chainMap.put(3L, chain); + EhcacheDataSyncMessage message = new EhcacheDataSyncMessage("foo", chainMap); + byte[] encodedMessage = codec.encode(0, message); + EhcacheDataSyncMessage decoded = (EhcacheDataSyncMessage) codec.decode(0, encodedMessage); assertThat(decoded.getCacheId(), is(message.getCacheId())); - assertThat(decoded.getKey(), is(message.getKey())); - assertThat(chainsEqual(decoded.getChain(), message.getChain()), is(true)); + Map decodedChainMap = decoded.getChainMap(); + assertThat(decodedChainMap.size(), is(3)); + assertThat(chainsEqual(decodedChainMap.get(1L), chain), is(true)); + assertThat(chainsEqual(decodedChainMap.get(2L), chain), is(true)); + assertThat(chainsEqual(decodedChainMap.get(3L), chain), is(true)); } } From 5fd32977f9a51eac6c157f4e0ae8ad223cc83d17 Mon Sep 17 00:00:00 2001 From: "EUR\\bra" Date: Wed, 16 Nov 2016 11:06:59 +0100 Subject: [PATCH 193/218] Clarified some terms around caching tiers and storage areas. --- .../docs/asciidoc/user/caching-concepts.adoc | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/src/docs/asciidoc/user/caching-concepts.adoc b/docs/src/docs/asciidoc/user/caching-concepts.adoc index 17a75ca025..c1cd258df1 100644 --- a/docs/src/docs/asciidoc/user/caching-concepts.adoc +++ b/docs/src/docs/asciidoc/user/caching-concepts.adoc @@ -37,12 +37,12 @@ and technical decision based upon the requirements and assumptions of your appli [[storage-tiers]] == Storage Tiers -You can configure Ehcache to use various data storage areas. -When a cache is configured to use more than one storage area, those areas are arranged and managed as `tiers`. -They are organized in a hierarchy, with the lowest tier being called the `authority` tier and the others being part of the `caching` tier. -The caching tier can itself be composed of more than one storage area. -The _hottest_ data is kept in the caching tier, which is typically less abundant but faster than the authority tier. -All the data is kept in the authority tier, which is slower but more abundant. +You can configure Ehcache to use various data storage areas. +When a cache is configured to use more than one storage area, those areas are arranged and managed as `tiers`. +They are organized in a hierarchy, with the lowest tier being called the `authority` tier and the others being part of the `caching` tier. +The caching tier can itself be composed of more than one storage area. +The _hottest_ data is kept in the caching tier, which is typically less abundant but faster than the authority tier. +All the data is kept in the authority tier, which is slower but more abundant. Data stores supported by Ehcache include: @@ -55,8 +55,8 @@ to Java garbage collection (GC). Is quite fast, yet slower than the On-Heap Stor from the JVM's heap as it is stored and re-accessed. * Disk Store - Utilizes a disk (file system) to store cache entries. This type of storage resource is typically very abundant but much slower than the RAM-based stores. -* Clustered Store - This data store is a cache on a remote server. 
-The remote server may optionally have a failover server providing improved high availability. +* Clustered Store - This data store is a cache on a remote server. +The remote server may optionally have a failover server providing improved high availability. Since clustered storage comes with performance penalties due to such factors as network latency as well as for establishing client/server consistency, this tier, by nature, is slower than local off-heap storage. @@ -67,15 +67,15 @@ image::EhcacheTerminology.png[] === Standalone -The data set is held in the application node. -If a standalone topology is used where there are multiple application nodes running the same application, then their caches are completely independent. +The data set is held in the application node. If a standalone topology is used where there are multiple application nodes running the +same application, then their caches are completely independent. === Distributed / Clustered The data is held in a remote server (or array of servers) with a subset of hot data held in each application node. This topology offers offers a selection of consistency options. A distributed topology is the recommended approach in a clustered or scaled-out application environment. -It provides the best combination of performance, availability, and scalability. +It provides the best combination of performance, availability and scalability. image::ClusteredEhcacheTopology.png[] From ef3f168ffca7fb1a882d200afedc7e16998eb5a9 Mon Sep 17 00:00:00 2001 From: Chris Bradley Date: Thu, 24 Nov 2016 11:34:07 +0100 Subject: [PATCH 194/218] Added changes resulting from a technical review of the text. --- .../docs/asciidoc/user/getting-started.adoc | 64 +++++++++++-------- .../asciidoc/user/serializers-copiers.adoc | 2 +- docs/src/docs/asciidoc/user/xa.adoc | 2 +- docs/src/docs/asciidoc/user/xml.adoc | 9 ++- 4 files changed, 43 insertions(+), 34 deletions(-) diff --git a/docs/src/docs/asciidoc/user/getting-started.adoc b/docs/src/docs/asciidoc/user/getting-started.adoc index a4dc77b361..0d97ebbb09 100644 --- a/docs/src/docs/asciidoc/user/getting-started.adoc +++ b/docs/src/docs/asciidoc/user/getting-started.adoc @@ -32,24 +32,29 @@ As with the previous versions of Ehcache, the canonical way of dealing with `Cac include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=cachemanagerExample] ---- -<1> Static method `org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder` that returns a new `org.ehcache.config.builders.CacheManagerBuilder` - instance; -<2> Use the builder to register a pre-configured `Cache` to be created when we `.build()` the actual `CacheManager`. - The first `String` argument is the alias used to interact with the `Cache` through the `CacheManager`; the second - argument is `org.ehcache.config.CacheConfiguration` to configure the `Cache`. We use the static - `.newCacheConfigurationBuilder()` method on `org.ehcache.config.builders.CacheConfigurationBuilder` to create a default config; -<3> Finally, invoking `.build()` returns a fully instantiated, but uninitialized, `CacheManager` we can use; -<4> Before you start to use the `CacheManager` it needs to be `init()`, which can be done for you by the builder by passing - `true` to `build(boolean)`; -<5> We can retrieve the `preConfigured` aliased `Cache` we declared in step 2. For type-safety, we ask for both key and - value types to be passed in. 
If these differ from the ones we expect, the `CacheManager` throws a `ClassCastException` - early in the application's lifecycle. It also guards the `Cache` from being polluted by random types. -<6> The `CacheManager` can also be used to create new `Cache` instances as needed. Just as in step 2, it requires passing in an +<1> The static method `org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder` returns a new `org.ehcache.config.builders.CacheManagerBuilder` instance. +<2> Use the builder to define a `Cache` with alias "preConfigured". This cache will be created when `cacheManager.build()` is invoked on the actual `CacheManager` instance. +The first `String` argument is the cache alias, which is used to retrieve the cache from the `CacheManager`. +The second argument, `org.ehcache.config.CacheConfiguration`, is used to configure the `Cache`. +We use the static `newCacheConfigurationBuilder()` method on `org.ehcache.config.builders.CacheConfigurationBuilder` to create a default configuration. +<3> Finally, invoking `build()` returns a fully instantiated, but uninitialized, `CacheManager` we can use. +<4> Before using the `CacheManager` it needs to be initialized, which can be done in 1 of 2 ways: +Calling `CacheManager.init()` on the `CacheManager` instance, or calling the `CacheManagerBuilder.build(boolean init)` method with the boolean parameter set to true. +<5> A cache is retrieved by passing its alias, key type and value type to the `CacheManager`. +For instance, to obtain the cache declared in step 2 you need its alias=preConfigured, keyType=Long.class and valueType=String.class. +For type-safety, we ask for both key and value types to be passed in. +If these differ from the ones we expect, the `CacheManager` throws a `ClassCastException` early in the application's lifecycle. +This guards the `Cache` from being polluted by random types. +<6> The `CacheManager` can be used to create new `Cache` instances as needed. Just as in step 2, it requires passing in an alias as well as a `CacheConfiguration`. The instantiated and fully initialized `Cache` added will be returned and/or accessed through the `CacheManager.getCache` API. -<7> We can now use the newly added `Cache` to store and ... -<8> ... retrieve data. -<9> We can also `CacheManager.removeCache(String)` a given `Cache`. The `CacheManager` will not only remove its reference to the +<7> The newly added `Cache` can now be used to store entries, which are comprised of key value pairs. +The put method's first parameter is the key and the second parameter is the value. +Remember the key and value types must be the same types as those defined in the `CacheConfiguration`. +Additionally the key must be unique and is only associated with one value. +<8> A value is retrieved from a cache by calling the `cache.get(key)` method. +It only takes one parameter which is the key, and returns the value associated with that key. If there is no value associated with that key then null is returned. +<9> We can `CacheManager.removeCache(String)` a given `Cache`. The `CacheManager` will not only remove its reference to the `Cache`, but will also close it. The `Cache` releases all locally held transient resources (such as memory). References to this `Cache` become unusable. <10> In order to release all transient resources (memory, threads, ...) 
a `CacheManager` provides to `Cache` instances @@ -125,7 +130,10 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t <1> If you wish to use disk storage (like for persistent `Cache` instances), you'll have to provide a location where data should be stored on disk to the `CacheManagerBuilder.persistence(String)` static method. -<2> You define a resource pool for the disk. +<2> Defines a resource pool for the disk. +The third parameter is a boolean value which is used to set whether the disk pool is persistent. +When set to true, the pool is persistent, and when set to false, the pool is not persistent. +When this method is used without the third boolean parameter then the pool is not persistent. The example above allocates a very small amount of disk storage. Remember that data stored on disk will have to be serialized / deserialized and written / read from disk - @@ -137,6 +145,8 @@ Note that Ehcache 3 only offers persistence in the case of clean shutdowns. ==== Three tiers +The example below illustrates how to use disk storage for a non-persistent `Cache` instance. + [source,java,indent=0] ---- include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=threeTiersCacheManager] @@ -146,7 +156,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t location where data should be stored on disk to the `CacheManagerBuilder.persistence(String)` static method. <2> You define a resource pool for the heap. <3> You define a resource pool for the off-heap. -<4> You define a resource pool for the disk. +<4> You define a non-persistent resource pool for the disk. Remember this pool is not persistent because we declared the disk pool using the method that does not use the boolean persistent parameter. ==== Byte-sized heap @@ -159,12 +169,12 @@ NOTE: Byte sizing has a runtime performance impact that depends on the size and include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[tag=byteSizedTieredCache] ---- -<1> You can also size the heap tier in bytes. This will limit the amount of heap used by that tier for - storing key-value pairs. Note that there is a cost associated to sizing objects. -<2> The sizing mechanism can be configured along two axes: The first one specifies the maximum number - of objects to traverse while walking through the object graph, the second defines the maximum size of a - single object. If the sizing goes above any of these two limits, the mutative operation on the - cache will be ignored. +<1> You can also size the heap tier in bytes. +This will limit the amount of memory used by the heap tier for storing key-value pairs. +Note that there is a cost associated to sizing objects. +<2> The sizing can also be further restrained by 2 additional configuration settings: +The first one specifies the maximum number of objects to traverse while walking the object graph, the second defines the maximum size of a single object. +If the sizing goes above any of these two limits, the mutative operation on the cache will be ignored. <3> A default configuration can be provided at CacheManager level to be used by the caches unless defined explicitly. @@ -172,7 +182,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t Limited size adjustment can be performed on a live cache. -NOTE: Presently, `updateResourcePools()` only supports updating the heap tier and without changing the resource type. 
+NOTE: `updateResourcePools()` only allows you to change the heap tier sizing, not the pool type. Thus you can't change the sizing of off-heap or disk tiers. [source,java,indent=0] ---- @@ -202,7 +212,7 @@ See the section on <> for more information about the option [[configuring-with-xml]] === Configuring With XML -...It wouldn't be Java without _some_ XML +...It wouldn't be Java without _some_ XML. You can create an XML file to configure a `CacheManager`: @@ -217,7 +227,7 @@ include::{sourcedir31}/xml/src/test/resources/configs/docs/getting-started.xml[t <4> ...as well as up to 500 MB of off-heap memory before it starts evicting <5> `` elements let you create an abstract configuration that further `` configurations can then _extend_ <6> `bar` is such a `Cache`. `bar` uses the `` named `myDefaults` and overrides its `key-type` to a wider type. -<7> `simpleCache` is another such a `Cache`. It uses `myDefaults` configuration for its sole `CacheConfiguration`. +<7> `simpleCache` is another such `Cache`. It uses `myDefaults` configuration for its sole `CacheConfiguration`. Refer to the <> for more details on the XML format. diff --git a/docs/src/docs/asciidoc/user/serializers-copiers.adoc b/docs/src/docs/asciidoc/user/serializers-copiers.adoc index f188361a8f..c59d9d40d7 100644 --- a/docs/src/docs/asciidoc/user/serializers-copiers.adoc +++ b/docs/src/docs/asciidoc/user/serializers-copiers.adoc @@ -9,7 +9,7 @@ ifdef::notBuildingForSite[] include::menu.adoc[] endif::notBuildingForSite[] -While Ehcache is a Java cache, it cannot always store its mappings as Java objects. +While Ehcache is a Java cache, it cannot always store its mapping as Java objects. The <> is capable of storing cached objects either by reference (where the given key and value references are stored) or by value (where a copy of the given key and value are made and those copies are then stored). diff --git a/docs/src/docs/asciidoc/user/xa.adoc b/docs/src/docs/asciidoc/user/xa.adoc index 1f4db9a36b..19d0cdfd89 100644 --- a/docs/src/docs/asciidoc/user/xa.adoc +++ b/docs/src/docs/asciidoc/user/xa.adoc @@ -32,7 +32,7 @@ endif::notBuildingForSite[] * The isolation level is guaranteed by the use of the `Copier` mechanism. When no copiers are configured for either the key or the value, default ones are automatically used instead. You cannot disable the `Copier` mechanism for a transactional cache. - * Accessing a cache access outside of a JTA transaction context is forbidden. + * Accessing a cache outside of a JTA transaction context is forbidden. * There is no protection against the ABA problem. * Everything else works orthogonally. diff --git a/docs/src/docs/asciidoc/user/xml.adoc b/docs/src/docs/asciidoc/user/xml.adoc index c8224f7e59..e72af905b3 100644 --- a/docs/src/docs/asciidoc/user/xml.adoc +++ b/docs/src/docs/asciidoc/user/xml.adoc @@ -16,15 +16,14 @@ Using an XML file you can configure a `CacheManager` at creation time, according === `` root element -The root element of our XML configuration. One `` element and, by implication, one XML file, -provides the definition for a `CacheManager`. With Ehcache 3, however, you may create multiple -`CacheManager` instances using the same XML configuration file. Unlike the JSR-107 -`javax.cache.spi.CachingProvider`, Ehcache does not maintain a registry of `CacheManager` instances. +The root element of our XML configuration. One `` element in an XML file provides the definition for a `CacheManager`. 
+NOTE: Ehcache allows for creating multiple `CacheManager` instances using the same XML configuration file. +In contrast to the JSR-107 `javax.cache.spi.CachingProvider`, Ehcache does not maintain a registry of `CacheManager` instances. === `` elements -`` elements are an extension point for specifying `CacheManager` managed services. +`` elements are extension points for specifying services managed by the `CacheManager`. Each `Service` defined in this way is managed with the same lifecycle as the `CacheManager` -- for each `Service` defined for a `CacheManager`, the `Service.start` From 4bf5e7c0be31ca13795486d10dd663d7fcbee3b9 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Dec 2016 17:43:32 +0100 Subject: [PATCH 195/218] Bump to latest Terracotta version * Adapt to EntityRef.create and EntityRef.destroy API changes * Adapt to the passthrough changes --- build.gradle | 10 +++++----- .../client/internal/EhcacheClientEntityFactory.java | 11 +++++++++++ .../client/internal/lock/VoltronReadWriteLock.java | 8 ++++++++ .../internal/service/AbstractClientEntityFactory.java | 8 ++++++++ .../client/internal/UnitTestConnectionService.java | 8 ++++++++ 5 files changed, 40 insertions(+), 5 deletions(-) diff --git a/build.gradle b/build.gradle index bf77edc387..ef83992549 100644 --- a/build.gradle +++ b/build.gradle @@ -28,15 +28,15 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.14.beta' + terracottaPlatformVersion = '5.0.15.beta' managementVersion = terracottaPlatformVersion - terracottaApisVersion = '1.0.14.beta' - terracottaCoreVersion = '5.0.14-beta2' + terracottaApisVersion = '1.0.15.beta' + terracottaCoreVersion = '5.0.15-beta' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.14.beta' + terracottaPassthroughTestingVersion = '1.0.15.beta' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.14-beta2' + galvanVersion = '1.0.15-beta' // Tools findbugsVersion = '3.0.1' diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java index b3758887fe..0b91b03c86 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityFactory.java @@ -27,9 +27,11 @@ import org.terracotta.connection.Connection; import org.terracotta.connection.entity.EntityRef; import org.terracotta.exception.EntityAlreadyExistsException; +import org.terracotta.exception.EntityConfigurationException; import org.terracotta.exception.EntityNotFoundException; import org.terracotta.exception.EntityNotProvidedException; import org.terracotta.exception.EntityVersionMismatchException; +import org.terracotta.exception.PermanentEntityException; import java.util.Map; import java.util.UUID; @@ -140,6 +142,12 @@ public void create(final String identifier, final ServerSideConfiguration config } catch (EntityVersionMismatchException e) { LOGGER.error("Unable to create clustered tier manager for id {}", identifier, e); throw new AssertionError(e); + } catch (PermanentEntityException e) { + LOGGER.error("Unable to create entity - server indicates it is permanent", e); + throw new AssertionError(e); + } catch (EntityConfigurationException e) { + LOGGER.error("Unable to create entity - 
configuration exception", e); + throw new AssertionError(e); } } finally { if (localMaintenance != null) { @@ -227,6 +235,9 @@ public void destroy(final String identifier) throws EhcacheEntityNotFoundExcepti throw new AssertionError(e); } catch (EntityNotFoundException e) { throw new EhcacheEntityNotFoundException(e); + } catch (PermanentEntityException e) { + LOGGER.error("Unable to destroy entity - server says it is permanent", e); + throw new AssertionError(e); } } finally { if (localMaintenance != null) { diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java index 311345a508..8ffa4eb42c 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java @@ -24,9 +24,11 @@ import org.terracotta.connection.Connection; import org.terracotta.connection.entity.EntityRef; import org.terracotta.exception.EntityAlreadyExistsException; +import org.terracotta.exception.EntityConfigurationException; import org.terracotta.exception.EntityNotFoundException; import org.terracotta.exception.EntityNotProvidedException; import org.terracotta.exception.EntityVersionMismatchException; +import org.terracotta.exception.PermanentEntityException; public class VoltronReadWriteLock { @@ -86,6 +88,9 @@ private void tryDestroy() { throw new AssertionError(e); } catch (EntityNotFoundException e) { // Nothing to do + } catch (PermanentEntityException e) { + LOGGER.error("Failed to destroy lock entity - server says it is permanent", e); + throw new AssertionError(e); } } @@ -131,6 +136,9 @@ private VoltronReadWriteLockClient createClientEntity() { LOGGER.debug("Created lock entity " + reference.getName()); } catch (EntityAlreadyExistsException f) { //ignore + } catch (EntityConfigurationException e) { + LOGGER.error("Error creating lock entity - configuration exception", e); + throw new AssertionError(e); } try { return reference.fetchEntity(); diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java index 82733fdab0..b07093a170 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/service/AbstractClientEntityFactory.java @@ -23,9 +23,11 @@ import org.terracotta.connection.entity.Entity; import org.terracotta.connection.entity.EntityRef; import org.terracotta.exception.EntityAlreadyExistsException; +import org.terracotta.exception.EntityConfigurationException; import org.terracotta.exception.EntityNotFoundException; import org.terracotta.exception.EntityNotProvidedException; import org.terracotta.exception.EntityVersionMismatchException; +import org.terracotta.exception.PermanentEntityException; abstract class AbstractClientEntityFactory implements ClientEntityFactory { @@ -86,6 +88,9 @@ public void create() throws EntityAlreadyExistsException { } catch (EntityVersionMismatchException e) { LOGGER.error("Unable to create entity {} for id {}", entityType.getName(), entityIdentifier, e); throw new AssertionError(e); + } catch (EntityConfigurationException e) { + LOGGER.error("Unable to create 
entity - configuration exception", e); + throw new AssertionError(e); } } @@ -109,6 +114,9 @@ public void destroy() throws EntityNotFoundException, EntityBusyException { } catch (EntityNotProvidedException e) { LOGGER.error("Unable to destroy entity {} for id {}", entityType.getName(), entityIdentifier, e); throw new AssertionError(e); + } catch (PermanentEntityException e) { + LOGGER.error("Unable to destroy entity - server says it is permanent", e); + throw new AssertionError(e); } } diff --git a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java index 4ea7daa922..6c47a30e6c 100644 --- a/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java +++ b/clustered/client/src/test/java/org/ehcache/clustered/client/internal/UnitTestConnectionService.java @@ -55,14 +55,18 @@ import org.terracotta.entity.ServiceProviderConfiguration; import org.terracotta.exception.EntityNotFoundException; import org.terracotta.exception.EntityNotProvidedException; +import org.terracotta.exception.PermanentEntityException; import org.terracotta.offheapresource.OffHeapResourcesProvider; import org.terracotta.offheapresource.config.MemoryUnit; import org.terracotta.offheapresource.config.OffheapResourcesType; import org.terracotta.offheapresource.config.ResourceType; +import org.terracotta.passthrough.IAsynchronousServerCrasher; import org.terracotta.passthrough.PassthroughConnection; import org.terracotta.passthrough.PassthroughServer; import org.terracotta.passthrough.PassthroughServerRegistry; +import static org.mockito.Mockito.mock; + /** * A {@link ConnectionService} implementation used to simulate Voltron server connections for unit testing purposes. @@ -142,6 +146,8 @@ public static void add(URI uri, PassthroughServer server) { } SERVERS.put(keyURI, new ServerDescriptor(server)); + // TODO rework that better + server.registerAsynchronousServerCrasher(mock(IAsynchronousServerCrasher.class)); server.start(true, false); LOGGER.info("Started PassthroughServer at {}", keyURI); } @@ -240,6 +246,8 @@ public static PassthroughServer remove(URI uri) { LOGGER.error("Entity destroy failed: ", ex); } catch (EntityNotFoundException ex) { LOGGER.error("Entity destroy failed: ", ex); + } catch (PermanentEntityException ex) { + LOGGER.error("Entity destroy failed (permanent???): ", ex); } } From 12fcc6711e73ad3573f4db7032e773bbced65d47 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Dec 2016 20:46:29 +0100 Subject: [PATCH 196/218] :bug: Fix #1666 Restore lock entity behavior Remove the associated TODO and restore original behavior since the underlying bug (terracotta-oss/terracotta-core#379) has been fixed. 
--- .../clustered/client/internal/lock/VoltronReadWriteLock.java | 3 +-- .../replication/BasicLifeCyclePassiveReplicationTest.java | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java index 8ffa4eb42c..b6d9d6e991 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/lock/VoltronReadWriteLock.java @@ -72,8 +72,7 @@ private Hold tryLock(final HoldType type) { return new HoldImpl(client, type); } else { client.close(); - //TODO Restore this clean up operation once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed -// tryDestroy(); + tryDestroy(); return null; } } diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java index 2c0a3d3550..efb71f5c44 100644 --- a/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/replication/BasicLifeCyclePassiveReplicationTest.java @@ -267,7 +267,6 @@ public void testDestroyCacheManager() throws Exception { } @Test -// @Ignore("enable back once https://github.com/Terracotta-OSS/terracotta-core/issues/379 is fixed") public void testDestroyLockEntity() throws Exception { VoltronReadWriteLock lock1 = new VoltronReadWriteLock(CLUSTER.newConnection(), "my-lock"); VoltronReadWriteLock.Hold hold1 = lock1.tryReadLock(); From 328234b6f7ef89a9c757edec6717513593404c1c Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Tue, 13 Dec 2016 00:20:20 +0100 Subject: [PATCH 197/218] :arrow_up: #1707 Move to Terracotta release instead of beta --- build.gradle | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.gradle b/build.gradle index ef83992549..b037cd413f 100644 --- a/build.gradle +++ b/build.gradle @@ -28,15 +28,15 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.0.15.beta' + terracottaPlatformVersion = '5.1.0' managementVersion = terracottaPlatformVersion - terracottaApisVersion = '1.0.15.beta' - terracottaCoreVersion = '5.0.15-beta' + terracottaApisVersion = '1.1.0' + terracottaCoreVersion = '5.1.0' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.0.15.beta' + terracottaPassthroughTestingVersion = '1.1.0' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.0.15-beta' + galvanVersion = '1.1.0' // Tools findbugsVersion = '3.0.1' From 3f5c9b44ad8bbceb04328bf7fb0275a3297bff96 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 12 Dec 2016 22:47:19 +0100 Subject: [PATCH 198/218] :memo: #1707 Update version and readme following 3.2.0 release --- README.adoc | 10 +++++----- build.gradle | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/README.adoc b/README.adoc index ce1a681a5c..589c601756 100644 --- a/README.adoc +++ b/README.adoc @@ -12,16 +12,16 @@ For samples, documentation, and usage information, please see http://ehcache.org == Current release -We released 3.1.3 on September 30th 2016. 
+We released 3.2.0 on December 12th 2016. -The https://github.com/ehcache/ehcache3/releases/tag/v3.1.3[release notes] contain the links to the artifacts and the documentation to help you get started. +The https://github.com/ehcache/ehcache3/releases/tag/v3.2.0[release notes] contain the links to the artifacts and the documentation to help you get started. -You should consider upgrading to 3.1.x as it does all 3.0.x does and more with a fully compatible API. -The only thing to note is that transactional support has been moved to a separate jar. +You should consider upgrading to 3.2.x as it does all 3.0.x and 3.1.x do and more with a fully compatible API. +The only thing to note compared to 3.0.x is that transactional support has been moved to a separate jar. == Current development & next release -We are now working on the missing features of the clustering tier of Ehcache 3 which will be included in upcoming 3.2.x releases. +We are still working on the missing features of the clustering tier of Ehcache 3 which will be included in upcoming releases. We may still do 3.1.x release to include all fixes that have been made on it, but this is now less a priority. There is no longer any plan for a 3.0.x release. diff --git a/build.gradle b/build.gradle index b037cd413f..646e02015b 100644 --- a/build.gradle +++ b/build.gradle @@ -18,7 +18,7 @@ import org.gradle.internal.jvm.Jvm ext { - baseVersion = findProperty('overrideVersion') ?: '3.2.0-SNAPSHOT' + baseVersion = findProperty('overrideVersion') ?: '3.2.1-SNAPSHOT' // Third parties offheapVersion = '2.3.2' From 459824144a50504f813b3dd698e79b78fa92825b Mon Sep 17 00:00:00 2001 From: "Madduri, Venkata Sairam" Date: Tue, 13 Dec 2016 16:20:43 +0530 Subject: [PATCH 199/218] Issue #1710: Add support for EhcacheStateService loadExisting --- .../org/ehcache/clustered/server/EhcacheActiveEntity.java | 1 + .../ehcache/clustered/server/EhcacheStateServiceImpl.java | 5 +++++ .../ehcache/clustered/server/state/EhcacheStateService.java | 2 ++ 3 files changed, 8 insertions(+) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java index 2ef99024db..2a65ec7d52 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheActiveEntity.java @@ -414,6 +414,7 @@ public void createNew() { @Override public void loadExisting() { + ehcacheStateService.loadExisting(); LOGGER.debug("Preparing for handling Inflight Invalidations and independent Passive Evictions in loadExisting"); inflightInvalidations = new ConcurrentHashMap<>(); addInflightInvalidationsForEventualCaches(); diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index ab2ed61e55..dfa428902c 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -461,6 +461,11 @@ public InvalidationTracker removeInvalidationtracker(String cacheId) { return this.invalidationMap.remove(cacheId); } + @Override + public void loadExisting() { + //nothing to do + } + public boolean isConfigured() { return configured; } diff --git 
a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java index 4ef7dbb79a..bcbba9d402 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/state/EhcacheStateService.java @@ -66,4 +66,6 @@ public interface EhcacheStateService { InvalidationTracker removeInvalidationtracker(String cacheId); + void loadExisting(); + } From 094a47fc1dfe297d960260fbfe36c3887aba4324 Mon Sep 17 00:00:00 2001 From: Albin Suresh Date: Tue, 13 Dec 2016 19:41:19 +0530 Subject: [PATCH 200/218] Closes #1678 Refactor codecs to reuse configuration encoding and decoding logic for extensibility --- .../internal/EhcacheClientEntityService.java | 8 +- .../internal/messages/CommonConfigCodec.java | 201 ++++++++++++++++++ .../common/internal/messages/ConfigCodec.java | 43 ++++ .../internal/messages/EhcacheCodec.java | 9 +- .../messages/LifeCycleMessageCodec.java | 169 +++++---------- .../internal/messages/MessageCodecUtils.java | 57 ----- .../internal/messages/ResponseCodec.java | 4 +- .../internal/messages/ServerStoreOpCodec.java | 4 +- .../messages/StateRepositoryOpCodec.java | 2 +- .../messages/CommonConfigCodecTest.java | 50 +++++ .../messages/LifeCycleMessageCodecTest.java | 2 +- .../server/EhcacheServerEntityService.java | 15 +- .../internal/messages/EhcacheServerCodec.java | 6 - .../messages/EhcacheSyncMessageCodec.java | 105 +++------ .../PassiveReplicationMessageCodec.java | 48 ++--- .../messages/EhcacheSyncMessageCodecTest.java | 5 +- .../PassiveReplicationMessageCodecTest.java | 3 +- 17 files changed, 432 insertions(+), 299 deletions(-) create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java create mode 100644 clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java create mode 100644 clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java diff --git a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java index ddcdfcfd1f..b45d486f70 100644 --- a/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java +++ b/clustered/client/src/main/java/org/ehcache/clustered/client/internal/EhcacheClientEntityService.java @@ -19,10 +19,15 @@ import java.util.UUID; import org.ehcache.clustered.common.internal.ClusteredEhcacheIdentity; +import org.ehcache.clustered.common.internal.messages.CommonConfigCodec; import org.ehcache.clustered.common.internal.messages.EhcacheCodec; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.LifeCycleMessageCodec; +import org.ehcache.clustered.common.internal.messages.ResponseCodec; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpCodec; +import org.ehcache.clustered.common.internal.messages.StateRepositoryOpCodec; import org.terracotta.entity.EntityClientEndpoint; import org.terracotta.entity.EntityClientService; import org.terracotta.entity.MessageCodec; @@ -51,6 +56,7 @@ public EhcacheClientEntity create(EntityClientEndpoint getMessageCodec() { 
- return EhcacheCodec.messageCodec(); + return new EhcacheCodec(new ServerStoreOpCodec(), new LifeCycleMessageCodec(new CommonConfigCodec()), + new StateRepositoryOpCodec(), new ResponseCodec()); } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java new file mode 100644 index 0000000000..941211362e --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodec.java @@ -0,0 +1,201 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.PoolAllocation; +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.terracotta.runnel.EnumMapping; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.Enm; +import org.terracotta.runnel.decoding.PrimitiveDecodingSupport; +import org.terracotta.runnel.decoding.StructArrayDecoder; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.PrimitiveEncodingSupport; +import org.terracotta.runnel.encoding.StructArrayEncoder; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.util.HashMap; +import java.util.Map; + +import static org.terracotta.runnel.EnumMappingBuilder.newEnumMappingBuilder; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +/** + * Encodes and decodes configuration objects such as {@link ServerSideConfiguration} and {@link ServerStoreConfiguration}. + *

+ * This class is made extensible and hence must remain public. + */ +@SuppressWarnings("WeakerAccess") +public class CommonConfigCodec implements ConfigCodec { + + private static final String STORE_CONFIG_KEY_TYPE_FIELD = "keyType"; + private static final String STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD = "keySerializerType"; + private static final String STORE_CONFIG_VALUE_TYPE_FIELD = "valueType"; + private static final String STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD = "valueSerializerType"; + private static final String STORE_CONFIG_CONSISTENCY_FIELD = "consistency"; + private static final String POOL_SIZE_FIELD = "poolSize"; + private static final String POOL_RESOURCE_NAME_FIELD = "resourceName"; + private static final String DEFAULT_RESOURCE_FIELD = "defaultResource"; + private static final String POOLS_SUB_STRUCT = "pools"; + private static final String POOL_NAME_FIELD = "poolName"; + + private static final EnumMapping CONSISTENCY_ENUM_MAPPING = newEnumMappingBuilder(Consistency.class) + .mapping(Consistency.EVENTUAL, 1) + .mapping(Consistency.STRONG, 2) + .build(); + + private static final Struct POOLS_STRUCT = newStructBuilder() + .string(POOL_NAME_FIELD, 10) + .int64(POOL_SIZE_FIELD, 20) + .string(POOL_RESOURCE_NAME_FIELD, 30).build(); + + @Override + public InjectTuple injectServerStoreConfiguration(StructBuilder baseBuilder, final int index) { + final StructBuilder structBuilder = baseBuilder.string(STORE_CONFIG_KEY_TYPE_FIELD, index) + .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, index + 10) + .string(STORE_CONFIG_VALUE_TYPE_FIELD, index + 11) + .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, index + 15) + .enm(STORE_CONFIG_CONSISTENCY_FIELD, index + 16, CONSISTENCY_ENUM_MAPPING) + .int64(POOL_SIZE_FIELD, index + 20) + .string(POOL_RESOURCE_NAME_FIELD, index + 30); + + return new InjectTuple() { + @Override + public int getLastIndex() { + return index + 30; + } + + @Override + public StructBuilder getUpdatedBuilder() { + return structBuilder; + } + }; + } + + @Override + public InjectTuple injectServerSideConfiguration(StructBuilder baseBuilder, final int index) { + final StructBuilder structBuilder = baseBuilder.string(DEFAULT_RESOURCE_FIELD, index + 10) + .structs(POOLS_SUB_STRUCT, index + 20, POOLS_STRUCT); + + return new InjectTuple() { + @Override + public int getLastIndex() { + return index + 20; + } + + @Override + public StructBuilder getUpdatedBuilder() { + return structBuilder; + } + }; + } + + @Override + public void encodeServerStoreConfiguration(PrimitiveEncodingSupport encoder, ServerStoreConfiguration configuration) { + encoder.string(STORE_CONFIG_KEY_TYPE_FIELD, configuration.getStoredKeyType()) + .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, configuration.getKeySerializerType()) + .string(STORE_CONFIG_VALUE_TYPE_FIELD, configuration.getStoredValueType()) + .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, configuration.getValueSerializerType()); + if (configuration.getConsistency() != null) { + encoder.enm(STORE_CONFIG_CONSISTENCY_FIELD, configuration.getConsistency()); + } + + PoolAllocation poolAllocation = configuration.getPoolAllocation(); + if (poolAllocation instanceof PoolAllocation.Dedicated) { + PoolAllocation.Dedicated dedicatedPool = (PoolAllocation.Dedicated) poolAllocation; + encoder.int64(POOL_SIZE_FIELD, dedicatedPool.getSize()); + if (dedicatedPool.getResourceName() != null) { + encoder.string(POOL_RESOURCE_NAME_FIELD, dedicatedPool.getResourceName()); + } + } else if (poolAllocation instanceof PoolAllocation.Shared) { + 
encoder.string(POOL_RESOURCE_NAME_FIELD, ((PoolAllocation.Shared) poolAllocation).getResourcePoolName()); + } + } + + @Override + public ServerStoreConfiguration decodeServerStoreConfiguration(PrimitiveDecodingSupport decoder) { + String keyType = decoder.string(STORE_CONFIG_KEY_TYPE_FIELD); + String keySerializer = decoder.string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD); + String valueType = decoder.string(STORE_CONFIG_VALUE_TYPE_FIELD); + String valueSerializer = decoder.string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD); + Enm consistencyEnm = decoder.enm(STORE_CONFIG_CONSISTENCY_FIELD); + Consistency consistency = Consistency.EVENTUAL; + if (consistencyEnm.isValid()) { + consistency = consistencyEnm.get(); + } + Long poolSize = decoder.int64(POOL_SIZE_FIELD); + String poolResource = decoder.string(POOL_RESOURCE_NAME_FIELD); + PoolAllocation poolAllocation = new PoolAllocation.Unknown(); + if (poolSize != null) { + poolAllocation = new PoolAllocation.Dedicated(poolResource, poolSize); + } else if (poolResource != null) { + poolAllocation = new PoolAllocation.Shared(poolResource); + } + return new ServerStoreConfiguration(poolAllocation, keyType, valueType, null, null, keySerializer, valueSerializer, consistency); + } + + @Override + public void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration) { + if (configuration.getDefaultServerResource() != null) { + encoder.string(DEFAULT_RESOURCE_FIELD, configuration.getDefaultServerResource()); + } + + if (!configuration.getResourcePools().isEmpty()) { + StructArrayEncoder> poolsEncoder = encoder.structs(POOLS_SUB_STRUCT); + for (Map.Entry poolEntry : configuration.getResourcePools().entrySet()) { + poolsEncoder.string(POOL_NAME_FIELD, poolEntry.getKey()) + .int64(POOL_SIZE_FIELD, poolEntry.getValue().getSize()); + if (poolEntry.getValue().getServerResource() != null) { + poolsEncoder.string(POOL_RESOURCE_NAME_FIELD, poolEntry.getValue().getServerResource()); + } + poolsEncoder.next(); + } + poolsEncoder.end(); + } + } + + @Override + public ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder) { + String defaultResource = decoder.string(DEFAULT_RESOURCE_FIELD); + + HashMap resourcePools = new HashMap(); + StructArrayDecoder> poolsDecoder = decoder.structs(POOLS_SUB_STRUCT); + if (poolsDecoder != null) { + for (int i = 0; i < poolsDecoder.length(); i++) { + String poolName = poolsDecoder.string(POOL_NAME_FIELD); + Long poolSize = poolsDecoder.int64(POOL_SIZE_FIELD); + String poolResourceName = poolsDecoder.string(POOL_RESOURCE_NAME_FIELD); + if (poolResourceName == null) { + resourcePools.put(poolName, new ServerSideConfiguration.Pool(poolSize)); + } else { + resourcePools.put(poolName, new ServerSideConfiguration.Pool(poolSize, poolResourceName)); + } + poolsDecoder.next(); + } + } + + ServerSideConfiguration serverSideConfiguration; + if (defaultResource == null) { + serverSideConfiguration = new ServerSideConfiguration(resourcePools); + } else { + serverSideConfiguration = new ServerSideConfiguration(defaultResource, resourcePools); + } + return serverSideConfiguration; + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java new file mode 100644 index 0000000000..677f3393b3 --- /dev/null +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ConfigCodec.java @@ -0,0 +1,43 @@ +/* + * Copyright 
Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.terracotta.runnel.StructBuilder; +import org.terracotta.runnel.decoding.PrimitiveDecodingSupport; +import org.terracotta.runnel.decoding.StructDecoder; +import org.terracotta.runnel.encoding.PrimitiveEncodingSupport; +import org.terracotta.runnel.encoding.StructEncoder; + +/** + * Interface that allows extensions to codec. + */ +public interface ConfigCodec { + InjectTuple injectServerSideConfiguration(StructBuilder baseBuilder, int index); + void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration); + ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder); + + InjectTuple injectServerStoreConfiguration(StructBuilder baseBuilder, int index); + void encodeServerStoreConfiguration(PrimitiveEncodingSupport encoder, ServerStoreConfiguration configuration); + ServerStoreConfiguration decodeServerStoreConfiguration(PrimitiveDecodingSupport decoder); + + interface InjectTuple { + int getLastIndex(); + StructBuilder getUpdatedBuilder(); + } +} diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java index fb2982d595..0bd585dcdc 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/EhcacheCodec.java @@ -40,19 +40,12 @@ public class EhcacheCodec implements MessageCodec encoder = RELEASE_STORE_MESSAGE_STRUCTU.encoder(); + StructEncoder encoder = RELEASE_STORE_MESSAGE_STRUCT.encoder(); messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); @@ -140,92 +126,39 @@ private byte[] encodeDestroyStoreMessage(LifecycleMessage.DestroyServerStore mes } private byte[] encodeCreateStoreMessage(LifecycleMessage.CreateServerStore message) { - StructEncoder encoder = CREATE_STORE_MESSAGE_STRUCT.encoder(); + StructEncoder encoder = createStoreMessageStruct.encoder(); return encodeBaseServerStoreMessage(message, encoder); } private byte[] encodeValidateStoreMessage(LifecycleMessage.ValidateServerStore message) { - return encodeBaseServerStoreMessage(message, VALIDATE_STORE_MESSAGE_STRUCT.encoder()); + return encodeBaseServerStoreMessage(message, validateStoreMessageStruct.encoder()); } private byte[] encodeBaseServerStoreMessage(LifecycleMessage.BaseServerStore message, StructEncoder encoder) { messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getName()); - messageCodecUtils.encodeServerStoreConfiguration(encoder, message.getStoreConfiguration()); + 
configCodec.encodeServerStoreConfiguration(encoder, message.getStoreConfiguration()); return encoder.encode().array(); } private byte[] encodeTierManagerConfigureMessage(LifecycleMessage.ConfigureStoreManager message) { - return encodeTierManagerCreateOrValidate(message, message.getConfiguration(), CONFIGURE_MESSAGE_STRUCT.encoder()); + return encodeTierManagerCreateOrValidate(message, message.getConfiguration(), configureMessageStruct.encoder()); } private byte[] encodeTierManagerValidateMessage(LifecycleMessage.ValidateStoreManager message) { - return encodeTierManagerCreateOrValidate(message, message.getConfiguration(), VALIDATE_MESSAGE_STRUCT.encoder()); + return encodeTierManagerCreateOrValidate(message, message.getConfiguration(), validateMessageStruct.encoder()); } private byte[] encodeTierManagerCreateOrValidate(LifecycleMessage message, ServerSideConfiguration config, StructEncoder encoder) { messageCodecUtils.encodeMandatoryFields(encoder, message); - encodeServerSideConfiguration(encoder, config); - return encoder.encode().array(); - } - - private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration) { - if (configuration == null) { + if (config == null) { encoder.bool(CONFIG_PRESENT_FIELD, false); } else { encoder.bool(CONFIG_PRESENT_FIELD, true); - if (configuration.getDefaultServerResource() != null) { - encoder.string(DEFAULT_RESOURCE_FIELD, configuration.getDefaultServerResource()); - } - - if (!configuration.getResourcePools().isEmpty()) { - StructArrayEncoder> poolsEncoder = encoder.structs(POOLS_SUB_STRUCT); - for (Map.Entry poolEntry : configuration.getResourcePools().entrySet()) { - poolsEncoder.string(POOL_NAME_FIELD, poolEntry.getKey()) - .int64(POOL_SIZE_FIELD, poolEntry.getValue().getSize()); - if (poolEntry.getValue().getServerResource() != null) { - poolsEncoder.string(POOL_RESOURCE_NAME_FIELD, poolEntry.getValue().getServerResource()); - } - poolsEncoder.next(); - } - poolsEncoder.end(); - } - } - } - - private ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder) { - boolean configPresent = decoder.bool(CONFIG_PRESENT_FIELD); - - if (configPresent) { - String defaultResource = decoder.string(DEFAULT_RESOURCE_FIELD); - - HashMap resourcePools = new HashMap(); - StructArrayDecoder> poolStructs = decoder.structs(POOLS_SUB_STRUCT); - if (poolStructs != null) { - for (int i = 0; i < poolStructs.length(); i++) { - String poolName = poolStructs.string(POOL_NAME_FIELD); - Long poolSize = poolStructs.int64(POOL_SIZE_FIELD); - String poolResourceName = poolStructs.string(POOL_RESOURCE_NAME_FIELD); - if (poolResourceName == null) { - resourcePools.put(poolName, new ServerSideConfiguration.Pool(poolSize)); - } else { - resourcePools.put(poolName, new ServerSideConfiguration.Pool(poolSize, poolResourceName)); - } - poolStructs.next(); - } - } - - ServerSideConfiguration serverSideConfiguration; - if (defaultResource == null) { - serverSideConfiguration = new ServerSideConfiguration(resourcePools); - } else { - serverSideConfiguration = new ServerSideConfiguration(defaultResource, resourcePools); - } - return serverSideConfiguration; - } else { - return null; + configCodec.encodeServerSideConfiguration(encoder, config); } + return encoder.encode().array(); } public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer messageBuffer) { @@ -248,7 +181,7 @@ public EhcacheEntityMessage decode(EhcacheMessageType messageType, ByteBuffer me } private LifecycleMessage.ReleaseServerStore 
decodeReleaseServerStoreMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = RELEASE_STORE_MESSAGE_STRUCTU.decoder(messageBuffer); + StructDecoder decoder = RELEASE_STORE_MESSAGE_STRUCT.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID cliendId = messageCodecUtils.decodeUUID(decoder); @@ -274,13 +207,13 @@ private LifecycleMessage.DestroyServerStore decodeDestroyServerStoreMessage(Byte } private LifecycleMessage.ValidateServerStore decodeValidateServerStoreMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = VALIDATE_STORE_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = validateStoreMessageStruct.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID cliendId = messageCodecUtils.decodeUUID(decoder); String storeName = decoder.string(SERVER_STORE_NAME_FIELD); - ServerStoreConfiguration config = messageCodecUtils.decodeServerStoreConfiguration(decoder); + ServerStoreConfiguration config = configCodec.decodeServerStoreConfiguration(decoder); LifecycleMessage.ValidateServerStore message = new LifecycleMessage.ValidateServerStore(storeName, config, cliendId); message.setId(msgId); @@ -288,13 +221,13 @@ private LifecycleMessage.ValidateServerStore decodeValidateServerStoreMessage(By } private LifecycleMessage.CreateServerStore decodeCreateServerStoreMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = CREATE_STORE_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = createStoreMessageStruct.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID cliendId = messageCodecUtils.decodeUUID(decoder); String storeName = decoder.string(SERVER_STORE_NAME_FIELD); - ServerStoreConfiguration config = messageCodecUtils.decodeServerStoreConfiguration(decoder); + ServerStoreConfiguration config = configCodec.decodeServerStoreConfiguration(decoder); LifecycleMessage.CreateServerStore message = new LifecycleMessage.CreateServerStore(storeName, config, cliendId); message.setId(msgId); @@ -302,12 +235,17 @@ private LifecycleMessage.CreateServerStore decodeCreateServerStoreMessage(ByteBu } private LifecycleMessage.ValidateStoreManager decodeValidateMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = VALIDATE_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = validateMessageStruct.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID cliendId = messageCodecUtils.decodeUUID(decoder); + boolean configPresent = decoder.bool(CONFIG_PRESENT_FIELD); + + ServerSideConfiguration config = null; + if (configPresent) { + config = configCodec.decodeServerSideConfiguration(decoder); + } - ServerSideConfiguration config = decodeServerSideConfiguration(decoder); LifecycleMessage.ValidateStoreManager message = new LifecycleMessage.ValidateStoreManager(config, cliendId); if (msgId != null) { @@ -317,12 +255,16 @@ private LifecycleMessage.ValidateStoreManager decodeValidateMessage(ByteBuffer m } private LifecycleMessage.ConfigureStoreManager decodeConfigureMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = CONFIGURE_MESSAGE_STRUCT.decoder(messageBuffer); + StructDecoder decoder = configureMessageStruct.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID clientId = messageCodecUtils.decodeUUID(decoder); + boolean configPresent = decoder.bool(CONFIG_PRESENT_FIELD); - ServerSideConfiguration config = decodeServerSideConfiguration(decoder); + ServerSideConfiguration config = null; + if (configPresent) { + config = 
configCodec.decodeServerSideConfiguration(decoder); + } LifecycleMessage.ConfigureStoreManager message = new LifecycleMessage.ConfigureStoreManager(config, clientId); if (msgId != null) { @@ -330,5 +272,4 @@ private LifecycleMessage.ConfigureStoreManager decodeConfigureMessage(ByteBuffer } return message; } - } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java index f4faa6476c..42c1762803 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/MessageCodecUtils.java @@ -40,21 +40,6 @@ public class MessageCodecUtils { public static final String MSB_UUID_FIELD = "msbUUID"; public static final String SERVER_STORE_NAME_FIELD = "serverStoreName"; public static final String KEY_FIELD = "key"; - public static final String DEFAULT_RESOURCE_FIELD = "defaultResource"; - public static final String STORE_CONFIG_KEY_TYPE_FIELD = "keyType"; - public static final String STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD = "keySerializerType"; - public static final String STORE_CONFIG_VALUE_TYPE_FIELD = "valueType"; - public static final String STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD = "valueSerializerType"; - public static final String STORE_CONFIG_CONSISTENCY_FIELD = "consistency"; - public static final String POOLS_SUB_STRUCT = "pools"; - public static final String POOL_NAME_FIELD = "poolName"; - public static final String POOL_SIZE_FIELD = "poolSize"; - public static final String POOL_RESOURCE_NAME_FIELD = "resourceName"; - - public static final EnumMapping CONSISTENCY_ENUM_MAPPING = newEnumMappingBuilder(Consistency.class) - .mapping(Consistency.EVENTUAL, 1) - .mapping(Consistency.STRONG, 2) - .build(); public void encodeMandatoryFields(StructEncoder encoder, EhcacheOperationMessage message) { encoder.enm(EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME, message.getMessageType()) @@ -66,46 +51,4 @@ public void encodeMandatoryFields(StructEncoder encoder, EhcacheOperationM public UUID decodeUUID(StructDecoder decoder) { return new UUID(decoder.int64(MSB_UUID_FIELD), decoder.int64(LSB_UUID_FIELD)); } - - public void encodeServerStoreConfiguration(PrimitiveEncodingSupport encoder, ServerStoreConfiguration configuration) { - encoder.string(STORE_CONFIG_KEY_TYPE_FIELD, configuration.getStoredKeyType()) - .string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, configuration.getKeySerializerType()) - .string(STORE_CONFIG_VALUE_TYPE_FIELD, configuration.getStoredValueType()) - .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, configuration.getValueSerializerType()); - if (configuration.getConsistency() != null) { - encoder.enm(STORE_CONFIG_CONSISTENCY_FIELD, configuration.getConsistency()); - } - - PoolAllocation poolAllocation = configuration.getPoolAllocation(); - if (poolAllocation instanceof PoolAllocation.Dedicated) { - PoolAllocation.Dedicated dedicatedPool = (PoolAllocation.Dedicated) poolAllocation; - encoder.int64(POOL_SIZE_FIELD, dedicatedPool.getSize()); - if (dedicatedPool.getResourceName() != null) { - encoder.string(POOL_RESOURCE_NAME_FIELD, dedicatedPool.getResourceName()); - } - } else if (poolAllocation instanceof PoolAllocation.Shared) { - encoder.string(POOL_RESOURCE_NAME_FIELD, ((PoolAllocation.Shared) poolAllocation).getResourcePoolName()); - } - } - - public ServerStoreConfiguration 
decodeServerStoreConfiguration(PrimitiveDecodingSupport decoder) { - String keyType = decoder.string(STORE_CONFIG_KEY_TYPE_FIELD); - String keySerializer = decoder.string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD); - String valueType = decoder.string(STORE_CONFIG_VALUE_TYPE_FIELD); - String valueSerializer = decoder.string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD); - Enm consistencyEnm = decoder.enm(STORE_CONFIG_CONSISTENCY_FIELD); - Consistency consistency = Consistency.EVENTUAL; - if (consistencyEnm.isValid()) { - consistency = consistencyEnm.get(); - } - Long poolSize = decoder.int64(POOL_SIZE_FIELD); - String poolResource = decoder.string(POOL_RESOURCE_NAME_FIELD); - PoolAllocation poolAllocation = new PoolAllocation.Unknown(); - if (poolSize != null) { - poolAllocation = new PoolAllocation.Dedicated(poolResource, poolSize); - } else if (poolResource != null) { - poolAllocation = new PoolAllocation.Shared(poolResource); - } - return new ServerStoreConfiguration(poolAllocation, keyType, valueType, null, null, keySerializer, valueSerializer, consistency); - } } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java index 83153131f4..b191936923 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ResponseCodec.java @@ -41,7 +41,7 @@ import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; -class ResponseCodec { +public class ResponseCodec { private static final String EXCEPTION_FIELD = "exception"; private static final String INVALIDATION_ID_FIELD = "invalidationId"; @@ -93,7 +93,7 @@ class ResponseCodec { private final ChainCodec chainCodec; - ResponseCodec() { + public ResponseCodec() { this.chainCodec = new ChainCodec(); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java index 615da83c6c..e5f5a28fba 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/ServerStoreOpCodec.java @@ -42,7 +42,7 @@ import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; -class ServerStoreOpCodec { +public class ServerStoreOpCodec { private static final Struct GET_AND_APPEND_MESSAGE_STRUCT = StructBuilder.newStructBuilder() .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) @@ -100,7 +100,7 @@ class ServerStoreOpCodec { private final ChainCodec chainCodec; private final MessageCodecUtils messageCodecUtils = new MessageCodecUtils(); - ServerStoreOpCodec() { + public ServerStoreOpCodec() { this.chainCodec = new ChainCodec(); } diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java index 5275004fdd..249da64c47 100644 --- 
a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/messages/StateRepositoryOpCodec.java @@ -35,7 +35,7 @@ import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; import static org.terracotta.runnel.StructBuilder.newStructBuilder; -class StateRepositoryOpCodec { +public class StateRepositoryOpCodec { private static final String MAP_ID_FIELD = "mapId"; private static final String VALUE_FIELD = "value"; diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java new file mode 100644 index 0000000000..d8e1764146 --- /dev/null +++ b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/CommonConfigCodecTest.java @@ -0,0 +1,50 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered.common.internal.messages; + +import org.ehcache.clustered.common.ServerSideConfiguration; +import org.junit.Test; +import org.terracotta.runnel.Struct; +import org.terracotta.runnel.encoding.StructEncoder; + +import java.nio.ByteBuffer; +import java.util.Collections; + +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.*; +import static org.terracotta.runnel.StructBuilder.newStructBuilder; + +public class CommonConfigCodecTest { + + private static final CommonConfigCodec CODEC = new CommonConfigCodec(); + + @Test + public void testEncodeDecodeServerSideConfiguration() throws Exception { + ServerSideConfiguration serverSideConfiguration = + new ServerSideConfiguration("foo", Collections.singletonMap("bar", new ServerSideConfiguration.Pool(1))); + Struct serverSideConfigurationStruct = CODEC.injectServerSideConfiguration(newStructBuilder(), 10).getUpdatedBuilder().build(); + StructEncoder encoder = serverSideConfigurationStruct.encoder(); + CODEC.encodeServerSideConfiguration(encoder, serverSideConfiguration); + ByteBuffer byteBuffer = encoder.encode(); + byteBuffer.rewind(); + ServerSideConfiguration decodedServerSideConfiguration = + CODEC.decodeServerSideConfiguration(serverSideConfigurationStruct.decoder(byteBuffer)); + assertThat(decodedServerSideConfiguration.getDefaultServerResource(), is("foo")); + assertThat(decodedServerSideConfiguration.getResourcePools(), hasKey("bar")); + } +} diff --git a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java index 582d9cdf15..4f1f8589ee 100644 --- a/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java +++ 
b/clustered/common/src/test/java/org/ehcache/clustered/common/internal/messages/LifeCycleMessageCodecTest.java @@ -41,7 +41,7 @@ public class LifeCycleMessageCodecTest { private static final UUID CLIENT_ID = UUID.randomUUID(); private final LifeCycleMessageFactory factory = new LifeCycleMessageFactory(); - private final LifeCycleMessageCodec codec = new LifeCycleMessageCodec(); + private final LifeCycleMessageCodec codec = new LifeCycleMessageCodec(new CommonConfigCodec()); @Before public void setUp() { diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java index 0cfebf71f5..44bc49b0bc 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheServerEntityService.java @@ -15,10 +15,18 @@ */ package org.ehcache.clustered.server; +import org.ehcache.clustered.common.internal.messages.CommonConfigCodec; +import org.ehcache.clustered.common.internal.messages.ConfigCodec; +import org.ehcache.clustered.common.internal.messages.EhcacheCodec; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheEntityResponse; +import org.ehcache.clustered.common.internal.messages.LifeCycleMessageCodec; +import org.ehcache.clustered.common.internal.messages.ResponseCodec; +import org.ehcache.clustered.common.internal.messages.ServerStoreOpCodec; +import org.ehcache.clustered.common.internal.messages.StateRepositoryOpCodec; import org.ehcache.clustered.server.internal.messages.EhcacheServerCodec; import org.ehcache.clustered.server.internal.messages.EhcacheSyncMessageCodec; +import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessageCodec; import org.terracotta.entity.CommonServerEntity; import org.terracotta.entity.ConcurrencyStrategy; import org.terracotta.entity.EntityServerService; @@ -34,6 +42,7 @@ public class EhcacheServerEntityService implements EntityServerService getConcurrencyStrategy(byte[] c @Override public MessageCodec getMessageCodec() { - return EhcacheServerCodec.getInstance(); + EhcacheCodec ehcacheCodec = new EhcacheCodec(new ServerStoreOpCodec(), + new LifeCycleMessageCodec(CONFIG_CODEC), new StateRepositoryOpCodec(), new ResponseCodec()); + return new EhcacheServerCodec(ehcacheCodec, new PassiveReplicationMessageCodec(CONFIG_CODEC)); } @Override public SyncMessageCodec getSyncMessageCodec() { - return new EhcacheSyncMessageCodec(); + return new EhcacheSyncMessageCodec(CONFIG_CODEC); } @Override diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java index 3840d3a9a9..743089b0b9 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/EhcacheServerCodec.java @@ -38,12 +38,6 @@ public class EhcacheServerCodec implements MessageCodec encoder; switch (syncMessage.getMessageType()) { case STATE: { - encoder = STATE_SYNC_STRUCT.encoder(); + encoder = stateSyncStruct.encoder(); EhcacheStateSyncMessage stateSyncMessage = (EhcacheStateSyncMessage)syncMessage; encoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME, STATE); - 
encodeServerSideConfiguration(encoder, stateSyncMessage.getConfiguration()); + configCodec.encodeServerSideConfiguration(encoder, stateSyncMessage.getConfiguration()); encoder.structs(STORES_SUB_STRUCT, stateSyncMessage.getStoreConfigs().entrySet(), (storeEncoder, storeEntry) -> { storeEncoder.string(SERVER_STORE_NAME_FIELD, storeEntry.getKey()); - codecUtils.encodeServerStoreConfiguration(storeEncoder, storeEntry.getValue()); + configCodec.encodeServerStoreConfiguration(storeEncoder, storeEntry.getValue()); }); return encoder.encode().array(); } @@ -139,24 +124,10 @@ public byte[] encode(final int concurrencyKey, final EhcacheEntityMessage messag } } - private void encodeServerSideConfiguration(StructEncoder encoder, ServerSideConfiguration configuration) { - if (configuration.getDefaultServerResource() != null) { - encoder.string(DEFAULT_RESOURCE_FIELD, configuration.getDefaultServerResource()); - } - encoder.structs(POOLS_SUB_STRUCT, configuration.getResourcePools().entrySet(), (poolEncoder, poolEntry) -> { - poolEncoder.string(POOL_NAME_FIELD, poolEntry.getKey()); - ServerSideConfiguration.Pool pool = poolEntry.getValue(); - poolEncoder.int64(POOL_SIZE_FIELD, pool.getSize()); - if (pool.getServerResource() != null) { - poolEncoder.string(POOL_RESOURCE_NAME_FIELD, pool.getServerResource()); - } - }); - } - @Override public EhcacheSyncMessage decode(final int concurrencyKey, final byte[] payload) throws MessageCodecException { ByteBuffer message = ByteBuffer.wrap(payload); - StructDecoder decoder = STATE_SYNC_STRUCT.decoder(message); + StructDecoder decoder = stateSyncStruct.decoder(message); Enm enm = decoder.enm(SYNC_MESSAGE_TYPE_FIELD_NAME); if (!enm.isFound()) { throw new AssertionError("Invalid message format - misses the message type field"); @@ -168,7 +139,7 @@ public EhcacheSyncMessage decode(final int concurrencyKey, final byte[] payload) switch (enm.get()) { case STATE: - ServerSideConfiguration configuration = decodeServerSideConfiguration(decoder); + ServerSideConfiguration configuration = configCodec.decodeServerSideConfiguration(decoder); Map storeConfigs = decodeStoreConfigurations(decoder); return new EhcacheStateSyncMessage(configuration, storeConfigs); case DATA: @@ -205,30 +176,10 @@ private Map decodeStoreConfigurations(StructDe if (storesDecoder != null) { for (int i = 0; i < storesDecoder.length(); i++) { String storeName = storesDecoder.string(SERVER_STORE_NAME_FIELD); - result.put(storeName, codecUtils.decodeServerStoreConfiguration(storesDecoder)); + result.put(storeName, configCodec.decodeServerStoreConfiguration(storesDecoder)); storesDecoder.next(); } } return result; } - - private ServerSideConfiguration decodeServerSideConfiguration(StructDecoder decoder) { - String defaultResource = decoder.string(DEFAULT_RESOURCE_FIELD); - Map pools = new HashMap<>(); - StructArrayDecoder> poolsDecoder = decoder.structs(POOLS_SUB_STRUCT); - if (poolsDecoder != null) { - for (int i = 0; i < poolsDecoder.length(); i++) { - String poolName = poolsDecoder.string(POOL_NAME_FIELD); - Long poolSize = poolsDecoder.int64(POOL_SIZE_FIELD); - String poolResource = poolsDecoder.string(POOL_RESOURCE_NAME_FIELD); - pools.put(poolName, new ServerSideConfiguration.Pool(poolSize, poolResource)); - poolsDecoder.next(); - } - } - if (defaultResource == null) { - return new ServerSideConfiguration(pools); - } else { - return new ServerSideConfiguration(defaultResource, pools); - } - } } diff --git 
a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java index eced67d4d4..72ca3bdb0e 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodec.java @@ -18,11 +18,13 @@ import org.ehcache.clustered.common.internal.ServerStoreConfiguration; import org.ehcache.clustered.common.internal.messages.ChainCodec; +import org.ehcache.clustered.common.internal.messages.ConfigCodec; import org.ehcache.clustered.common.internal.messages.EhcacheEntityMessage; import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; import org.ehcache.clustered.common.internal.messages.MessageCodecUtils; import org.ehcache.clustered.common.internal.store.Chain; import org.terracotta.runnel.Struct; +import org.terracotta.runnel.StructBuilder; import org.terracotta.runnel.decoding.StructDecoder; import org.terracotta.runnel.encoding.StructEncoder; @@ -32,19 +34,11 @@ import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.EHCACHE_MESSAGE_TYPES_ENUM_MAPPING; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_INDEX; import static org.ehcache.clustered.common.internal.messages.EhcacheMessageType.MESSAGE_TYPE_FIELD_NAME; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.CONSISTENCY_ENUM_MAPPING; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.KEY_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.LSB_UUID_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSB_UUID_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.MSG_ID_FIELD; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_RESOURCE_NAME_FIELD; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.POOL_SIZE_FIELD; import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.SERVER_STORE_NAME_FIELD; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_CONSISTENCY_FIELD; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_KEY_TYPE_FIELD; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD; -import static org.ehcache.clustered.common.internal.messages.MessageCodecUtils.STORE_CONFIG_VALUE_TYPE_FIELD; import static org.terracotta.runnel.StructBuilder.newStructBuilder; public class PassiveReplicationMessageCodec { @@ -78,31 +72,35 @@ public class PassiveReplicationMessageCodec { .int64(KEY_FIELD, 30) .build(); - private static final Struct CREATE_SERVER_STORE_REPLICATION_STRUCT = newStructBuilder() + private static final Struct DESTROY_SERVER_STORE_REPLICATION_STRUCT = newStructBuilder() .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) .int64(MSG_ID_FIELD, 15) .int64(MSB_UUID_FIELD, 20) .int64(LSB_UUID_FIELD, 21) .string(SERVER_STORE_NAME_FIELD, 30) - .string(STORE_CONFIG_KEY_TYPE_FIELD, 40) - 
.string(STORE_CONFIG_KEY_SERIALIZER_TYPE_FIELD, 41) - .string(STORE_CONFIG_VALUE_TYPE_FIELD, 45) - .string(STORE_CONFIG_VALUE_SERIALIZER_TYPE_FIELD, 46) - .enm(STORE_CONFIG_CONSISTENCY_FIELD, 50, CONSISTENCY_ENUM_MAPPING) - .int64(POOL_SIZE_FIELD, 60) - .string(POOL_RESOURCE_NAME_FIELD, 65) .build(); - private static final Struct DESTROY_SERVER_STORE_REPLICATION_STRUCT = newStructBuilder() + private final StructBuilder CREATE_STORE_MESSAGE_STRUCT_BUILDER_PREFIX = newStructBuilder() .enm(MESSAGE_TYPE_FIELD_NAME, MESSAGE_TYPE_FIELD_INDEX, EHCACHE_MESSAGE_TYPES_ENUM_MAPPING) .int64(MSG_ID_FIELD, 15) .int64(MSB_UUID_FIELD, 20) .int64(LSB_UUID_FIELD, 21) - .string(SERVER_STORE_NAME_FIELD, 30) - .build(); + .string(SERVER_STORE_NAME_FIELD, 30); + private static final int CREATE_STORE_NEXT_INDEX = 40; - private final ChainCodec chainCodec = new ChainCodec(); - private final MessageCodecUtils messageCodecUtils = new MessageCodecUtils(); + private final Struct createStoreReplicationMessageStruct; + + private final ChainCodec chainCodec ; + private final MessageCodecUtils messageCodecUtils; + private final ConfigCodec configCodec; + + public PassiveReplicationMessageCodec(final ConfigCodec configCodec) { + this.chainCodec = new ChainCodec(); + this.messageCodecUtils = new MessageCodecUtils(); + this.configCodec = configCodec; + createStoreReplicationMessageStruct = this.configCodec.injectServerStoreConfiguration( + CREATE_STORE_MESSAGE_STRUCT_BUILDER_PREFIX, CREATE_STORE_NEXT_INDEX).getUpdatedBuilder().build(); + } public byte[] encode(PassiveReplicationMessage message) { @@ -134,11 +132,11 @@ private byte[] encoreDestroyServerStoreReplicationMessage(PassiveReplicationMess } private byte[] encodeCreateServerStoreReplicationMessage(PassiveReplicationMessage.CreateServerStoreReplicationMessage message) { - StructEncoder encoder = CREATE_SERVER_STORE_REPLICATION_STRUCT.encoder(); + StructEncoder encoder = createStoreReplicationMessageStruct.encoder(); messageCodecUtils.encodeMandatoryFields(encoder, message); encoder.string(SERVER_STORE_NAME_FIELD, message.getStoreName()); - messageCodecUtils.encodeServerStoreConfiguration(encoder, message.getStoreConfiguration()); + configCodec.encodeServerStoreConfiguration(encoder, message.getStoreConfiguration()); return encoder.encode().array(); } @@ -216,13 +214,13 @@ private PassiveReplicationMessage.DestroyServerStoreReplicationMessage decodeDes } private PassiveReplicationMessage.CreateServerStoreReplicationMessage decodeCreateServerStoreReplicationMessage(ByteBuffer messageBuffer) { - StructDecoder decoder = CREATE_SERVER_STORE_REPLICATION_STRUCT.decoder(messageBuffer); + StructDecoder decoder = createStoreReplicationMessageStruct.decoder(messageBuffer); Long msgId = decoder.int64(MSG_ID_FIELD); UUID clientId = messageCodecUtils.decodeUUID(decoder); String storeName = decoder.string(SERVER_STORE_NAME_FIELD); - ServerStoreConfiguration configuration = messageCodecUtils.decodeServerStoreConfiguration(decoder); + ServerStoreConfiguration configuration = configCodec.decodeServerStoreConfiguration(decoder); return new PassiveReplicationMessage.CreateServerStoreReplicationMessage(msgId, clientId, storeName, configuration); } diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java index 84a64c3950..9073fc23d8 100644 --- 
a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/EhcacheSyncMessageCodecTest.java @@ -19,6 +19,7 @@ import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.CommonConfigCodec; import org.ehcache.clustered.common.internal.store.Chain; import org.junit.Test; @@ -59,7 +60,7 @@ public void testStateSyncMessageEncodeDecode() throws Exception { storeConfigs.put("cache2", serverStoreConfiguration2); EhcacheStateSyncMessage message = new EhcacheStateSyncMessage(serverSideConfig, storeConfigs); - EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(); + EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(new CommonConfigCodec()); EhcacheStateSyncMessage decodedMessage = (EhcacheStateSyncMessage) codec.decode(0, codec.encode(0, message)); assertThat(decodedMessage.getConfiguration().getDefaultServerResource(), is("default-pool")); @@ -90,7 +91,7 @@ public void testStateSyncMessageEncodeDecode() throws Exception { @Test public void testDataSyncMessageEncodeDecode() throws Exception { - EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(); + EhcacheSyncMessageCodec codec = new EhcacheSyncMessageCodec(new CommonConfigCodec()); Map chainMap = new HashMap<>(); Chain chain = getChain(true, createPayload(10L), createPayload(100L), createPayload(1000L)); chainMap.put(1L, chain); diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java index 93b4ba8b00..d846eb8882 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/internal/messages/PassiveReplicationMessageCodecTest.java @@ -19,6 +19,7 @@ import org.ehcache.clustered.common.Consistency; import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; +import org.ehcache.clustered.common.internal.messages.CommonConfigCodec; import org.ehcache.clustered.common.internal.messages.EhcacheMessageType; import org.ehcache.clustered.common.internal.store.Chain; import org.ehcache.clustered.server.internal.messages.PassiveReplicationMessage.ChainReplicationMessage; @@ -43,7 +44,7 @@ public class PassiveReplicationMessageCodecTest { private static final long MESSAGE_ID = 42L; - private PassiveReplicationMessageCodec codec = new PassiveReplicationMessageCodec(); + private PassiveReplicationMessageCodec codec = new PassiveReplicationMessageCodec(new CommonConfigCodec()); @Test public void testClientIDTrackerMessageCodec() { From b005470cb5449aff1e38c6e17950cc066e151e4e Mon Sep 17 00:00:00 2001 From: Henri Tremblay Date: Tue, 13 Dec 2016 12:02:44 -0500 Subject: [PATCH 201/218] Improve javadoc on CacheManagerBuilder.persistence --- .../java/org/ehcache/config/builders/CacheManagerBuilder.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java b/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java index 2de7ef7c66..261b91364e 
100644 --- a/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java +++ b/impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java @@ -374,11 +374,13 @@ public static CacheManagerBuilder newCacheManagerBuilder() { } /** - * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager}. + * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual + * level of persistence is configured on the disk resource pool per cache. * * @param location the file location for persistent data * @return a {@code CacheManagerConfiguration} * + * @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean) * @see #with(CacheManagerConfiguration) * @see PersistentCacheManager */ From 688a21a09cecab48419267eaacd0aef26ee26e36 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Wed, 30 Nov 2016 17:08:31 +0100 Subject: [PATCH 202/218] :shirt: #1430 Clear warnings in xml module * Can't clear all as some are in the generated code. --- xml/src/main/java/org/ehcache/xml/XmlConfiguration.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java b/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java index cad0290dba..facfba4500 100644 --- a/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java +++ b/xml/src/main/java/org/ehcache/xml/XmlConfiguration.java @@ -325,6 +325,7 @@ private void parseConfiguration() templates.putAll(configurationParser.getTemplates()); } + @SuppressWarnings("unchecked") private Expiry getExpiry(ClassLoader cacheClassLoader, ConfigurationParser.Expiry parsedExpiry) throws ClassNotFoundException, InstantiationException, IllegalAccessException { final Expiry expiry; @@ -459,6 +460,7 @@ public CacheConfigurationBuilder newCacheConfigurationBuilderFromTe return internalCacheConfigurationBuilderFromTemplate(name, keyType, valueType, resourcePoolsBuilder.build()); } + @SuppressWarnings("unchecked") private CacheConfigurationBuilder internalCacheConfigurationBuilderFromTemplate(final String name, final Class keyType, final Class valueType, @@ -554,6 +556,7 @@ private CacheConfigurationBuilder handleListenersConfig(Configurati } if (listenersConfig.listeners() != null) { for (ConfigurationParser.Listener listener : listenersConfig.listeners()) { + @SuppressWarnings("unchecked") final Class> cacheEventListenerClass = (Class>)getClassForName(listener.className(), defaultClassLoader); final List eventListToFireOn = listener.fireOn(); Set eventSetToFireOn = new HashSet(); From ebc063a0a61661af1b5230a578ca9ad200d23a6d Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 2 Dec 2016 11:51:46 +0100 Subject: [PATCH 203/218] :shirt: #1430 Clear warnings in 107 module --- 107/build.gradle | 4 ++++ .../org/ehcache/jsr107/ConfigurationMerger.java | 4 +++- .../DefaultJsr107SerializationProvider.java | 1 + .../org/ehcache/jsr107/Eh107CacheManager.java | 3 ++- .../jsr107/Eh107CacheStatisticsMXBean.java | 14 +++++++++++--- .../jsr107/Eh107CompleteConfiguration.java | 2 +- ...EhCache107ConfigurationIntegrationDocTest.java | 4 ++++ .../ConfigStatsManagementActivationTest.java | 8 ++++++++ .../org/ehcache/jsr107/Eh107CacheTypeTest.java | 15 +++++++++------ .../ehcache/jsr107/Eh107XmlIntegrationTest.java | 1 + .../ehcache/jsr107/LoadAtomicsWith107Test.java | 7 +++++-- .../ehcache/jsr107/LoaderWriterConfigTest.java | 14 ++++++++------ .../java/org/ehcache/jsr107/LoaderWriterTest.java | 14 ++++++++++---- 13 files changed, 67 
insertions(+), 24 deletions(-) diff --git a/107/build.gradle b/107/build.gradle index e40e9a190d..0c4ace8679 100644 --- a/107/build.gradle +++ b/107/build.gradle @@ -40,6 +40,10 @@ dependencies { } } +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} + javadoc { exclude '**/tck/**' } diff --git a/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java b/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java index b7d941887b..f143ad89e1 100644 --- a/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java +++ b/107/src/main/java/org/ehcache/jsr107/ConfigurationMerger.java @@ -187,6 +187,7 @@ private CacheConfigurationBuilder handleStoreByValue(Eh107CompleteC return builder; } + @SuppressWarnings("unchecked") private static CacheConfigurationBuilder addDefaultCopiers(CacheConfigurationBuilder builder, Class keyType, Class valueType ) { Set immutableTypes = new HashSet(); immutableTypes.add(String.class); @@ -218,7 +219,8 @@ private static void handleCopierDefaultsforImmutableTypes(Map, ClassIns addIdentityCopierIfNoneRegistered(defaults, Character.class); } - private static void addIdentityCopierIfNoneRegistered(Map, ClassInstanceConfiguration>> defaults, Class clazz) { + @SuppressWarnings("unchecked") + private static void addIdentityCopierIfNoneRegistered(Map, ClassInstanceConfiguration>> defaults, Class clazz) { if (!defaults.containsKey(clazz)) { defaults.put(clazz, new DefaultCopierConfiguration(Eh107IdentityCopier.class, DefaultCopierConfiguration.Type.VALUE)); } diff --git a/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java b/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java index 4528ace604..5267f646c9 100644 --- a/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java +++ b/107/src/main/java/org/ehcache/jsr107/DefaultJsr107SerializationProvider.java @@ -24,6 +24,7 @@ */ class DefaultJsr107SerializationProvider extends DefaultSerializationProvider { + @SuppressWarnings("unchecked") DefaultJsr107SerializationProvider() { super(new DefaultSerializationProviderConfiguration() .addSerializerFor(Object.class, (Class) PlainJavaSerializer.class)); diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java b/107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java index 4c5bf1ee5c..d9c3c13078 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java +++ b/107/src/main/java/org/ehcache/jsr107/Eh107CacheManager.java @@ -92,6 +92,7 @@ private void refreshAllCaches() { for (Map.Entry> namedCacheEntry : caches.entrySet()) { Eh107Cache cache = namedCacheEntry.getValue(); if (!cache.isClosed()) { + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); if (configuration.isManagementEnabled()) { enableManagement(cache, true); @@ -114,7 +115,7 @@ private Eh107Cache wrapEhcacheCache(String alias, InternalCache serviceConfiguration : cache.getRuntimeConfiguration().getServiceConfigurations()) { if (serviceConfiguration instanceof DefaultCopierConfiguration) { - DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; + DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration) serviceConfiguration; if(!copierConfig.getClazz().isAssignableFrom(IdentityCopier.class)) storeByValueOnHeap = true; break; diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java 
index 162c37c119..0fac32e3aa 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java +++ b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java @@ -187,6 +187,7 @@ private static float normalize(float value) { } static > OperationStatistic findCacheStatistic(Cache cache, Class type, String statName) { + @SuppressWarnings("unchecked") Query query = queryBuilder() .children() .filter(context(attributes(Matchers.>allOf(hasAttribute("name", statName), hasAttribute("type", type))))) @@ -199,11 +200,14 @@ static > OperationStatistic findCacheStatistic(Cache if (result.isEmpty()) { throw new RuntimeException("result must not be null"); } - return (OperationStatistic) result.iterator().next().getContext().attributes().get("this"); + @SuppressWarnings("unchecked") + OperationStatistic statistic = (OperationStatistic) result.iterator().next().getContext().attributes().get("this"); + return statistic; } > OperationStatistic findLowestTierStatistic(Cache cache, Class type, String statName) { + @SuppressWarnings("unchecked") Query statQuery = queryBuilder() .descendants() .filter(context(attributes(Matchers.>allOf(hasAttribute("name", statName), hasAttribute("type", type))))) @@ -217,7 +221,9 @@ > OperationStatistic findLowestTierStatistic(Cache ca //if only 1 store then you don't need to find the lowest tier if(statResult.size() == 1) { - return (OperationStatistic) statResult.iterator().next().getContext().attributes().get("this"); + @SuppressWarnings("unchecked") + OperationStatistic statistic = (OperationStatistic) statResult.iterator().next().getContext().attributes().get("this"); + return statistic; } String lowestStoreType = "onheap"; @@ -234,7 +240,9 @@ > OperationStatistic findLowestTierStatistic(Cache ca } } - return (OperationStatistic)lowestTierNode.getContext().attributes().get("this"); + @SuppressWarnings("unchecked") + OperationStatistic statistic = (OperationStatistic) lowestTierNode.getContext().attributes().get("this"); + return statistic; } class CompensatingCounters { diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java b/107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java index 3d9dbc75e3..c65380d101 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java +++ b/107/src/main/java/org/ehcache/jsr107/Eh107CompleteConfiguration.java @@ -113,7 +113,7 @@ private static boolean isStoreByValue(Configuration config, CacheCo Collection> serviceConfigurations = ehcacheConfig.getServiceConfigurations(); for (ServiceConfiguration serviceConfiguration : serviceConfigurations) { if (serviceConfiguration instanceof DefaultCopierConfiguration) { - DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; + DefaultCopierConfiguration copierConfig = (DefaultCopierConfiguration)serviceConfiguration; if(copierConfig.getType().equals(DefaultCopierConfiguration.Type.VALUE)) { if(copierConfig.getClazz().isAssignableFrom(IdentityCopier.class)) { return false; diff --git a/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java b/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java index 5d087bda1c..77ed29f550 100644 --- a/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java +++ b/107/src/test/java/org/ehcache/docs/EhCache107ConfigurationIntegrationDocTest.java @@ -109,6 +109,7 @@ public void basicConfiguration() throws Exception { } @Test + @SuppressWarnings("unchecked") public 
void testGettingToEhcacheConfiguration() { // tag::mutableConfigurationExample[] MutableConfiguration configuration = new MutableConfiguration(); @@ -137,6 +138,7 @@ public void testGettingToEhcacheConfiguration() { } @Test + @SuppressWarnings("unchecked") public void testUsingEhcacheConfiguration() throws Exception { // tag::ehcacheBasedConfigurationExample[] CacheConfiguration cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, @@ -172,6 +174,7 @@ public void testWithoutEhcacheExplicitDependencyCanSpecifyXML() throws Exception } @Test + @SuppressWarnings("unchecked") public void testWithoutEhcacheExplicitDependencyAndNoCodeChanges() throws Exception { CacheManager manager = cachingProvider.getCacheManager( getClass().getResource("/org/ehcache/docs/ehcache-jsr107-template-override.xml").toURI(), @@ -216,6 +219,7 @@ public void testWithoutEhcacheExplicitDependencyAndNoCodeChanges() throws Except } @Test + @SuppressWarnings("unchecked") public void testTemplateOverridingStoreByValue() throws Exception { cacheManager = cachingProvider.getCacheManager( getClass().getResource("/org/ehcache/docs/ehcache-jsr107-template-override.xml").toURI(), diff --git a/107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java b/107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java index 8fb9693502..6f4c288a51 100644 --- a/107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java +++ b/107/src/test/java/org/ehcache/jsr107/ConfigStatsManagementActivationTest.java @@ -57,6 +57,7 @@ public void testEnabledAtCacheLevel() throws Exception { .toURI(), provider.getDefaultClassLoader()); Cache cache = cacheManager.getCache("stringCache", String.class, String.class); + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(configuration.isManagementEnabled(), is(true)); @@ -69,6 +70,7 @@ public void testEnabledAtCacheManagerLevel() throws Exception { .toURI(), provider.getDefaultClassLoader()); Cache cache = cacheManager.getCache("stringCache", String.class, String.class); + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(configuration.isManagementEnabled(), is(true)); @@ -81,6 +83,7 @@ public void testCacheLevelOverridesCacheManagerLevel() throws Exception { .toURI(), provider.getDefaultClassLoader()); Cache cache = cacheManager.getCache("overrideCache", String.class, String.class); + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(configuration.isManagementEnabled(), is(false)); @@ -93,6 +96,7 @@ public void testCacheLevelOnlyOneOverridesCacheManagerLevel() throws Exception { .toURI(), provider.getDefaultClassLoader()); Cache cache = cacheManager.getCache("overrideOneCache", String.class, String.class); + @SuppressWarnings("unchecked") Eh107Configuration configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(configuration.isManagementEnabled(), is(true)); @@ -107,6 +111,7 @@ public void testEnableCacheLevelProgrammatic() throws Exception { .add(new Jsr107CacheConfiguration(ConfigurationElementState.ENABLED, ConfigurationElementState.ENABLED)); Cache cache = cacheManager.createCache("test", Eh107Configuration.fromEhcacheCacheConfiguration(configurationBuilder)); + @SuppressWarnings("unchecked") Eh107Configuration configuration = 
cache.getConfiguration(Eh107Configuration.class); assertThat(configuration.isManagementEnabled(), is(true)); assertThat(configuration.isStatisticsEnabled(), is(true)); @@ -125,6 +130,7 @@ public void testManagementDisabledOverriddenFromTemplate() throws Exception { Cache cache = cacheManager.createCache("enables-mbeans", configuration); + @SuppressWarnings("unchecked") Eh107Configuration eh107Configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(eh107Configuration.isManagementEnabled(), is(true)); assertThat(eh107Configuration.isStatisticsEnabled(), is(true)); @@ -143,6 +149,7 @@ public void testManagementEnabledOverriddenFromTemplate() throws Exception { Cache cache = cacheManager.createCache("disables-mbeans", configuration); + @SuppressWarnings("unchecked") Eh107Configuration eh107Configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(eh107Configuration.isManagementEnabled(), is(false)); assertThat(eh107Configuration.isStatisticsEnabled(), is(false)); @@ -158,6 +165,7 @@ public void basicJsr107StillWorks() throws Exception { configuration.setStatisticsEnabled(true); Cache cache = cacheManager.createCache("cache", configuration); + @SuppressWarnings("unchecked") Eh107Configuration eh107Configuration = cache.getConfiguration(Eh107Configuration.class); assertThat(eh107Configuration.isManagementEnabled(), is(true)); diff --git a/107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java b/107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java index 68e1cd7007..d787eef9f8 100644 --- a/107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java +++ b/107/src/test/java/org/ehcache/jsr107/Eh107CacheTypeTest.java @@ -31,6 +31,7 @@ public class Eh107CacheTypeTest { @Test + @SuppressWarnings("unchecked") public void testCompileTimeTypeSafety() throws Exception { CachingProvider provider = Caching.getCachingProvider(); javax.cache.CacheManager cacheManager = @@ -70,10 +71,11 @@ public void testRunTimeTypeSafety() throws Exception { cache1Conf.setTypes(Long.class, String.class); javax.cache.Cache cache = cacheManager.createCache("cache1", cache1Conf); - Configuration cache1CompleteConf = cache.getConfiguration(Configuration.class); + @SuppressWarnings("unchecked") + Configuration cache1CompleteConf = cache.getConfiguration(Configuration.class); - assertThat((Class)cache1CompleteConf.getKeyType(), is(equalTo(Long.class))); - assertThat((Class)cache1CompleteConf.getValueType(), is(equalTo(String.class))); + assertThat(cache1CompleteConf.getKeyType(), is(equalTo(Long.class))); + assertThat(cache1CompleteConf.getValueType(), is(equalTo(String.class))); try { cacheManager.getCache("cache1"); @@ -94,9 +96,10 @@ public void testTypeOverriding() throws Exception { MutableConfiguration cache1Conf = new MutableConfiguration(); cache1Conf.setTypes(Long.class, String.class); javax.cache.Cache cache = cacheManager.createCache("defaultCache", cache1Conf); - Configuration cache1CompleteConf = cache.getConfiguration(Configuration.class); - assertThat((Class)cache1CompleteConf.getKeyType(), is(equalTo(Long.class))); - assertThat((Class)cache1CompleteConf.getValueType(), is(equalTo(String.class))); + @SuppressWarnings("unchecked") + Configuration cache1CompleteConf = cache.getConfiguration(Configuration.class); + assertThat(cache1CompleteConf.getKeyType(), is(equalTo(Long.class))); + assertThat(cache1CompleteConf.getValueType(), is(equalTo(String.class))); } @Test diff --git a/107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java 
b/107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java index b773960a6d..9898665083 100644 --- a/107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java +++ b/107/src/test/java/org/ehcache/jsr107/Eh107XmlIntegrationTest.java @@ -92,6 +92,7 @@ public boolean isStoreByValue() { } }); + @SuppressWarnings("unchecked") CompleteConfiguration configuration = cache.getConfiguration(CompleteConfiguration.class); assertThat(configuration, notNullValue()); assertThat(configuration.isStoreByValue(), is(true)); diff --git a/107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java b/107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java index bddba5a508..70b3a6e5dc 100644 --- a/107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java +++ b/107/src/test/java/org/ehcache/jsr107/LoadAtomicsWith107Test.java @@ -20,6 +20,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import javax.cache.Cache; import javax.cache.CacheManager; @@ -41,15 +43,16 @@ * LoadAtomicsWith107Test */ public class LoadAtomicsWith107Test { + @Mock private CacheLoader cacheLoader; + @Mock private CacheWriter cacheWriter; private Cache testCache; private CacheManager cacheManager; @Before public void setUp() throws Exception { - cacheLoader = mock(CacheLoader.class); - cacheWriter = mock(CacheWriter.class); + MockitoAnnotations.initMocks(this); CachingProvider provider = Caching.getCachingProvider(); cacheManager = provider.getCacheManager(this.getClass().getResource("/ehcache-loader-writer-107-load-atomics.xml").toURI(), getClass().getClassLoader()); diff --git a/107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java b/107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java index f54d6ce074..968e927869 100644 --- a/107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java +++ b/107/src/test/java/org/ehcache/jsr107/LoaderWriterConfigTest.java @@ -19,6 +19,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import java.util.Set; @@ -42,10 +44,15 @@ */ public class LoaderWriterConfigTest { + @Mock + private CacheLoader cacheLoader; + @Mock + private CacheWriter cacheWriter; private CachingProvider cachingProvider; @Before public void setUp() { + MockitoAnnotations.initMocks(this); cachingProvider = Caching.getCachingProvider(); } @@ -55,10 +62,8 @@ public void tearDown() { } @Test + @SuppressWarnings("unchecked") public void enablingWriteThroughDoesNotForceReadThrough() throws Exception { - final CacheLoader cacheLoader = mock(CacheLoader.class); - final CacheWriter cacheWriter = mock(CacheWriter.class); - MutableConfiguration config = getConfiguration(false, cacheLoader, true, cacheWriter); Cache cache = cachingProvider.getCacheManager().createCache("writingCache", config); @@ -75,9 +80,6 @@ public void enablingWriteThroughDoesNotForceReadThrough() throws Exception { @Test public void enablingReadThroughDoesNotForceWriteThrough() throws Exception { - final CacheLoader cacheLoader = mock(CacheLoader.class); - final CacheWriter cacheWriter = mock(CacheWriter.class); - MutableConfiguration config = getConfiguration(true, cacheLoader, false, cacheWriter); Cache cache = cachingProvider.getCacheManager().createCache("writingCache", config); diff --git a/107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java b/107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java index 
5c00587331..56f46ab81a 100644 --- a/107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java +++ b/107/src/test/java/org/ehcache/jsr107/LoaderWriterTest.java @@ -20,6 +20,9 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -35,8 +38,6 @@ import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; @@ -47,15 +48,16 @@ */ public class LoaderWriterTest { + @Mock private CacheLoader cacheLoader; + @Mock private CacheWriter cacheWriter; private Cache testCache; private CacheManager cacheManager; @Before public void setUp() throws Exception { - cacheLoader = mock(CacheLoader.class); - cacheWriter = mock(CacheWriter.class); + MockitoAnnotations.initMocks(this); CachingProvider provider = Caching.getCachingProvider(); cacheManager = provider.getCacheManager(this.getClass().getResource("/ehcache-loader-writer-107.xml").toURI(), getClass().getClassLoader()); @@ -273,4 +275,8 @@ public void testSimpleRemove2ArgsWithLoaderAndWriter_existsInStore_notEquals() t verifyZeroInteractions(cacheWriter); } + private void reset(Object mock) { + Mockito.reset(mock); + } + } From 34c1e7e1c69e8edeb5c2c4d826b051e1ddbaf21f Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 2 Dec 2016 11:59:25 +0100 Subject: [PATCH 204/218] :shirt: #1430 Clear warnings in osgi-test module --- osgi-test/build.gradle | 4 ++++ osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java | 1 + 2 files changed, 5 insertions(+) diff --git a/osgi-test/build.gradle b/osgi-test/build.gradle index c1ba150d56..9fe7987689 100644 --- a/osgi-test/build.gradle +++ b/osgi-test/build.gradle @@ -48,6 +48,10 @@ configurations.testRuntime { resolutionStrategy.force 'org.sonatype.plexus:plexus-sec-dispatcher:1.4' } +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} + sourceSets { test { // Needed for PaxExam which makes the dynamic bundle load content of a single dir diff --git a/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java b/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java index 1dcb49ab7e..c8cc4aad14 100644 --- a/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java +++ b/osgi-test/src/test/java/org/ehcache/osgi/Jsr107OsgiTest.java @@ -56,6 +56,7 @@ public Option[] config() { @Test @Ignore("Needs https://github.com/jsr107/jsr107spec/issues/326 to be fixed and so will wait on javax.cache:cache-api:1.0.1 only") + @SuppressWarnings("unchecked") public void testJsr107EhcacheOsgi() throws Exception { CachingProvider cachingProvider = Caching.getCachingProvider("org.ehcache.jsr107.EhcacheCachingProvider", getClass().getClassLoader()); CacheManager cacheManager = cachingProvider.getCacheManager(getClass().getResource("/org/ehcache/osgi/ehcache-107-osgi.xml").toURI(), getClass().getClassLoader()); From 5e8b6f072be7aa8ee421a53093aba94ee12a1a80 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Fri, 2 Dec 2016 12:21:29 +0100 Subject: [PATCH 205/218] :shirt: #1430 Clear warnings in transactions module --- transactions/build.gradle | 4 + .../xa/internal/SoftLockSerializer.java | 8 +- 
.../SoftLockValueCombinedSerializer.java | 4 +- .../transactions/xa/internal/XAStore.java | 35 ++- .../xa/internal/XAValueHolder.java | 3 +- .../internal/journal/PersistentJournal.java | 1 + ...acheManagerServiceConfigurationParser.java | 4 +- ...ansactionManagerProviderConfiguration.java | 1 + .../transactions/xa/XAGettingStarted.java | 1 + .../xa/internal/EhcacheXAResourceTest.java | 170 ++-------- .../internal/UnSupportedResourceTypeTest.java | 6 +- .../transactions/xa/internal/XAStoreTest.java | 293 ++++-------------- .../xa/internal/XATransactionContextTest.java | 47 ++- .../xa/internal/XAValueHolderTest.java | 3 +- 14 files changed, 147 insertions(+), 433 deletions(-) diff --git a/transactions/build.gradle b/transactions/build.gradle index 04a02318fa..efb780891a 100644 --- a/transactions/build.gradle +++ b/transactions/build.gradle @@ -34,6 +34,10 @@ dependencies { pomOnlyProvided 'javax.transaction:jta:1.1', 'org.codehaus.btm:btm:2.1.4' } +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} + project.signing { required { project.isReleaseVersion && project.gradle.taskGraph.hasTask("uploadArchives") } sign project.configurations.getByName('archives') diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java index 9b5e3f0e9e..25f4938b8a 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockSerializer.java @@ -36,7 +36,7 @@ * * @author Ludovic Orban */ -class SoftLockSerializer implements Serializer { +class SoftLockSerializer implements Serializer> { private final ClassLoader classLoader; @@ -45,7 +45,7 @@ class SoftLockSerializer implements Serializer { } @Override - public ByteBuffer serialize(SoftLock object) { + public ByteBuffer serialize(SoftLock object) { ByteArrayOutputStream bout = new ByteArrayOutputStream(); try { ObjectOutputStream oout = new ObjectOutputStream(bout); @@ -64,7 +64,7 @@ public ByteBuffer serialize(SoftLock object) { @SuppressWarnings("unchecked") @Override - public SoftLock read(ByteBuffer entry) throws SerializerException, ClassNotFoundException { + public SoftLock read(ByteBuffer entry) throws SerializerException, ClassNotFoundException { ByteBufferInputStream bin = new ByteBufferInputStream(entry); try { OIS ois = new OIS(bin, classLoader); @@ -85,7 +85,7 @@ public SoftLock read(ByteBuffer entry) throws SerializerException, ClassNotFound } @Override - public boolean equals(SoftLock object, ByteBuffer binary) throws SerializerException, ClassNotFoundException { + public boolean equals(SoftLock object, ByteBuffer binary) throws SerializerException, ClassNotFoundException { return object.equals(read(binary)); } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java index afd7b55b02..655b02b06e 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/SoftLockValueCombinedSerializer.java @@ -29,10 +29,10 @@ */ class SoftLockValueCombinedSerializer implements Serializer> { - private final AtomicReference>> softLockSerializerRef; + private final AtomicReference>> softLockSerializerRef; private final 
Serializer valueSerializer; - SoftLockValueCombinedSerializer(AtomicReference>> softLockSerializerRef, Serializer valueSerializer) { + SoftLockValueCombinedSerializer(AtomicReference>> softLockSerializerRef, Serializer valueSerializer) { this.softLockSerializerRef = softLockSerializerRef; this.valueSerializer = valueSerializer; } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java index f3ae3750aa..d6eaf8db56 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAStore.java @@ -710,11 +710,11 @@ public List getConfigurationChangeListeners() return underlyingStore.getConfigurationChangeListeners(); } - private static final class SoftLockValueCombinedSerializerLifecycleHelper { - final AtomicReference softLockSerializerRef; + private static final class SoftLockValueCombinedSerializerLifecycleHelper { + final AtomicReference> softLockSerializerRef; final ClassLoader classLoader; - SoftLockValueCombinedSerializerLifecycleHelper(AtomicReference softLockSerializerRef, ClassLoader classLoader) { + SoftLockValueCombinedSerializerLifecycleHelper(AtomicReference> softLockSerializerRef, ClassLoader classLoader) { this.softLockSerializerRef = softLockSerializerRef; this.classLoader = classLoader; } @@ -861,7 +861,7 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o }; // get the PersistenceSpaceIdentifier if the cache is persistent, null otherwise - DiskResourceService.PersistenceSpaceIdentifier persistenceSpaceId = findSingletonAmongst(DiskResourceService.PersistenceSpaceIdentifier.class, serviceConfigs); + DiskResourceService.PersistenceSpaceIdentifier persistenceSpaceId = findSingletonAmongst(DiskResourceService.PersistenceSpaceIdentifier.class, (Object[]) serviceConfigs); // find the copiers Collection copierConfigs = findAmongst(DefaultCopierConfiguration.class, underlyingServiceConfigs); @@ -885,12 +885,12 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o // force-in a value copier if none is configured, or wrap the configured one in a soft lock copier if (valueCopierConfig == null) { - underlyingServiceConfigs.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); + underlyingServiceConfigs.add(new DefaultCopierConfiguration(SerializingCopier.asCopierClass(), DefaultCopierConfiguration.Type.VALUE)); } else { CopyProvider copyProvider = serviceProvider.getService(CopyProvider.class); - Copier valueCopier = copyProvider.createValueCopier(storeConfig.getValueType(), storeConfig.getValueSerializer(), valueCopierConfig); - SoftLockValueCombinedCopier softLockValueCombinedCopier = new SoftLockValueCombinedCopier(valueCopier); - underlyingServiceConfigs.add(new DefaultCopierConfiguration((Copier) softLockValueCombinedCopier, DefaultCopierConfiguration.Type.VALUE)); + Copier valueCopier = copyProvider.createValueCopier(storeConfig.getValueType(), storeConfig.getValueSerializer(), valueCopierConfig); + Copier> softLockValueCombinedCopier = new SoftLockValueCombinedCopier(valueCopier); + underlyingServiceConfigs.add(new DefaultCopierConfiguration>(softLockValueCombinedCopier, DefaultCopierConfiguration.Type.VALUE)); } // lookup the required XAStore services @@ -898,13 +898,15 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o TimeSource timeSource = 
serviceProvider.getService(TimeSourceService.class).getTimeSource(); // create the soft lock serializer - AtomicReference>> softLockSerializerRef = new AtomicReference>>(); - SoftLockValueCombinedSerializer softLockValueCombinedSerializer = new SoftLockValueCombinedSerializer(softLockSerializerRef, storeConfig.getValueSerializer()); + AtomicReference> softLockSerializerRef = new AtomicReference>(); + SoftLockValueCombinedSerializer softLockValueCombinedSerializer = new SoftLockValueCombinedSerializer(softLockSerializerRef, storeConfig.getValueSerializer()); // create the underlying store - Store.Configuration> underlyingStoreConfig = new StoreConfigurationImpl>(storeConfig.getKeyType(), (Class) SoftLock.class, evictionAdvisor, + @SuppressWarnings("unchecked") + Class> softLockClass = (Class) SoftLock.class; + Store.Configuration> underlyingStoreConfig = new StoreConfigurationImpl>(storeConfig.getKeyType(), softLockClass, evictionAdvisor, storeConfig.getClassLoader(), expiry, storeConfig.getResourcePools(), storeConfig.getDispatcherConcurrency(), storeConfig.getKeySerializer(), softLockValueCombinedSerializer); - Store> underlyingStore = (Store) underlyingStoreProvider.createStore(underlyingStoreConfig, underlyingServiceConfigs.toArray(new ServiceConfiguration[0])); + Store> underlyingStore = underlyingStoreProvider.createStore(underlyingStoreConfig, underlyingServiceConfigs.toArray(new ServiceConfiguration[0])); // create the XA store TransactionManagerWrapper transactionManagerWrapper = transactionManagerProvider.getTransactionManagerWrapper(); @@ -912,8 +914,8 @@ public Duration getExpiryForUpdate(K key, ValueSupplier> o transactionManagerWrapper, timeSource, journal, uniqueXAResourceId); // create the softLockSerializer lifecycle helper - SoftLockValueCombinedSerializerLifecycleHelper helper = - new SoftLockValueCombinedSerializerLifecycleHelper((AtomicReference)softLockSerializerRef, storeConfig.getClassLoader()); + SoftLockValueCombinedSerializerLifecycleHelper helper = + new SoftLockValueCombinedSerializerLifecycleHelper(softLockSerializerRef, storeConfig.getClassLoader()); createdStores.put(store, new CreatedStoreRef(underlyingStoreProvider, helper)); return store; @@ -927,7 +929,7 @@ public void releaseStore(Store resource) { } Store.Provider underlyingStoreProvider = createdStoreRef.storeProvider; - SoftLockValueCombinedSerializerLifecycleHelper helper = createdStoreRef.lifecycleHelper; + SoftLockValueCombinedSerializerLifecycleHelper helper = createdStoreRef.lifecycleHelper; if (resource instanceof XAStore) { XAStore xaStore = (XAStore) resource; @@ -946,6 +948,7 @@ public void releaseStore(Store resource) { } @Override + @SuppressWarnings("unchecked") public void initStore(Store resource) { CreatedStoreRef createdStoreRef = createdStores.get(resource); if (createdStoreRef == null) { @@ -953,7 +956,7 @@ public void initStore(Store resource) { } Store.Provider underlyingStoreProvider = createdStoreRef.storeProvider; - SoftLockValueCombinedSerializerLifecycleHelper helper = createdStoreRef.lifecycleHelper; + SoftLockValueCombinedSerializerLifecycleHelper helper = createdStoreRef.lifecycleHelper; if (resource instanceof XAStore) { XAStore xaStore = (XAStore) resource; diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java index 688257416e..7550b17807 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java +++ 
b/transactions/src/main/java/org/ehcache/transactions/xa/internal/XAValueHolder.java @@ -107,7 +107,8 @@ public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; - XAValueHolder that = (XAValueHolder) other; + @SuppressWarnings("unchecked") + XAValueHolder that = (XAValueHolder) other; if (!super.equals(that)) return false; return value.equals(that.value); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java index efb048d954..2fe690150a 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/journal/PersistentJournal.java @@ -96,6 +96,7 @@ public void open() throws IOException { boolean valid = ois.readBoolean(); states.clear(); if (valid) { + @SuppressWarnings("unchecked") Map> readStates = (Map>) ois.readObject(); for (Map.Entry> entry : readStates.entrySet()) { SerializableEntry value = entry.getValue(); diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java b/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java index fe283f2a82..fcbbd82636 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/internal/xml/TxCacheManagerServiceConfigurationParser.java @@ -57,7 +57,9 @@ public ServiceCreationConfiguration parseServiceCrea try { ClassLoader defaultClassLoader = ClassLoading.getDefaultClassLoader(); Class aClass = Class.forName(transactionManagerProviderConfigurationClassName, true, defaultClassLoader); - return new LookupTransactionManagerProviderConfiguration((Class) aClass); + @SuppressWarnings("unchecked") + Class clazz = (Class) aClass; + return new LookupTransactionManagerProviderConfiguration(clazz); } catch (Exception e) { throw new XmlConfigurationException("Error configuring XA transaction manager", e); } diff --git a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java b/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java index ad42cee91d..1b7ea5217e 100644 --- a/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java +++ b/transactions/src/main/java/org/ehcache/transactions/xa/txmgr/provider/LookupTransactionManagerProviderConfiguration.java @@ -25,6 +25,7 @@ public class LookupTransactionManagerProviderConfiguration implements ServiceCre private final Class lookupClass; + @SuppressWarnings("unchecked") public LookupTransactionManagerProviderConfiguration(String className) throws ClassNotFoundException { this.lookupClass = (Class) Class.forName(className); } diff --git a/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java b/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java index b2ab065b52..d2fec36d9d 100644 --- a/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java +++ b/transactions/src/test/java/org/ehcache/docs/transactions/xa/XAGettingStarted.java @@ -128,6 +128,7 @@ public 
void testNonTransactionalAccess() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testXACacheWithWriteThrough() throws Exception { // tag::testXACacheWithWriteThrough[] BitronixTransactionManager transactionManager = diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java index e05d6671d4..d4bd899042 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/EhcacheXAResourceTest.java @@ -20,7 +20,10 @@ import org.ehcache.transactions.xa.internal.journal.Journal; import org.ehcache.transactions.xa.utils.TestXid; import org.hamcrest.Matchers; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import javax.transaction.xa.XAException; import javax.transaction.xa.XAResource; @@ -47,13 +50,22 @@ */ public class EhcacheXAResourceTest { + @Mock + private Store> underlyingStore; + @Mock + private Journal journal; + @Mock + private XATransactionContextFactory xaTransactionContextFactory; + @Mock + private XATransactionContext xaTransactionContext; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + @Test public void testStartEndWorks() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -71,11 +83,6 @@ public void testStartEndWorks() throws Exception { @Test public void testTwoNonEndedStartsFails() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -92,10 +99,6 @@ public void testTwoNonEndedStartsFails() throws Exception { @Test public void testEndWithoutStartFails() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); try { @@ -108,11 +111,6 @@ public void testEndWithoutStartFails() throws Exception { @Test public void testJoinWorks() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - 
EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -127,10 +125,6 @@ public void testJoinWorks() throws Exception { @Test public void testRecoverReportsAbortedTx() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.recover()).thenReturn(Collections.singletonMap(new TransactionId(new TestXid(0, 0)), (Collection) Arrays.asList(1L, 2L, 3L))); @@ -142,10 +136,6 @@ public void testRecoverReportsAbortedTx() throws Exception { @Test public void testRecoverIgnoresInFlightTx() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.recover()).thenReturn(Collections.singletonMap(new TransactionId(new TestXid(0, 0)), (Collection) Arrays.asList(1L, 2L, 3L))); @@ -157,10 +147,6 @@ public void testRecoverIgnoresInFlightTx() throws Exception { @Test public void testCannotPrepareUnknownXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); try { @@ -173,11 +159,6 @@ public void testCannotPrepareUnknownXid() throws Exception { @Test public void testCannotPrepareNonEndedXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -193,11 +174,6 @@ public void testCannotPrepareNonEndedXid() throws Exception { @Test public void testPrepareOk() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -211,11 +187,6 @@ public void testPrepareOk() throws Exception { @Test public void testPrepareReadOnly() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = 
mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -229,11 +200,6 @@ public void testPrepareReadOnly() throws Exception { @Test public void testCannotCommitUnknownXidInFlight() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(false); @@ -250,10 +216,6 @@ public void testCannotCommitUnknownXidInFlight() throws Exception { @Test public void testCannotCommitUnknownXidRecovered() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(false); @@ -268,10 +230,6 @@ public void testCannotCommitUnknownXidRecovered() throws Exception { @Test public void testCannotCommit1PcUnknownXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); try { @@ -284,11 +242,6 @@ public void testCannotCommit1PcUnknownXid() throws Exception { @Test public void testCannotCommit1PcNonEndedXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -304,11 +257,6 @@ public void testCannotCommit1PcNonEndedXid() throws Exception { @Test public void testCannotCommitNonPreparedXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -323,11 +271,6 @@ public void testCannotCommitNonPreparedXid() throws Exception { @Test public void testCannotCommit1PcPreparedXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal 
= mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -342,11 +285,6 @@ public void testCannotCommit1PcPreparedXid() throws Exception { @Test public void testCommit() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -357,11 +295,6 @@ public void testCommit() throws Exception { @Test public void testCommit1Pc() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -372,11 +305,6 @@ public void testCommit1Pc() throws Exception { @Test public void testCannotRollbackUnknownXidInFlight() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -392,10 +320,6 @@ public void testCannotRollbackUnknownXidInFlight() throws Exception { @Test public void testCannotRollbackUnknownXidRecovered() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(false); @@ -410,11 +334,6 @@ public void testCannotRollbackUnknownXidRecovered() throws Exception { @Test public void testCannotRollbackNonEndedXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), 
anyInt())).thenReturn(xaTransactionContext); @@ -430,11 +349,6 @@ public void testCannotRollbackNonEndedXid() throws Exception { @Test public void testRollback() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -445,10 +359,6 @@ public void testRollback() throws Exception { @Test public void testForgetUnknownXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(false); @@ -463,10 +373,6 @@ public void testForgetUnknownXid() throws Exception { @Test public void testForgetInDoubtXid() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); @@ -481,10 +387,6 @@ public void testForgetInDoubtXid() throws Exception { @Test public void testForget() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isHeuristicallyTerminated(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); @@ -496,11 +398,6 @@ public void testForget() throws Exception { @Test public void testTimeoutStart() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -519,11 +416,6 @@ public void testTimeoutStart() throws Exception { @Test public void testTimeoutEndSuccess() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), 
refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -545,11 +437,6 @@ public void testTimeoutEndSuccess() throws Exception { @Test public void testTimeoutEndFail() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.createTransactionContext(eq(new TransactionId(new TestXid(0, 0))), refEq(underlyingStore), refEq(journal), anyInt())).thenReturn(xaTransactionContext); @@ -570,12 +457,8 @@ public void testTimeoutEndFail() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testPrepareTimeout() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -593,11 +476,6 @@ public void testPrepareTimeout() throws Exception { @Test public void testCommit1PcTimeout() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - XATransactionContext xaTransactionContext = mock(XATransactionContext.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(xaTransactionContextFactory.get(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(xaTransactionContext); @@ -615,10 +493,6 @@ public void testCommit1PcTimeout() throws Exception { @Test public void testRecoveryCommitOnePhaseFails() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.recover()).thenReturn(Collections.singletonMap(new TransactionId(new TestXid(0, 0)), (Collection) Arrays.asList(1L, 2L, 3L))); @@ -639,10 +513,6 @@ public void testRecoveryCommitOnePhaseFails() throws Exception { @Test public void testRecoveryCommit() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.recover()).thenReturn(Collections.singletonMap(new TransactionId(new TestXid(0, 0)), (Collection) Arrays.asList(1L, 2L, 3L))); @@ -662,10 +532,6 @@ public void testRecoveryCommit() throws Exception { @Test public void testRecoveryRollback() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); - XATransactionContextFactory xaTransactionContextFactory = mock(XATransactionContextFactory.class); - EhcacheXAResource 
xaResource = new EhcacheXAResource(underlyingStore, journal, xaTransactionContextFactory); when(journal.isInDoubt(eq(new TransactionId(new TestXid(0, 0))))).thenReturn(true); diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java index acdc433304..04d88dd867 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/UnSupportedResourceTypeTest.java @@ -19,6 +19,7 @@ import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; import org.ehcache.core.spi.store.Store; +import org.ehcache.spi.service.ServiceConfiguration; import org.junit.Test; import java.util.HashSet; @@ -36,7 +37,8 @@ public class UnSupportedResourceTypeTest { public void testUnSupportedResourceType() { XAStore.Provider provider = new XAStore.Provider(); - Store.Configuration configuration = mock(Store.Configuration.class); + @SuppressWarnings("unchecked") + Store.Configuration configuration = mock(Store.Configuration.class); ResourcePools resourcePools = mock(ResourcePools.class); Set> resourceTypes = new HashSet>(); @@ -46,7 +48,7 @@ public void testUnSupportedResourceType() { when(resourcePools.getResourceTypeSet()).thenReturn(resourceTypes); try { - provider.createStore(configuration, null); + provider.createStore(configuration, (ServiceConfiguration) null); fail("IllegalStateException expected"); } catch (IllegalStateException e) { diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java index 5c69f1bb77..df914d04b0 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAStoreTest.java @@ -39,7 +39,6 @@ import org.ehcache.impl.internal.events.NullStoreEventDispatcher; import org.ehcache.impl.internal.sizeof.NoopSizeOfEngine; import org.ehcache.impl.internal.spi.copy.DefaultCopyProvider; -import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.MemorySizeParser; import org.ehcache.impl.internal.store.offheap.OffHeapStore; @@ -58,12 +57,10 @@ import org.ehcache.transactions.xa.internal.journal.TransientJournal; import org.ehcache.transactions.xa.internal.txmgr.NullXAResourceRegistry; import org.ehcache.transactions.xa.txmgr.TransactionManagerWrapper; -import org.ehcache.transactions.xa.txmgr.btm.BitronixTransactionManagerLookup; -import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProvider; -import org.ehcache.transactions.xa.txmgr.provider.LookupTransactionManagerProviderConfiguration; import org.ehcache.transactions.xa.txmgr.provider.TransactionManagerProvider; import org.ehcache.transactions.xa.utils.JavaSerializer; import org.ehcache.transactions.xa.utils.TestXid; +import org.junit.Before; import org.junit.Test; import java.util.Arrays; @@ -97,6 +94,8 @@ import static java.util.Collections.emptySet; import static org.ehcache.core.internal.service.ServiceLocator.dependencySet; +import static org.ehcache.expiry.Duration.of; +import static org.ehcache.expiry.Expirations.timeToLiveExpiration; import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -111,7 +110,38 @@ */ public class XAStoreTest { + @SuppressWarnings("unchecked") + private final Class> valueClass = (Class) SoftLock.class; private final TestTransactionManager testTransactionManager = new TestTransactionManager(); + private TransactionManagerWrapper transactionManagerWrapper; + private OnHeapStore> onHeapStore; + private Journal journal; + private TestTimeSource testTimeSource; + private ClassLoader classLoader; + private Serializer keySerializer; + private Serializer> valueSerializer; + private StoreEventDispatcher> eventDispatcher; + private final Expiry expiry = timeToLiveExpiration(of(1, TimeUnit.SECONDS)); + private Copier keyCopier; + private Copier> valueCopier; + + @Before + public void setUp() { + transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); + classLoader = ClassLoader.getSystemClassLoader(); + keySerializer = new JavaSerializer(classLoader); + valueSerializer = new JavaSerializer>(classLoader); + CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); + keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); + valueCopier = copyProvider.createValueCopier(valueClass, valueSerializer); + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, + null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), + 0, keySerializer, valueSerializer); + testTimeSource = new TestTimeSource(); + eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); + onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + journal = new TransientJournal(); + } @Test public void testXAStoreProviderFailsToRankWhenNoTMProviderConfigured() throws Exception { @@ -139,19 +169,6 @@ public Collection getServicesOfType(Class serviceType) @Test public void testSimpleGetPutRemove() throws Exception { String uniqueXAResourceId = "testSimpleGetPutRemove"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -206,20 +223,6 @@ public void testSimpleGetPutRemove() throws Exception { @Test public void 
testConflictingGetPutRemove() throws Exception { String uniqueXAResourceId = "testConflictingGetPutRemove"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); final AtomicReference exception = new AtomicReference(); @@ -337,20 +340,6 @@ public void run() { @Test public void testIterate() throws Exception { String uniqueXAResourceId = "testIterate"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); testTransactionManager.begin(); @@ -440,20 +429,6 @@ public void testIterate() throws Exception { @Test public void testPutIfAbsent() throws Exception { String uniqueXAResourceId = "testPutIfAbsent"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = 
copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); final AtomicReference exception = new AtomicReference(); @@ -503,20 +478,6 @@ public Object call() throws Exception { @Test public void testRemove2Args() throws Exception { String uniqueXAResourceId = "testRemove2Args"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); final AtomicReference exception = new AtomicReference(); @@ -602,20 +563,6 @@ public Object call() throws Exception { @Test public void testReplace2Args() throws Exception { String uniqueXAResourceId = "testReplace2Args"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new 
OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); final AtomicReference exception = new AtomicReference(); @@ -700,20 +647,6 @@ public Object call() throws Exception { @Test public void testReplace3Args() throws Exception { String uniqueXAResourceId = "testReplace3Args"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, - null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); - final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); final AtomicReference exception = new AtomicReference(); @@ -796,23 +729,10 @@ public Object call() throws Exception { @Test public void testCompute() throws Exception { String uniqueXAResourceId = "testCompute"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, - classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, 
keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); Journal journal = new TransientJournal(); @@ -1036,23 +956,10 @@ public String apply(Long aLong, String s) { @Test public void testComputeIfAbsent() throws Exception { String uniqueXAResourceId = "testComputeIfAbsent"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, - classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), - 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); Journal journal = new TransientJournal(); @@ -1114,24 +1021,14 @@ public String apply(Long aLong) { @Test public void testExpiry() throws Exception { String uniqueXAResourceId = "testExpiry"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - Expiry expiry = Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS)); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, - classLoader, expiry, 
ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, + null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); Journal journal = new TransientJournal(); @@ -1156,13 +1053,6 @@ public void testExpiry() throws Exception { @Test public void testExpiryCreateException() throws Exception { String uniqueXAResourceId = "testExpiryCreateException"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); Expiry expiry = new Expiry() { @Override @@ -1180,19 +1070,16 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o throw new AssertionError(); } }; - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, 
keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, tieredStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1205,14 +1092,6 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o @Test public void testExpiryAccessException() throws Exception { String uniqueXAResourceId = "testExpiryAccessException"; - final TestTimeSource testTimeSource = new TestTimeSource(); - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); Expiry expiry = new Expiry() { @Override @@ -1233,18 +1112,16 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o return Duration.INFINITE; } }; - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, tieredStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1265,14 +1142,6 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o @Test public void testExpiryUpdateException() throws Exception{ String uniqueXAResourceId = "testExpiryUpdateException"; - final TestTimeSource testTimeSource = new TestTimeSource(); - 
TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); Expiry expiry = new Expiry() { @Override @@ -1293,18 +1162,16 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o return Duration.INFINITE; } }; - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, tieredStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1323,27 +1190,17 @@ public Duration getExpiryForUpdate(Object key, ValueSupplier o @Test public void testBulkCompute() throws Exception { String uniqueXAResourceId = "testBulkCompute"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); Expiry expiry = Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS)); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 
0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore = new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, tieredStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1415,27 +1272,17 @@ public void testBulkCompute() throws Exception { @Test public void testBulkComputeIfAbsent() throws Exception { String uniqueXAResourceId = "testBulkComputeIfAbsent"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); Expiry expiry = Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS)); - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().heap(10, EntryUnit.ENTRIES).build(), 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - StoreEventDispatcher eventDispatcher = NullStoreEventDispatcher.nullStoreEventDispatcher(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); - Store.Configuration offHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, null, + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); + Store.Configuration> offHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, null, classLoader, expiry, ResourcePoolsBuilder.newResourcePoolsBuilder().offheap(10, MemoryUnit.MB).build(), 0, keySerializer, valueSerializer); - OffHeapStore> offHeapStore = (OffHeapStore) new OffHeapStore(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); + OffHeapStore> offHeapStore 
= new OffHeapStore>(offHeapConfig, testTimeSource, eventDispatcher, MemorySizeParser.parse("10M")); OffHeapStoreLifecycleHelper.init(offHeapStore); TieredStore> tieredStore = new TieredStore>(onHeapStore, offHeapStore); - Journal journal = new TransientJournal(); XAStore xaStore = new XAStore(Long.class, String.class, tieredStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); @@ -1495,14 +1342,6 @@ public void testBulkComputeIfAbsent() throws Exception { @Test public void testCustomEvictionAdvisor() throws Exception { String uniqueXAResourceId = "testCustomEvictionAdvisor"; - TransactionManagerWrapper transactionManagerWrapper = new TransactionManagerWrapper(testTransactionManager, new NullXAResourceRegistry()); - ClassLoader classLoader = ClassLoader.getSystemClassLoader(); - Serializer keySerializer = new JavaSerializer(classLoader); - Serializer valueSerializer = new JavaSerializer(classLoader); - CopyProvider copyProvider = new DefaultCopyProvider(new DefaultCopyProviderConfiguration()); - Copier keyCopier = copyProvider.createKeyCopier(Long.class, keySerializer); - Copier valueCopier = copyProvider.createValueCopier(SoftLock.class, valueSerializer); - final AtomicBoolean invoked = new AtomicBoolean(); EvictionAdvisor evictionAdvisor = new EvictionAdvisor() { @@ -1512,14 +1351,12 @@ public boolean adviseAgainstEviction(Long key, SoftLock value) { return false; } }; - Store.Configuration onHeapConfig = new StoreConfigurationImpl(Long.class, SoftLock.class, + Store.Configuration> onHeapConfig = new StoreConfigurationImpl>(Long.class, valueClass, evictionAdvisor, classLoader, Expirations.noExpiration(), ResourcePoolsBuilder.newResourcePoolsBuilder() .heap(10, EntryUnit.ENTRIES) .build(), 0, keySerializer, valueSerializer); - TestTimeSource testTimeSource = new TestTimeSource(); - OnHeapStore> onHeapStore = (OnHeapStore) new OnHeapStore(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), NullStoreEventDispatcher.nullStoreEventDispatcher()); - Journal journal = new TransientJournal(); + OnHeapStore> onHeapStore = new OnHeapStore>(onHeapConfig, testTimeSource, keyCopier, valueCopier, new NoopSizeOfEngine(), eventDispatcher); final XAStore xaStore = new XAStore(Long.class, String.class, onHeapStore, transactionManagerWrapper, testTimeSource, journal, uniqueXAResourceId); diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java index ce46db5a07..9527aad8dc 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XATransactionContextTest.java @@ -30,7 +30,10 @@ import org.hamcrest.Description; import org.hamcrest.Matcher; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -64,6 +67,16 @@ */ public class XATransactionContextTest { + @Mock + private Store> underlyingStore; + @Mock + private Journal journal; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + @Test public void testSimpleCommands() throws Exception { TestTimeSource timeSource = new TestTimeSource(); @@ -196,8 +209,6 @@ public void testHasTimedOut() throws Exception { @Test public void testPrepareReadOnly() throws 
Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -210,9 +221,8 @@ public void testPrepareReadOnly() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testPrepare() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -256,8 +266,6 @@ public Object answer(InvocationOnMock invocation) throws Throwable { @Test public void testCommitNotPreparedInFlightThrows() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -265,6 +273,7 @@ public void testCommitNotPreparedInFlightThrows() throws Exception { xaTransactionContext.addCommand(1L, new StorePutCommand("one", new XAValueHolder("un", timeSource.getTimeMillis()))); xaTransactionContext.addCommand(2L, new StorePutCommand("two", new XAValueHolder("deux", timeSource.getTimeMillis()))); + @SuppressWarnings("unchecked") Store.ValueHolder> mockValueHolder = mock(Store.ValueHolder.class); when(mockValueHolder.value()).thenReturn(new SoftLock(null, "two", null)); when(underlyingStore.get(eq(2L))).thenReturn(mockValueHolder); @@ -278,9 +287,8 @@ public void testCommitNotPreparedInFlightThrows() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testCommit() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -320,8 +328,6 @@ public void testCommit() throws Exception { @Test public void testCommitInOnePhasePreparedThrows() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -337,9 +343,8 @@ public void testCommitInOnePhasePreparedThrows() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testCommitInOnePhase() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -452,8 +457,6 @@ public Object answer(InvocationOnMock invocation) throws Throwable { @Test public void testRollbackPhase1() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); 
TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -467,9 +470,8 @@ public void testRollbackPhase1() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testRollbackPhase2() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -512,8 +514,6 @@ public SoftLock value() { @Test public void testCommitInOnePhaseTimeout() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -533,8 +533,6 @@ public void testCommitInOnePhaseTimeout() throws Exception { @Test public void testPrepareTimeout() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -553,9 +551,8 @@ public void testPrepareTimeout() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testCommitConflictsEvicts() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -595,9 +592,8 @@ public SoftLock value() { } @Test + @SuppressWarnings("unchecked") public void testPrepareConflictsEvicts() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); @@ -616,9 +612,8 @@ public void testPrepareConflictsEvicts() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testRollbackConflictsEvicts() throws Exception { - Store> underlyingStore = mock(Store.class); - Journal journal = mock(Journal.class); final TestTimeSource timeSource = new TestTimeSource(); XATransactionContext xaTransactionContext = new XATransactionContext(new TransactionId(new TestXid(0, 0)), underlyingStore, journal, timeSource, timeSource.getTimeMillis() + 30000); diff --git a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java index e4bf078b95..8d6effd3e1 100644 --- a/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java +++ b/transactions/src/test/java/org/ehcache/transactions/xa/internal/XAValueHolderTest.java @@ -45,6 +45,7 @@ public void 
testSerialization() throws Exception { outputStream.writeObject(valueHolder); outputStream.close(); + @SuppressWarnings("unchecked") XAValueHolder result = (XAValueHolder) new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray())).readObject(); assertThat(result.getId(), is(valueHolder.getId())); @@ -54,4 +55,4 @@ public void testSerialization() throws Exception { assertThat(result.value(), is(valueHolder.value())); assertThat(result.hits(), is(valueHolder.hits())); } -} \ No newline at end of file +} From 78919c21cd3c73e8c50acf46f353e40c664a48d7 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 5 Dec 2016 09:39:48 +0100 Subject: [PATCH 206/218] :shirt: #1430 Clear warnings in management module --- management/build.gradle | 4 ++++ .../actions/EhcacheActionProviderTest.java | 13 +++++++++---- .../statistics/EhcacheStatisticsProviderTest.java | 1 + .../DefaultManagementRegistryServiceTest.java | 4 ++-- 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/management/build.gradle b/management/build.gradle index 1f3ae8fc08..b9a7240d73 100644 --- a/management/build.gradle +++ b/management/build.gradle @@ -35,3 +35,7 @@ dependencies { testCompile project(':xml') testCompile "com.fasterxml.jackson.core:jackson-databind:2.7.5" } + +tasks.withType(JavaCompile) { + options.compilerArgs += ['-Werror'] +} diff --git a/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java b/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java index e7d311c08e..d628e32223 100644 --- a/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java +++ b/management/src/test/java/org/ehcache/management/providers/actions/EhcacheActionProviderTest.java @@ -52,6 +52,7 @@ public class EhcacheActionProviderTest { ManagementRegistryServiceConfiguration cmConfig_0 = new DefaultManagementRegistryConfiguration().setContext(cmContext_0); @Test + @SuppressWarnings("unchecked") public void testDescriptions() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig); @@ -122,8 +123,10 @@ public void testCallAction_happyPathNoParam() throws Exception { public void testCallAction_happyPathWithParams() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig_0); - EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); - CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); + @SuppressWarnings("unchecked") + EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); + @SuppressWarnings("unchecked") + CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); when(cacheRuntimeConfiguration.getClassLoader()).thenReturn(ClassLoader.getSystemClassLoader()); when(cacheRuntimeConfiguration.getKeyType()).thenReturn(Long.class); when(ehcache.getRuntimeConfiguration()).thenReturn(cacheRuntimeConfiguration); @@ -201,8 +204,10 @@ public void testCallAction_noSuchMethodName() throws Exception { public void testCallAction_noSuchMethod() throws Exception { EhcacheActionProvider ehcacheActionProvider = new EhcacheActionProvider(cmConfig_0); - EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); - CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); + @SuppressWarnings("unchecked") + EhcacheWithLoaderWriter ehcache = mock(EhcacheWithLoaderWriter.class); + 
@SuppressWarnings("unchecked") + CacheRuntimeConfiguration cacheRuntimeConfiguration = mock(CacheRuntimeConfiguration.class); when(cacheRuntimeConfiguration.getClassLoader()).thenReturn(ClassLoader.getSystemClassLoader()); when(ehcache.getRuntimeConfiguration()).thenReturn(cacheRuntimeConfiguration); ehcacheActionProvider.register(new CacheBinding("cache-0", ehcache)); diff --git a/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java b/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java index 05731ecb9d..a7229f0978 100644 --- a/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java +++ b/management/src/test/java/org/ehcache/management/providers/statistics/EhcacheStatisticsProviderTest.java @@ -60,6 +60,7 @@ public void tearDown() throws Exception { } @Test + @SuppressWarnings("unchecked") public void testDescriptions() throws Exception { EhcacheStatisticsProvider ehcacheStatisticsProvider = new EhcacheStatisticsProvider(cmConfig_0, executor) { @Override diff --git a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java index 5397cb36cf..b00b764cf4 100644 --- a/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java +++ b/management/src/test/java/org/ehcache/management/registry/DefaultManagementRegistryServiceTest.java @@ -282,8 +282,8 @@ public void testCanGetStats() { .with("cacheManagerName", "myCM") .with("cacheName", "aCache2"); - Cache cache1 = cacheManager1.getCache("aCache1", Long.class, String.class); - Cache cache2 = cacheManager1.getCache("aCache2", Long.class, String.class); + Cache cache1 = cacheManager1.getCache("aCache1", Long.class, String.class); + Cache cache2 = cacheManager1.getCache("aCache2", Long.class, String.class); cache1.put(1L, "one"); cache2.put(3L, "three"); From 7c3a9edf617b6c1c988690213be77699873e9151 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Wed, 14 Dec 2016 08:49:23 +0100 Subject: [PATCH 207/218] :bug: Fix #1699 putIfAbsent failure counts as hit --- .../java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java index 3965fbec02..1224c4c63f 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java +++ b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java @@ -168,7 +168,7 @@ private long getMisses() { private long getHits() { return getBulkCount(BulkOps.GET_ALL_HITS) + get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)) + - putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)) + + putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.HIT)) + replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT, CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)) + conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS, CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); } From b10efd504b838f601f65f3fa1b73f8e3fb70b24f Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Wed, 14 Dec 2016 09:43:33 +0100 Subject: [PATCH 208/218] 
:construction: Clean up CacheOperationOutcomes.GetOutcome Dropped the _NO_LOADER / _WITH_LOADER variants since the implementation classes are now different, so there is no possible confusion. --- .../jsr107/Eh107CacheStatisticsMXBean.java | 4 ++-- .../main/java/org/ehcache/core/Ehcache.java | 18 +++++++-------- .../ehcache/core/EhcacheWithLoaderWriter.java | 22 +++++++++---------- .../statistics/CacheOperationOutcomes.java | 12 ++++------ .../org/ehcache/core/EhcacheBasicGetTest.java | 4 ++-- .../EhcacheWithLoaderWriterBasicGetTest.java | 14 ++++++------ .../statistics/StandardEhcacheStatistics.java | 5 ++--- 7 files changed, 37 insertions(+), 42 deletions(-) diff --git a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java index 35e84127ca..661e5392b3 100644 --- a/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java +++ b/107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java @@ -157,7 +157,7 @@ public float getAverageRemoveTime() { private long getMisses() { return getBulkCount(BulkOps.GET_ALL_MISS) + - get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)) + + get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)) + putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)) + replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_NOT_PRESENT)) + conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_MISSING)); @@ -165,7 +165,7 @@ private long getMisses() { private long getHits() { return getBulkCount(BulkOps.GET_ALL_HITS) + - get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)) + + get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)) + putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.HIT)) + replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT, CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)) + conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS, CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT)); diff --git a/core/src/main/java/org/ehcache/core/Ehcache.java b/core/src/main/java/org/ehcache/core/Ehcache.java index b6a9a0b27f..271f09cd9b 100644 --- a/core/src/main/java/org/ehcache/core/Ehcache.java +++ b/core/src/main/java/org/ehcache/core/Ehcache.java @@ -171,10 +171,10 @@ public V get(final K key) { // Check for expiry first if (valueHolder == null) { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(GetOutcome.MISS); return null; } else { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT); return valueHolder.value(); } } catch (StoreAccessException e) { @@ -741,9 +741,9 @@ public void compute(K key, final BiFunction c @Override public V apply(K mappedKey, V mappedValue) { if (mappedValue == null) { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS); } else { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); } V newValue = computeFunction.apply(mappedKey, mappedValue); @@ -799,10 +799,10 @@ public V apply(K mappedKey, V mappedValue) { V returnValue = existingValue.get(); if
(returnValue != null) { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); removeObserver.end(RemoveOutcome.SUCCESS); } else { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS); } return returnValue; } @@ -834,10 +834,10 @@ public V apply(K mappedKey, V mappedValue) { V returnValue = existingValue.get(); if (returnValue != null) { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); putObserver.end(PutOutcome.UPDATED); } else { - getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER); + getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.MISS); putObserver.end(PutOutcome.PUT); } return returnValue; @@ -908,7 +908,7 @@ public Entry next() { if (!quiet) getObserver.begin(); if (nextException == null) { - if (!quiet) getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER); + if (!quiet) getObserver.end(org.ehcache.core.statistics.CacheOperationOutcomes.GetOutcome.HIT); current = next; advance(); return new ValueHolderBasedEntry(current); diff --git a/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java b/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java index e67c19be24..faf7231931 100644 --- a/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java +++ b/core/src/main/java/org/ehcache/core/EhcacheWithLoaderWriter.java @@ -190,10 +190,10 @@ private V getNoLoader(K key) { // Check for expiry first if (valueHolder == null) { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(GetOutcome.MISS); return null; } else { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT); return valueHolder.value(); } } catch (StoreAccessException e) { @@ -235,10 +235,10 @@ public V apply(final K k) { // Check for expiry first if (valueHolder == null) { - getObserver.end(GetOutcome.MISS_WITH_LOADER); + getObserver.end(GetOutcome.MISS); return null; } else { - getObserver.end(GetOutcome.HIT_WITH_LOADER); + getObserver.end(GetOutcome.HIT); return valueHolder.value(); } } catch (StoreAccessException e) { @@ -1234,9 +1234,9 @@ public void compute(K key, final BiFunction c @Override public V apply(K mappedKey, V mappedValue) { if (mappedValue == null) { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(GetOutcome.MISS); } else { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT); } V newValue = computeFunction.apply(mappedKey, mappedValue); @@ -1309,10 +1309,10 @@ public V apply(K mappedKey, V mappedValue) { V returnValue = existingValue.get(); if (returnValue != null) { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT); removeObserver.end(RemoveOutcome.SUCCESS); } else { - getObserver.end(GetOutcome.MISS_NO_LOADER); + getObserver.end(GetOutcome.MISS); } return returnValue; } @@ -1350,10 +1350,10 @@ public V apply(K mappedKey, V mappedValue) { V returnValue = existingValue.get(); if (returnValue != null) { - getObserver.end(GetOutcome.HIT_NO_LOADER); + getObserver.end(GetOutcome.HIT); putObserver.end(PutOutcome.UPDATED); } else { - getObserver.end(GetOutcome.MISS_NO_LOADER); + 
getObserver.end(GetOutcome.MISS); putObserver.end(PutOutcome.PUT); } return returnValue; @@ -1424,7 +1424,7 @@ public Entry next() { if (!quiet) getObserver.begin(); if (nextException == null) { - if (!quiet) getObserver.end(GetOutcome.HIT_NO_LOADER); + if (!quiet) getObserver.end(GetOutcome.HIT); current = next; advance(); return new ValueHolderBasedEntry(current); diff --git a/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java b/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java index 1edd170384..052f2df30e 100755 --- a/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java +++ b/core/src/main/java/org/ehcache/core/statistics/CacheOperationOutcomes.java @@ -39,14 +39,10 @@ enum ClearOutcome implements CacheOperationOutcomes { * Outcomes for cache Get operations. */ enum GetOutcome implements CacheOperationOutcomes { - /** hit, no loader */ - HIT_NO_LOADER, - /** miss, no loader */ - MISS_NO_LOADER, - /** hit */ - HIT_WITH_LOADER, - /** miss */ - MISS_WITH_LOADER, + /** hit, loader or not is Cache impl specific */ + HIT, + /** miss, loader or not is Cache impl specific*/ + MISS, /** failure */ FAILURE }; diff --git a/core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java b/core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java index c142ef411d..5fb8a11b82 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheBasicGetTest.java @@ -68,7 +68,7 @@ public void testGetNoStoreEntry() throws Exception { assertThat(ehcache.get("key"), is(nullValue())); verify(this.store).get(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)); } /** @@ -110,7 +110,7 @@ public void testGetHasStoreEntry() throws Exception { verify(this.store).get(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); } /** diff --git a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java index 12f7cca041..b2a7bfd3d4 100644 --- a/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java +++ b/core/src/test/java/org/ehcache/core/EhcacheWithLoaderWriterBasicGetTest.java @@ -81,7 +81,7 @@ public void testGetNoStoreEntry() throws Exception { assertThat(ehcache.get("key"), is(nullValue())); verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); verifyZeroInteractions(this.spiedResilienceStrategy); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); } @@ -104,7 +104,7 @@ public void testGetNoStoreEntryNoCacheLoaderWriterEntry() throws Exception { verify(this.cacheLoaderWriter).load(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().containsKey("key"), is(false)); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)); + validateStats(ehcache, 
EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.SUCCESS)); } @@ -128,7 +128,7 @@ public void testGetNoStoreEntryHasCacheLoaderWriterEntry() throws Exception { verify(this.cacheLoaderWriter).load(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.CacheLoadingOutcome.SUCCESS)); } @@ -257,7 +257,7 @@ public void testGetHasStoreEntry() throws Exception { verify(this.store).computeIfAbsent(eq("key"), getAnyFunction()); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); } @@ -281,7 +281,7 @@ public void testGetHasStoreEntryNoCacheLoaderWriterEntry() throws Exception { verify(this.cacheLoaderWriter, never()).load(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); } @@ -306,7 +306,7 @@ public void testGetHasStoreEntryHasCacheLoaderWriterEntry() throws Exception { verify(this.cacheLoaderWriter, never()).load(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); } @@ -331,7 +331,7 @@ public void testGetHasStoreEntryCacheLoadingException() throws Exception { verify(this.cacheLoaderWriter, never()).load(eq("key")); verifyZeroInteractions(this.spiedResilienceStrategy); assertThat(fakeStore.getEntryMap().get("key"), equalTo("value")); - validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)); + validateStats(ehcache, EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT)); validateStats(ehcache, EnumSet.noneOf(CacheOperationOutcomes.CacheLoadingOutcome.class)); } diff --git a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java index 8a4323c54c..0dfc137bb1 100644 --- a/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java +++ b/management/src/main/java/org/ehcache/management/providers/statistics/StandardEhcacheStatistics.java @@ -23,7 +23,6 @@ import org.ehcache.management.providers.ExposedCacheBinding; import org.terracotta.context.extended.OperationStatisticDescriptor; import org.terracotta.context.extended.StatisticsRegistry; -import org.terracotta.management.model.capabilities.descriptors.Descriptor; import 
org.terracotta.management.model.capabilities.descriptors.StatisticDescriptor; import org.terracotta.management.model.stats.Statistic; import org.terracotta.management.registry.collect.StatisticsRegistryMetadata; @@ -50,8 +49,8 @@ class StandardEhcacheStatistics extends ExposedCacheBinding { this.statisticsRegistryMetadata = new StatisticsRegistryMetadata(statisticsRegistry); - EnumSet hit = of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER); - EnumSet miss = of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER); + EnumSet hit = of(CacheOperationOutcomes.GetOutcome.HIT); + EnumSet miss = of(CacheOperationOutcomes.GetOutcome.MISS); OperationStatisticDescriptor getCacheStatisticDescriptor = OperationStatisticDescriptor.descriptor("get", singleton("cache"), CacheOperationOutcomes.GetOutcome.class); statisticsRegistry.registerCompoundOperations("Cache:Hit", getCacheStatisticDescriptor, hit); From 7b0a68a6568aeddb7e2dc6c5ee672c9aca8abc71 Mon Sep 17 00:00:00 2001 From: akomakom Date: Tue, 26 Jul 2016 13:00:35 -0400 Subject: [PATCH 209/218] Adding nexus staging plugin --- build.gradle | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/build.gradle b/build.gradle index 646e02015b..daf6e20215 100644 --- a/build.gradle +++ b/build.gradle @@ -16,6 +16,28 @@ import scripts.* import org.gradle.internal.jvm.Jvm +buildscript { + repositories { + mavenCentral() + } + dependencies { + classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.5.3" + } +} + +// This adds tasks to auto close or release nexus staging repos +// see https://github.com/Codearte/gradle-nexus-staging-plugin/ +project.plugins.apply 'io.codearte.nexus-staging' +project.nexusStaging { + username = project.sonatypeUser + password = project.sonatypePwd + packageGroup = 'org.ehcache' +} + +// Disable automatic promotion for added safety +closeAndPromoteRepository.enabled = false + + ext { baseVersion = findProperty('overrideVersion') ?: '3.2.1-SNAPSHOT' From 9485e32d3f14e6fd3ab7e1e7db56a4006c1cd9cc Mon Sep 17 00:00:00 2001 From: akomakom Date: Tue, 26 Jul 2016 13:00:35 -0400 Subject: [PATCH 210/218] Adding nexus staging plugin --- build.gradle | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/build.gradle b/build.gradle index 646e02015b..4b89a93ebd 100644 --- a/build.gradle +++ b/build.gradle @@ -16,6 +16,28 @@ import scripts.* import org.gradle.internal.jvm.Jvm +buildscript { + repositories { + mavenCentral() + } + dependencies { + classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.5.3" + } +} + +// This adds tasks to auto close or release nexus staging repos +// see https://github.com/Codearte/gradle-nexus-staging-plugin/ +project.plugins.apply 'io.codearte.nexus-staging' +project.nexusStaging { + username = project.sonatypeUser + password = project.sonatypePwd + packageGroup = 'org.ehcache' +} + +// Disable automatic promotion for added safety +closeAndPromoteRepository.enabled = false + + ext { baseVersion = findProperty('overrideVersion') ?: '3.2.1-SNAPSHOT' From 993ba925ae3e9c201ecddf745fe10bb3940ab3f8 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Wed, 14 Dec 2016 21:41:01 +0100 Subject: [PATCH 211/218] Update to latest Terracotta preview --- build.gradle | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/build.gradle b/build.gradle index 646e02015b..29a8cfd077 100644 --- a/build.gradle +++ b/build.gradle @@ -31,12 +31,12 @@ ext { 
terracottaPlatformVersion = '5.1.0' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.1.0' - terracottaCoreVersion = '5.1.0' + terracottaCoreVersion = '5.1.1-pre1' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.1.0' + terracottaPassthroughTestingVersion = '1.1.1-pre1' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.1.0' + galvanVersion = '1.1.1-pre1' // Tools findbugsVersion = '3.0.1' From ff4ac977458b0c5f78e97b46a0c3e8bc37c78d50 Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Wed, 14 Dec 2016 21:29:24 -0500 Subject: [PATCH 212/218] :arrow_up: Update to latest tc platform with HA and failover support for M&M --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 29a8cfd077..28f3db12f2 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.1.0' + terracottaPlatformVersion = '5.1.1-pre2' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.1.0' terracottaCoreVersion = '5.1.1-pre1' From 61349f2dcf6eb984f2f54c78cadd0dcc501a319e Mon Sep 17 00:00:00 2001 From: Chris Bradley Date: Thu, 15 Dec 2016 13:35:46 +0100 Subject: [PATCH 213/218] Include feedback from general proofreading review --- .../docs/asciidoc/user/caching-concepts.adoc | 36 +++++++++---------- .../docs/asciidoc/user/eviction-advisor.adoc | 7 ++-- 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/docs/src/docs/asciidoc/user/caching-concepts.adoc b/docs/src/docs/asciidoc/user/caching-concepts.adoc index c1cd258df1..dfc1ff1fb5 100644 --- a/docs/src/docs/asciidoc/user/caching-concepts.adoc +++ b/docs/src/docs/asciidoc/user/caching-concepts.adoc @@ -37,12 +37,12 @@ and technical decision based upon the requirements and assumptions of your appli [[storage-tiers]] == Storage Tiers -You can configure Ehcache to use various data storage areas. -When a cache is configured to use more than one storage area, those areas are arranged and managed as `tiers `. -They are organized in a hierarchy, withthe lowest tier being called the `authority` tier and the others being part of the `caching` tier. -The caching tier can itself be composed of more than one storage area. -The_hottest_ data is kept in the caching tier, which is typically less abundantbut faster than the authority tier. -All the datais kept in the authority tier, which isslower but more abundant. +You can configure Ehcache to use various data storage areas. +When a cache is configured to use more than one storage area, those areas are arranged and managed as `tiers`. +They are organized in a hierarchy, with the lowest tier being called the `authority` tier and the others being part of the `caching` tier. +The caching tier can itself be composed of more than one storage area. +The _hottest_ data is kept in the caching tier, which is typically less abundant but faster than the authority tier. +All the data is kept in the authority tier, which is slower but more abundant. Data stores supported by Ehcache include: @@ -50,16 +50,15 @@ Data stores supported by Ehcache include: your Java application, all of which must be scanned by the JVM's garbage collector. The more heap space your JVM utilizes, the more your application's performance will be impacted by garbage collection pauses. This store is extremely fast, but is typically your most limited storage resource. 
-* Off-Heap Store - Limited in size only by available RAM. Not subject -to Java garbage collection (GC). Is quite fast, yet slower than the On-Heap Store because data must be moved to and -from the JVM's heap as it is stored and re-accessed. -* Disk Store - Utilizes a disk (file system) to store cache entries. This type of storage resource is typically very -abundant but much slower than the RAM-based stores. -* Clustered Store - This data store is a cache on a remote server. -The remote server may optionally have a failover server providing improved high availability. -Since clustered storage comes with performance penalties due to such -factors as network latency as well as for establishing client/server consistency, this tier, by nature, is slower than -local off-heap storage. +* Off-Heap Store - Limited in size only by available RAM. +Not subject to Java garbage collection (GC). +Is quite fast, yet slower than the On-Heap Store because data must be moved to and from the JVM's heap as it is stored and re-accessed. +* Disk Store - Utilizes a disk (file system) to store cache entries. +This type of storage resource is typically very abundant but much slower than the RAM-based stores. +* Clustered Store - This data store is a cache on a remote server. +The remote server may optionally have a failover server providing improved high availability. +Since clustered storage comes with performance penalties due to such factors as network latency as well as for establishing client/server consistency, +this tier, by nature, is slower than local off-heap storage. image::EhcacheTerminology.png[] @@ -67,7 +66,8 @@ image::EhcacheTerminology.png[] === Standalone -The data set is held in the application node. If a standalone topology is used where there are multiple application nodes running the +The data set is held in the application node. Any other application nodes are independent with no +communication between them. If a standalone topology is used where there are multiple application nodes running the same application, then their caches are completely independent. === Distributed / Clustered @@ -75,7 +75,7 @@ same application, then their caches are completely independent. The data is held in a remote server (or array of servers) with a subset of hot data held in each application node. This topology offers offers a selection of consistency options. A distributed topology is the recommended approach in a clustered or scaled-out application environment. -It provides the best combination of performance, availability and scalability. +It provides the best combination of performance, availability, and scalability. image::ClusteredEhcacheTopology.png[] diff --git a/docs/src/docs/asciidoc/user/eviction-advisor.adoc b/docs/src/docs/asciidoc/user/eviction-advisor.adoc index e48f50b7c3..561363bc84 100644 --- a/docs/src/docs/asciidoc/user/eviction-advisor.adoc +++ b/docs/src/docs/asciidoc/user/eviction-advisor.adoc @@ -17,6 +17,10 @@ NOTE: This is an advanced topic/feature that will not be of interest to most use You can affect which elements are selected for eviction from the cache by providing a class that implements the `org.ehcache.config.EvictionAdvisor` interface. +NOTE: Eviction advisors are not used for clustered storage tiers. +In a cache with a heap tier and clustered storage tier, the heap tier will use the eviction advisor but the clustered storage tier will evict independently, irrespective of the eviction advisor. 
+The description below applies to using an eviction advisor for the heap tier of a cache. + `EvictionAdvisor` implementations are invoked when Ehcache attempts to evict entries from the cache (in order to make room for new entries) in order to determine whether the given entry should not be considered a good candidate for eviction. If the eviction is advised against, Ehcache will try to honor the preference of @@ -28,8 +32,7 @@ include::{sourcedir31}/impl/src/test/java/org/ehcache/docs/GettingStarted.java[t ---- <1> Configure a constrained heap, as the eviction advisor is only relevant when mappings get evicted from the cache. -<2> If you want to hint to the eviction algorithm to advise against the eviction of some mappings, you have to - configure an instance of `EvictionAdvisor`. +<2> If you want to give the the eviction algorithm a hint to advise against the eviction of some mappings, you have to configure an instance of `EvictionAdvisor`. In this particular example, the `OddKeysEvictionAdvisor` class will advise against eviction of any key that is an odd number. The cache is constrained to only be allowed to contain two entries, however the code has put three entries From 1f90609750a6d13d9e57bf70f6dba56a18e455f2 Mon Sep 17 00:00:00 2001 From: Chris Bradley Date: Thu, 15 Dec 2016 15:53:46 +0100 Subject: [PATCH 214/218] Some text tweaking related to which storage tiers are affected by eviction advisors. --- docs/src/docs/asciidoc/user/eviction-advisor.adoc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/src/docs/asciidoc/user/eviction-advisor.adoc b/docs/src/docs/asciidoc/user/eviction-advisor.adoc index 561363bc84..049284b61d 100644 --- a/docs/src/docs/asciidoc/user/eviction-advisor.adoc +++ b/docs/src/docs/asciidoc/user/eviction-advisor.adoc @@ -18,8 +18,9 @@ You can affect which elements are selected for eviction from the cache by provid `org.ehcache.config.EvictionAdvisor` interface. NOTE: Eviction advisors are not used for clustered storage tiers. -In a cache with a heap tier and clustered storage tier, the heap tier will use the eviction advisor but the clustered storage tier will evict independently, irrespective of the eviction advisor. -The description below applies to using an eviction advisor for the heap tier of a cache. +For example, in a cache with a heap tier and clustered storage tier, +the heap tier will use the eviction advisor but the clustered storage tier will evict independently, irrespective of the eviction advisor. +The description below applies to using an eviction advisor for the cache tiers other than a clustered storage tier. 
`EvictionAdvisor` implementations are invoked when Ehcache attempts to evict entries from the cache (in order to make room for new entries) in order to determine whether the given entry should not be considered From 3dfb7edfe0c9677ee66c1c07a59ddc9d831c6570 Mon Sep 17 00:00:00 2001 From: Anthony Dahanne Date: Thu, 15 Dec 2016 15:30:03 -0500 Subject: [PATCH 215/218] :arrow_up: Upgrade to latest tc platform --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 28f3db12f2..0de821acb5 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ ext { sizeofVersion = '0.3.0' // Clustered - terracottaPlatformVersion = '5.1.1-pre2' + terracottaPlatformVersion = '5.1.1-pre3' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.1.0' terracottaCoreVersion = '5.1.1-pre1' From 671bf46f33003b5a163d49343592ad2abfffb334 Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 19 Dec 2016 11:39:45 +0100 Subject: [PATCH 216/218] Bump to latest Terracotta .1-pre2 version --- build.gradle | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/build.gradle b/build.gradle index 7c9807c6e0..c5aafb303e 100644 --- a/build.gradle +++ b/build.gradle @@ -21,7 +21,7 @@ buildscript { mavenCentral() } dependencies { - classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.5.3" + classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.5.3" } } @@ -53,12 +53,12 @@ ext { terracottaPlatformVersion = '5.1.1-pre3' managementVersion = terracottaPlatformVersion terracottaApisVersion = '1.1.0' - terracottaCoreVersion = '5.1.1-pre1' + terracottaCoreVersion = '5.1.1-pre2' offheapResourceVersion = terracottaPlatformVersion entityApiVersion = terracottaApisVersion - terracottaPassthroughTestingVersion = '1.1.1-pre1' + terracottaPassthroughTestingVersion = '1.1.1-pre2' entityTestLibVersion = terracottaPassthroughTestingVersion - galvanVersion = '1.1.1-pre1' + galvanVersion = '1.1.1-pre2' // Tools findbugsVersion = '3.0.1' From 3ab6e4344c66b334307a32eb56d1259dd9edf45b Mon Sep 17 00:00:00 2001 From: Abhilash Date: Fri, 16 Dec 2016 13:40:25 +0530 Subject: [PATCH 217/218] proper clean up when server store creation fails #1714 --- ...alidServerStoreConfigurationException.java | 4 +- .../ResourcePoolAllocationFailureTest.java | 110 ++++++++++++++++++ .../server/EhcacheStateServiceImpl.java | 17 +-- 3 files changed, 121 insertions(+), 10 deletions(-) create mode 100644 clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java diff --git a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java index caa513268c..31d4fe05cd 100644 --- a/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java +++ b/clustered/common/src/main/java/org/ehcache/clustered/common/internal/exceptions/InvalidServerStoreConfigurationException.java @@ -26,8 +26,8 @@ public InvalidServerStoreConfigurationException(String message) { super(message); } - public InvalidServerStoreConfigurationException(Throwable cause) { - super(cause); + public InvalidServerStoreConfigurationException(String message, Throwable cause) { + super(message, cause); } private InvalidServerStoreConfigurationException(InvalidServerStoreConfigurationException cause) { 
diff --git a/clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java b/clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java new file mode 100644 index 0000000000..8d8604c9aa --- /dev/null +++ b/clustered/integration-test/src/test/java/org/ehcache/clustered/ResourcePoolAllocationFailureTest.java @@ -0,0 +1,110 @@ +/* + * Copyright Terracotta, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.ehcache.clustered; + +import org.ehcache.PersistentCacheManager; +import org.ehcache.clustered.client.config.ClusteredStoreConfiguration; +import org.ehcache.clustered.client.config.DedicatedClusteredResourcePool; +import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder; +import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder; +import org.ehcache.clustered.client.config.builders.ServerSideConfigurationBuilder; +import org.ehcache.clustered.common.Consistency; +import org.ehcache.clustered.common.internal.exceptions.InvalidServerStoreConfigurationException; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.terracotta.testing.rules.BasicExternalCluster; +import org.terracotta.testing.rules.Cluster; + +import java.io.File; +import java.util.Collections; + +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +public class ResourcePoolAllocationFailureTest { + + private static final String RESOURCE_CONFIG = + "" + + "" + + "64" + + "" + + "\n"; + + @ClassRule + public static Cluster CLUSTER = + new BasicExternalCluster(new File("build/cluster"), 1, Collections.emptyList(), "", RESOURCE_CONFIG, ""); + + @BeforeClass + public static void waitForActive() throws Exception { + CLUSTER.getClusterControl().waitForActive(); + } + + @Test + public void testTooLowResourceException() throws InterruptedException { + + DedicatedClusteredResourcePool resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(10, MemoryUnit.KB); + CacheManagerBuilder cacheManagerBuilder = getPersistentCacheManagerCacheManagerBuilder(resourcePool); + + try { + cacheManagerBuilder.build(true); + fail("InvalidServerStoreConfigurationException expected"); + } catch (Exception e) { + e.printStackTrace(); + assertThat(getRootCause(e), instanceOf(InvalidServerStoreConfigurationException.class)); + assertThat(getRootCause(e).getMessage(), startsWith("Failed to create ServerStore")); + } + resourcePool = ClusteredResourcePoolBuilder.clusteredDedicated(100, MemoryUnit.KB); + cacheManagerBuilder = 
getPersistentCacheManagerCacheManagerBuilder(resourcePool); + PersistentCacheManager persistentCacheManager = cacheManagerBuilder.build(true); + + assertThat(persistentCacheManager, notNullValue()); + persistentCacheManager.close(); + + } + + private CacheManagerBuilder getPersistentCacheManagerCacheManagerBuilder(DedicatedClusteredResourcePool resourcePool) { + + ClusteringServiceConfigurationBuilder clusteringServiceConfigurationBuilder = ClusteringServiceConfigurationBuilder.cluster(CLUSTER.getConnectionURI().resolve("/crud-cm")); + ServerSideConfigurationBuilder serverSideConfigurationBuilder = clusteringServiceConfigurationBuilder.autoCreate() + .defaultServerResource("primary-server-resource"); + + return CacheManagerBuilder.newCacheManagerBuilder() + .with(serverSideConfigurationBuilder) + .withCache("test-cache", CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, + ResourcePoolsBuilder.newResourcePoolsBuilder() + .with(resourcePool) + ).add(new ClusteredStoreConfiguration(Consistency.EVENTUAL))); + } + + private static Throwable getRootCause(Throwable e) { + Throwable current = e; + while (current.getCause() != null) { + current = current.getCause(); + } + return current; + } + + +} diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java index dfa428902c..8b3789f505 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcacheStateServiceImpl.java @@ -20,7 +20,7 @@ import org.ehcache.clustered.common.PoolAllocation; import org.ehcache.clustered.common.ServerSideConfiguration; import org.ehcache.clustered.common.internal.ServerStoreConfiguration; -import org.ehcache.clustered.common.internal.exceptions.ClusterException; +import org.ehcache.clustered.common.internal.exceptions.*; import org.ehcache.clustered.server.repo.StateRepositoryManager; import org.ehcache.clustered.server.state.ClientMessageTracker; import org.ehcache.clustered.server.state.EhcacheStateService; @@ -28,12 +28,6 @@ import org.ehcache.clustered.server.state.ResourcePageSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.ehcache.clustered.common.internal.exceptions.IllegalMessageException; -import org.ehcache.clustered.common.internal.exceptions.InvalidServerSideConfigurationException; -import org.ehcache.clustered.common.internal.exceptions.InvalidStoreException; -import org.ehcache.clustered.common.internal.exceptions.InvalidStoreManagerException; -import org.ehcache.clustered.common.internal.exceptions.LifecycleException; -import org.ehcache.clustered.common.internal.exceptions.ResourceConfigurationException; import org.terracotta.context.TreeNode; import org.terracotta.offheapresource.OffHeapResource; import org.terracotta.offheapresource.OffHeapResourceIdentifier; @@ -385,8 +379,15 @@ public ServerStoreImpl createStore(String name, ServerStoreConfiguration serverS throw new InvalidStoreException("Clustered tier '" + name + "' already exists"); } + ServerStoreImpl serverStore; PageSource resourcePageSource = getPageSource(name, serverStoreConfiguration.getPoolAllocation()); - ServerStoreImpl serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource, mapper); + try { + serverStore = new ServerStoreImpl(serverStoreConfiguration, resourcePageSource, mapper); + } catch (RuntimeException rte) { 
+ releaseDedicatedPool(name, resourcePageSource); + throw new InvalidServerStoreConfigurationException("Failed to create ServerStore.", rte); + } + stores.put(name, serverStore); registerStoreStatistics(serverStore, name); From 88b833c464c42ba4e8d44065205e8eaa7639d5bc Mon Sep 17 00:00:00 2001 From: Louis Jacomet Date: Mon, 19 Dec 2016 15:22:53 +0100 Subject: [PATCH 218/218] :bug: Fix #1733 Throw exception on lifecycle or sync invoke error --- .../server/EhcachePassiveEntity.java | 27 +++++--- .../server/EhcachePassiveEntityTest.java | 62 ++++++++++++------- 2 files changed, 61 insertions(+), 28 deletions(-) diff --git a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java index ebcf40ba6b..d91468c8e4 100644 --- a/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java +++ b/clustered/server/src/main/java/org/ehcache/clustered/server/EhcachePassiveEntity.java @@ -78,13 +78,27 @@ public void invoke(EhcacheEntityMessage message) { EhcacheOperationMessage operationMessage = (EhcacheOperationMessage) message; EhcacheMessageType messageType = operationMessage.getMessageType(); if (isStoreOperationMessage(messageType)) { - invokeServerStoreOperation((ServerStoreOpMessage)message); + try { + invokeServerStoreOperation((ServerStoreOpMessage)message); + } catch (ClusterException e) { + // Store operation should not be critical enough to fail a passive + LOGGER.error("Unexpected exception raised during operation: " + message, e); + } } else if (isLifecycleMessage(messageType)) { invokeLifeCycleOperation((LifecycleMessage) message); } else if (isStateRepoOperationMessage(messageType)) { - ehcacheStateService.getStateRepositoryManager().invoke((StateRepositoryOpMessage)message); + try { + ehcacheStateService.getStateRepositoryManager().invoke((StateRepositoryOpMessage)message); + } catch (ClusterException e) { + // State repository operations should not be critical enough to fail a passive + LOGGER.error("Unexpected exception raised during operation: " + message, e); + } } else if (isPassiveReplicationMessage(messageType)) { - invokeRetirementMessages((PassiveReplicationMessage)message); + try { + invokeRetirementMessages((PassiveReplicationMessage)message); + } catch (ClusterException e) { + LOGGER.error("Unexpected exception raised during operation: " + message, e); + } } else { throw new AssertionError("Unsupported EhcacheOperationMessage: " + operationMessage.getMessageType()); } @@ -93,11 +107,10 @@ public void invoke(EhcacheEntityMessage message) { } else { throw new AssertionError("Unsupported EhcacheEntityMessage: " + message.getClass()); } - - } catch (Exception e) { - LOGGER.error("Unexpected exception raised during operation: " + message, e); + } catch (ClusterException e) { + // Reaching here means a lifecycle or sync operation failed + throw new IllegalStateException("A lifecycle or sync operation failed", e); } - } EhcachePassiveEntity(ServiceRegistry services, byte[] config, final KeySegmentMapper mapper) { diff --git a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java index ff902db19e..acdfbc67cc 100644 --- a/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java +++ b/clustered/server/src/test/java/org/ehcache/clustered/server/EhcachePassiveEntityTest.java @@ -143,11 
+143,16 @@ public void testConfigureAfterConfigure() throws Exception { .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) .build())); - passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() - .defaultResource("defaultServerResource") - .sharedPool("primary-new", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary-new", "serverResource2", 8, MemoryUnit.MEGABYTES) - .build())); + try { + passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() + .defaultResource("defaultServerResource") + .sharedPool("primary-new", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary-new", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build())); + fail("invocation should have triggered an exception"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString("operation failed")); + } assertThat(registry.getStoreManagerService() .getSharedResourcePoolIds(), containsInAnyOrder("primary", "secondary")); @@ -173,11 +178,16 @@ public void testConfigureMissingPoolResource() throws Exception { final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); - passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() - .defaultResource("defaultServerResource") - .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) // missing on 'server' - .build())); + try { + passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() + .defaultResource("defaultServerResource") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) // missing on 'server' + .build())); + fail("invocation should have triggered an exception"); + } catch (Exception e) { + assertThat(e.getMessage(), containsString("operation failed")); + } assertThat(registry.getStoreManagerService().getSharedResourcePoolIds(), is(Matchers.empty())); @@ -199,11 +209,16 @@ public void testConfigureMissingDefaultResource() throws Exception { final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); - passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() - .defaultResource("defaultServerResource") - .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) - .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) - .build())); + try { + passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() + .defaultResource("defaultServerResource") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .build())); + fail("invocation should have triggered an exception"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString("operation failed")); + } assertThat(registry.getStoreManagerService().getSharedResourcePoolIds(), is(Matchers.empty())); @@ -223,12 +238,17 @@ public void testConfigureLargeSharedPool() throws Exception { final EhcachePassiveEntity passiveEntity = new EhcachePassiveEntity(registry, ENTITY_ID, DEFAULT_MAPPER); - passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() - .defaultResource("defaultServerResource") - .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) - 
.sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) - .sharedPool("tooBig", "serverResource2", 64, MemoryUnit.MEGABYTES) - .build())); + try { + passiveEntity.invoke(MESSAGE_FACTORY.configureStoreManager(new ServerSideConfigBuilder() + .defaultResource("defaultServerResource") + .sharedPool("primary", "serverResource1", 4, MemoryUnit.MEGABYTES) + .sharedPool("secondary", "serverResource2", 8, MemoryUnit.MEGABYTES) + .sharedPool("tooBig", "serverResource2", 64, MemoryUnit.MEGABYTES) + .build())); + fail("invocation should have triggered an exception"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString("operation failed")); + } final Set poolIds = registry.getStoreManagerService().getSharedResourcePoolIds(); assertThat(poolIds, is(Matchers.empty()));