@Test
public void testTemplateOverridingStoreByValue() throws Exception {
    cacheManager = cachingProvider.getCacheManager(
            getClass().getResource("/org/ehcache/docs/ehcache-jsr107-template-override.xml").toURI(),
            getClass().getClassLoader());

    MutableConfiguration<Long, String> mutableConfiguration = new MutableConfiguration<Long, String>();
    mutableConfiguration.setTypes(Long.class, String.class);

    Cache<Long, String> myCache = null;
    myCache = cacheManager.createCache("anyCache", mutableConfiguration);
    myCache.put(1L, "foo");
    assertNotSame("foo", myCache.get(1L));
    assertTrue(myCache.getConfiguration(Configuration.class).isStoreByValue());

    myCache = cacheManager.createCache("byRefCache", mutableConfiguration);
    myCache.put(1L, "foo");
    assertSame("foo", myCache.get(1L));
    assertFalse(myCache.getConfiguration(Configuration.class).isStoreByValue());

    myCache = cacheManager.createCache("weirdCache1", mutableConfiguration);
    myCache.put(1L, "foo");
    assertNotSame("foo", myCache.get(1L));
    assertTrue(myCache.getConfiguration(Configuration.class).isStoreByValue());

    myCache = cacheManager.createCache("weirdCache2", mutableConfiguration);
    myCache.put(1L, "foo");
    assertSame("foo", myCache.get(1L));
    assertFalse(myCache.getConfiguration(Configuration.class).isStoreByValue());
}
@Test
public void removeRecordWithEntryProcessor() {
    final int ENTRY_COUNT = 10;
    CachingProvider cachingProvider = HazelcastServerCachingProvider.createCachingProvider(node1);
    CacheManager cacheManager = cachingProvider.getCacheManager();
    CompleteConfiguration<Integer, String> cacheConfig =
            new MutableConfiguration<Integer, String>().setTypes(Integer.class, String.class);
    ICache<Integer, String> cache = cacheManager.createCache("MyCache", cacheConfig).unwrap(ICache.class);

    for (int i = 0; i < ENTRY_COUNT; i++) {
        cache.put(i * 1000, "Value-" + (i * 1000));
    }
    assertEquals(ENTRY_COUNT, cache.size());

    for (int i = 0; i < ENTRY_COUNT; i++) {
        if (i % 2 == 0) {
            cache.invoke(i * 1000, new RemoveRecordEntryProcessor());
        }
    }
    assertEquals(ENTRY_COUNT / 2, cache.size());
}
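// The RemoveRecordEntryProcessor invoked above is not defined in this snippet. The following is a
// minimal sketch of what such a processor could look like, assuming it simply removes the entry it
// is invoked on (a hypothetical stand-in, not the actual Hazelcast test class):
public static class RemoveRecordEntryProcessor
        implements EntryProcessor<Integer, String, Object>, Serializable {

    @Override
    public Object process(MutableEntry<Integer, String> entry, Object... arguments)
            throws EntryProcessorException {
        // Removing through MutableEntry makes the removal visible to the cache once the processor completes.
        entry.remove();
        return null;
    }
}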
@Override
public CacheManager getCacheManager(URI uri, ClassLoader classLoader, Properties properties) {
    URI globalUri = uri == null ? getDefaultURI() : uri;
    ClassLoader globalClassLoader = classLoader == null ? getDefaultClassLoader() : classLoader;
    Properties globalProperties = properties == null ? new Properties() : properties;

    synchronized (cacheManagers) {
        Map<URI, CacheManager> map = cacheManagers.get(globalClassLoader);
        if (map == null) {
            if (trace) log.tracef("No cache managers registered under '%s'", globalUri);
            map = new HashMap<URI, CacheManager>();
            cacheManagers.put(globalClassLoader, map);
        }

        CacheManager cacheManager = map.get(globalUri);
        if (cacheManager == null || cacheManager.isClosed()) {
            // Not found or stopped, create a cache manager and add it to the collection
            cacheManager = createCacheManager(globalClassLoader, globalUri, globalProperties);
            if (trace) log.tracef("Created '%s' cache manager", globalUri);
            map.put(globalUri, cacheManager);
        }
        return cacheManager;
    }
}
@Test
public void testLatestAccessCacheMergePolicy() {
    String cacheName = randomMapName();
    Config config = newConfig();
    HazelcastInstance h1 = Hazelcast.newHazelcastInstance(config);
    HazelcastInstance h2 = Hazelcast.newHazelcastInstance(config);

    TestMemberShipListener memberShipListener = new TestMemberShipListener(1);
    h2.getCluster().addMembershipListener(memberShipListener);
    TestLifeCycleListener lifeCycleListener = new TestLifeCycleListener(1);
    h2.getLifecycleService().addLifecycleListener(lifeCycleListener);

    closeConnectionBetween(h1, h2);

    assertOpenEventually(memberShipListener.latch);
    assertClusterSizeEventually(1, h1);
    assertClusterSizeEventually(1, h2);

    CachingProvider cachingProvider1 = HazelcastServerCachingProvider.createCachingProvider(h1);
    CachingProvider cachingProvider2 = HazelcastServerCachingProvider.createCachingProvider(h2);
    CacheManager cacheManager1 = cachingProvider1.getCacheManager();
    CacheManager cacheManager2 = cachingProvider2.getCacheManager();
    CacheConfig cacheConfig = newCacheConfig(cacheName, LatestAccessCacheMergePolicy.class.getName());
    Cache cache1 = cacheManager1.createCache(cacheName, cacheConfig);
    Cache cache2 = cacheManager2.createCache(cacheName, cacheConfig);

    // TODO: we assume the cluster is still split at this point and while the gets/puts below run.
    // This assumption seems fragile due to its time sensitivity.
    cache1.put("key1", "value");
    assertEquals("value", cache1.get("key1")); // access the record

    // prevent updating at the same time
    sleepAtLeastMillis(1);

    cache2.put("key1", "LatestUpdatedValue");
    assertEquals("LatestUpdatedValue", cache2.get("key1")); // access the record

    cache2.put("key2", "value2");
    assertEquals("value2", cache2.get("key2")); // access the record

    // prevent updating at the same time
    sleepAtLeastMillis(1);

    cache1.put("key2", "LatestUpdatedValue2");
    assertEquals("LatestUpdatedValue2", cache1.get("key2")); // access the record

    assertOpenEventually(lifeCycleListener.latch);
    assertClusterSizeEventually(2, h1);
    assertClusterSizeEventually(2, h2);

    Cache cacheTest = cacheManager1.getCache(cacheName);
    assertEquals("LatestUpdatedValue", cacheTest.get("key1"));
    assertEquals("LatestUpdatedValue2", cacheTest.get("key2"));
}
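// The newCacheConfig(name, mergePolicy) helper used in this and the other split-brain merge tests
// below is not shown here. A plausible sketch, assuming it only sets the cache name and the merge
// policy class name on a Hazelcast CacheConfig (hypothetical helper, not necessarily the original):
private CacheConfig newCacheConfig(String cacheName, String mergePolicy) {
    CacheConfig cacheConfig = new CacheConfig();
    cacheConfig.setName(cacheName);
    cacheConfig.setMergePolicy(mergePolicy);
    return cacheConfig;
}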
@Setup
public void setup(TestContext testContext) {
    HazelcastInstance hazelcastInstance = testContext.getTargetInstance();
    resultsPerWorker = hazelcastInstance.getList(basename + ":ResultMap");
    CacheManager cacheManager = createCacheManager(hazelcastInstance);
    cache = cacheManager.getCache(basename);
}
@Test
public void testWithoutEhcacheExplicitDependencyAndNoCodeChanges() throws Exception {
    CacheManager manager = cachingProvider.getCacheManager(
            getClass().getResource("/org/ehcache/docs/ehcache-jsr107-template-override.xml").toURI(),
            getClass().getClassLoader());

    // tag::jsr107SupplementWithTemplatesExample[]
    MutableConfiguration<Long, String> mutableConfiguration = new MutableConfiguration<Long, String>();
    mutableConfiguration.setTypes(Long.class, String.class); // <1>

    Cache<Long, String> anyCache = manager.createCache("anyCache", mutableConfiguration); // <2>
    CacheRuntimeConfiguration<Long, String> ehcacheConfig = (CacheRuntimeConfiguration<Long, String>) anyCache
            .getConfiguration(Eh107Configuration.class)
            .unwrap(CacheRuntimeConfiguration.class); // <3>
    ehcacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(); // <4>

    Cache<Long, String> anotherCache = manager.createCache("byRefCache", mutableConfiguration);
    assertFalse(anotherCache.getConfiguration(Configuration.class).isStoreByValue()); // <5>

    MutableConfiguration<String, String> otherConfiguration = new MutableConfiguration<String, String>();
    otherConfiguration.setTypes(String.class, String.class);
    otherConfiguration.setExpiryPolicyFactory(CreatedExpiryPolicy.factoryOf(Duration.ONE_MINUTE)); // <6>

    Cache<String, String> foosCache = manager.createCache("foos", otherConfiguration); // <7>
    CacheRuntimeConfiguration<Long, String> foosEhcacheConfig = (CacheRuntimeConfiguration<Long, String>) foosCache
            .getConfiguration(Eh107Configuration.class)
            .unwrap(CacheRuntimeConfiguration.class);
    foosEhcacheConfig.getExpiry().getExpiryForCreation(42L, "Answer!").getAmount(); // <8>

    CompleteConfiguration<String, String> foosConfig = foosCache.getConfiguration(CompleteConfiguration.class);

    try {
        final Factory<ExpiryPolicy> expiryPolicyFactory = foosConfig.getExpiryPolicyFactory();
        ExpiryPolicy expiryPolicy = expiryPolicyFactory.create(); // <9>
        throw new AssertionError("Expected UnsupportedOperationException");
    } catch (UnsupportedOperationException e) {
        // Expected
    }
    // end::jsr107SupplementWithTemplatesExample[]

    assertThat(ehcacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize(), is(20L));
    assertThat(foosEhcacheConfig.getExpiry().getExpiryForCreation(42L, "Answer!"),
            is(new org.ehcache.expiry.Duration(2, TimeUnit.MINUTES)));
}
// Issue https://github.com/hazelcast/hazelcast/issues/5865
@Test
public void testCompletionTestByPuttingAndRemovingFromDifferentNodes() throws InterruptedException {
    String cacheName = "simpleCache";
    CacheManager cacheManager1 = cachingProvider1.getCacheManager();
    CacheManager cacheManager2 = cachingProvider2.getCacheManager();

    CacheConfig<Integer, String> config = new CacheConfig<Integer, String>();
    final SimpleEntryListener<Integer, String> listener = new SimpleEntryListener<Integer, String>();
    MutableCacheEntryListenerConfiguration<Integer, String> listenerConfiguration =
            new MutableCacheEntryListenerConfiguration<Integer, String>(
                    FactoryBuilder.factoryOf(listener), null, true, true);
    config.addCacheEntryListenerConfiguration(listenerConfiguration);

    Cache<Integer, String> cache1 = cacheManager1.createCache(cacheName, config);
    Cache<Integer, String> cache2 = cacheManager2.getCache(cacheName);
    assertNotNull(cache1);
    assertNotNull(cache2);

    Integer key1 = 1;
    String value1 = "value1";
    cache1.put(key1, value1);
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals(1, listener.created.get());
        }
    });

    Integer key2 = 2;
    String value2 = "value2";
    cache1.put(key2, value2);
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals(2, listener.created.get());
        }
    });

    Set<Integer> keys = new HashSet<Integer>();
    keys.add(key1);
    keys.add(key2);
    cache2.removeAll(keys);
    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            assertEquals(2, listener.removed.get());
        }
    });
}
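// SimpleEntryListener is referenced above but not shown. A rough sketch, assuming it merely counts
// created and removed events in AtomicInteger fields (hypothetical; the real test helper may track
// other event types as well):
public static class SimpleEntryListener<K, V>
        implements CacheEntryCreatedListener<K, V>, CacheEntryRemovedListener<K, V>, Serializable {

    public final AtomicInteger created = new AtomicInteger();
    public final AtomicInteger removed = new AtomicInteger();

    @Override
    public void onCreated(Iterable<CacheEntryEvent<? extends K, ? extends V>> events)
            throws CacheEntryListenerException {
        for (CacheEntryEvent<? extends K, ? extends V> event : events) {
            created.incrementAndGet();
        }
    }

    @Override
    public void onRemoved(Iterable<CacheEntryEvent<? extends K, ? extends V>> events)
            throws CacheEntryListenerException {
        for (CacheEntryEvent<? extends K, ? extends V> event : events) {
            removed.incrementAndGet();
        }
    }
}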
@Bean
public CacheManager jCacheManager() {
    CacheManager cacheManager = this.cachingProvider.getCacheManager();
    MutableConfiguration<Object, Object> mutableConfiguration = new MutableConfiguration<>();
    mutableConfiguration.setStoreByValue(false); // otherwise the value has to be Serializable
    cacheManager.createCache("testCache", mutableConfiguration);
    cacheManager.createCache("primary", mutableConfiguration);
    cacheManager.createCache("secondary", mutableConfiguration);
    return cacheManager;
}
@Bean
public JCacheCacheManager cacheManager() {
    CacheManager cacheManager = Caching.getCachingProvider().getCacheManager();
    MutableConfiguration<Object, Domain> configuration = new MutableConfiguration<>();
    configuration.setExpiryPolicyFactory(new SampleExpiryFactory());
    configuration.setStoreByValue(false);
    cacheManager.createCache("domainCache", configuration);
    return new JCacheCacheManager(cacheManager);
}
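// SampleExpiryFactory is project-specific and not included in this snippet. A minimal sketch of such
// a Factory<ExpiryPolicy>, assuming entries should simply expire one minute after creation
// (hypothetical; the real factory may encode different expiry rules):
public class SampleExpiryFactory implements Factory<ExpiryPolicy> {

    @Override
    public ExpiryPolicy create() {
        // Expire one minute after creation; reads and updates do not extend the lifetime.
        return new CreatedExpiryPolicy(Duration.ONE_MINUTE);
    }
}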
public static Cache createCacheWithProperties(
        CachingProvider provider, Class invoker, String cacheName, Properties properties) {
    CacheManager manager = provider.getCacheManager(
            URI.create(invoker.getName()),
            new TestClassLoader(Thread.currentThread().getContextClassLoader()),
            properties);
    properties.setProperty("infinispan.jcache.remote.managed_access", "false");
    return manager.createCache(cacheName, new MutableConfiguration());
}
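// TestClassLoader is a test helper that is not part of this snippet. If all it needs to do is wrap
// the context class loader, a trivial sketch could look like this (hypothetical stand-in):
public static class TestClassLoader extends ClassLoader {
    public TestClassLoader(ClassLoader parent) {
        super(parent);
    }
}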
@Test
public void testCacheLoader() {
    final CacheLoader<String, Integer> cacheLoader = new CacheLoader<String, Integer>() {
        @Override
        public Integer load(String key) throws CacheLoaderException {
            return Integer.valueOf(key);
        }

        @Override
        public Map<String, Integer> loadAll(Iterable<? extends String> keys) throws CacheLoaderException {
            Map<String, Integer> map = new HashMap<>();
            for (String key : keys) {
                map.put(key, Integer.valueOf(key));
            }
            return map;
        }
    };

    try (CachingProvider cachingProvider = Caching.getCachingProvider(GuavaCachingProvider.class.getName())) {
        CacheManager cacheManager = cachingProvider.getCacheManager();

        MutableConfiguration<String, Integer> custom = new MutableConfiguration<>();
        custom.setStoreByValue(false);
        custom.setTypes(String.class, Integer.class);
        custom.setReadThrough(true);
        custom.setCacheLoaderFactory(new Factory<CacheLoader<String, Integer>>() {
            @Override
            public CacheLoader<String, Integer> create() {
                return cacheLoader;
            }
        });

        Cache<String, Integer> loadingCache = cacheManager.createCache("loadingCache", custom);
        assertEquals(Integer.valueOf(1), loadingCache.get("1"));
        assertEquals(Integer.valueOf(2), loadingCache.get("2"));
        assertEquals(Integer.valueOf(3), loadingCache.get("3"));

        Set<String> keys = Sets.newHashSet("4", "5", "6");
        Map<String, Integer> map = loadingCache.getAll(keys);
        assertEquals(3, map.size());
        assertEquals(Integer.valueOf(4), map.get("4"));
        assertEquals(Integer.valueOf(5), map.get("5"));
        assertEquals(Integer.valueOf(6), map.get("6"));
    }
}
/** {@inheritDoc} */
@Override
public synchronized void close() {
    WeakHashMap<ClassLoader, HashMap<URI, CacheManager>> managersByClassLoader = this.cacheManagersByClassLoader;
    this.cacheManagersByClassLoader = new WeakHashMap<ClassLoader, HashMap<URI, CacheManager>>();

    for (ClassLoader classLoader : managersByClassLoader.keySet()) {
        for (CacheManager cacheManager : managersByClassLoader.get(classLoader).values()) {
            cacheManager.close();
        }
    }
}
private void executeEntryProcessor(
        Integer key, EntryProcessor<Integer, String, Void> entryProcessor, String cacheName) {
    CachingProvider cachingProvider = HazelcastServerCachingProvider.createCachingProvider(node1);
    CacheManager cacheManager = cachingProvider.getCacheManager();
    CompleteConfiguration<Integer, String> config =
            new MutableConfiguration<Integer, String>().setTypes(Integer.class, String.class);
    Cache<Integer, String> cache = cacheManager.createCache(cacheName, config);
    cache.invoke(key, entryProcessor);
}
/** {@inheritDoc} */
@Override
public synchronized void close(ClassLoader classLoader) {
    ClassLoader managerClassLoader = classLoader == null ? getDefaultClassLoader() : classLoader;
    HashMap<URI, CacheManager> cacheManagersByURI = cacheManagersByClassLoader.remove(managerClassLoader);
    if (cacheManagersByURI != null) {
        for (CacheManager cacheManager : cacheManagersByURI.values()) {
            cacheManager.close();
        }
    }
}
@Test
public void testWithoutEhcacheExplicitDependencyCanSpecifyXML() throws Exception {
    // tag::jsr107UsingXMLConfigExample[]
    CachingProvider cachingProvider = Caching.getCachingProvider();
    CacheManager manager = cachingProvider.getCacheManager( // <1>
            getClass().getResource("/org/ehcache/docs/ehcache-jsr107-config.xml").toURI(), // <2>
            getClass().getClassLoader()); // <3>

    Cache<Long, Product> readyCache = manager.getCache("ready-cache", Long.class, Product.class); // <4>
    // end::jsr107UsingXMLConfigExample[]

    assertThat(readyCache, notNullValue());
}
@Test
public void testJCacheGettingStarted() {
    try (CachingProvider provider = Caching.getCachingProvider();
            CacheManager manager = provider.getCacheManager();
            Cache<String, String> cache = manager.createCache("testCache", new MutableConfiguration<>())) {
        cache.put("key", "value");
        assertThat(cache.get("key")).isEqualTo("value");

        cache.remove("key");
        assertThat(cache.get("key")).isNull();
    }
}
@Override
public void close() {
    flush();
    cacheEntryList.clear();
    if (!cache.isDestroyed() && !manager.isClosed()) {
        cache.close();
    }
    if (!manager.isClosed()) {
        manager.close();
    }
    hazelcastInstance.shutdown();
    persistentDataStore.close();
    LOG.info("JCache Gora datastore destroyed successfully.");
}
@Test
public void test_CacheReplicationOperation_serialization() throws Exception {
    TestHazelcastInstanceFactory factory = new TestHazelcastInstanceFactory(1);
    HazelcastInstance hazelcastInstance = factory.newHazelcastInstance();
    try {
        CachingProvider provider = HazelcastServerCachingProvider.createCachingProvider(hazelcastInstance);
        CacheManager manager = provider.getCacheManager();
        CompleteConfiguration configuration = new MutableConfiguration();
        Cache cache1 = manager.createCache("cache1", configuration);
        Cache cache2 = manager.createCache("cache2", configuration);
        Cache cache3 = manager.createCache("cache3", configuration);
        for (int i = 0; i < 1000; i++) {
            cache1.put("key" + i, i);
            cache2.put("key" + i, i);
            cache3.put("key" + i, i);
        }

        HazelcastInstanceProxy proxy = (HazelcastInstanceProxy) hazelcastInstance;
        Field original = HazelcastInstanceProxy.class.getDeclaredField("original");
        original.setAccessible(true);
        HazelcastInstanceImpl impl = (HazelcastInstanceImpl) original.get(proxy);

        NodeEngineImpl nodeEngine = impl.node.nodeEngine;
        CacheService cacheService = nodeEngine.getService(CacheService.SERVICE_NAME);
        int partitionCount = nodeEngine.getPartitionService().getPartitionCount();

        for (int partitionId = 0; partitionId < partitionCount; partitionId++) {
            CachePartitionSegment segment = cacheService.getSegment(partitionId);
            CacheReplicationOperation operation = new CacheReplicationOperation(segment, 1);
            Data serialized = service.toData(operation);
            try {
                service.toObject(serialized);
            } catch (Exception e) {
                throw new Exception("Partition: " + partitionId, e);
            }
        }
    } finally {
        factory.shutdownAll();
    }
}
@Test
public void testUsingEhcacheConfiguration() throws Exception {
    // tag::ehcacheBasedConfigurationExample[]
    CacheConfiguration<Long, String> cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder()
            .buildConfig(Long.class, String.class); // <1>

    Cache<Long, String> cache = cacheManager.createCache("myCache",
            Eh107Configuration.fromEhcacheCacheConfiguration(cacheConfiguration)); // <2>

    Eh107Configuration<Long, String> configuration = cache.getConfiguration(Eh107Configuration.class);
    configuration.unwrap(CacheConfiguration.class); // <3>
    configuration.unwrap(CacheRuntimeConfiguration.class); // <4>

    try {
        cache.getConfiguration(CompleteConfiguration.class); // <5>
        throw new AssertionError("IllegalArgumentException expected");
    } catch (IllegalArgumentException iaex) {
        // Expected
    }
    // end::ehcacheBasedConfigurationExample[]
}
static {
    // Initialize the cache
    try {
        CacheFactory cacheFactory = CacheManager.getInstance().getCacheFactory();
        cache = cacheFactory.createCache(Collections.emptyMap());
    } catch (CacheException e) {
        // Note: concatenating e.getStackTrace() only prints the array reference, so log the message only.
        log.severe("Exception creating cache: " + e.getMessage());
        cache = null;
    }

    // Read important properties from file
    Properties properties = new Properties();
    try (FileInputStream in = new FileInputStream("WEB-INF/project.properties")) {
        properties.load(in);
    } catch (IOException e) {
        e.printStackTrace();
    }

    CLIENT_ID = properties.getProperty("clientId");
    CLIENT_SECRET = properties.getProperty("clientSecret");
    REDIRECTION_ENDPOINT = properties.getProperty("redirectionEndpoint");
    AUTHORIZATION_ENDPOINT = properties.getProperty("authorizationEndpoint");
    TOKEN_ENDPOINT = properties.getProperty("tokenEndpoint");

    FACEBOOK_API_ENDPOINT = "https://graph.facebook.com/v2.5/";
    MAX_NUMBER_OF_FACEBOOK_POSTS_TO_REQUEST = 200;
    FACEBOOK_REQUESTED_PROFILE_FIELDS = "id,birthday,hometown,name,website,work";
    FACEBOOK_REQUESTED_FEED_FIELDS = "id,name,type,message,status_type,created_time,from,likes%7Bid,name%7D";
}
@Override
public void close(URI uri, ClassLoader classLoader) {
    synchronized (cacheManagers) {
        if (uri != null) {
            Map<URI, CacheManager> map = cacheManagers.get(classLoader);
            if (map != null) {
                CacheManager cacheManager = map.remove(uri);
                if (map.isEmpty()) cacheManagers.remove(classLoader);
                if (cacheManager != null) cacheManager.close();
            }
        } else {
            Map<URI, CacheManager> cacheManagersToClose = cacheManagers.remove(classLoader);
            if (cacheManagersToClose != null) close(cacheManagersToClose);
        }
    }
}
@Test
public void testMultiClusterMultipleClients() throws MalformedURLException, URISyntaxException {
    final String cacheName = "test";
    final String key1 = "key1";
    final String valuecm1 = "Value-is-cm1";
    final String valuecm2 = "Value-is-cm2";

    final HazelcastClientCachingProvider cachingProvider = new HazelcastClientCachingProvider();
    final CacheManager cm1 = cachingProvider.getCacheManager(uri1, null);
    final CacheManager cm2 = cachingProvider.getCacheManager(uri2, null);
    final CacheConfig<String, String> cacheConfig = new CacheConfig<String, String>();
    final Cache<String, String> cache1 = cm1.createCache(cacheName, cacheConfig);
    final Cache<String, String> cache2 = cm2.createCache(cacheName, cacheConfig);

    cache1.put(key1, valuecm1);
    cache2.put(key1, valuecm2);

    assertEquals(valuecm1, cache1.get(key1));
    assertEquals(valuecm2, cache2.get(key1));

    cachingProvider.close(uri1, null);
    cachingProvider.close(uri2, null);
    // cm1.close();
    // cm2.close();

    final CacheManager cm11 = cachingProvider.getCacheManager(uri1, null);
    final Cache<String, String> cache11 = cm11.getCache(cacheName);
    assertEquals(valuecm1, cache11.get(key1));
    cm11.close();
}
@Test
public void testCustomCacheMergePolicy() {
    String cacheName = randomMapName();
    Config config = newConfig();
    HazelcastInstance h1 = Hazelcast.newHazelcastInstance(config);
    HazelcastInstance h2 = Hazelcast.newHazelcastInstance(config);

    TestMemberShipListener memberShipListener = new TestMemberShipListener(1);
    h2.getCluster().addMembershipListener(memberShipListener);
    TestLifeCycleListener lifeCycleListener = new TestLifeCycleListener(1);
    h2.getLifecycleService().addLifecycleListener(lifeCycleListener);

    closeConnectionBetween(h1, h2);

    assertOpenEventually(memberShipListener.latch);
    assertClusterSizeEventually(1, h1);
    assertClusterSizeEventually(1, h2);

    CachingProvider cachingProvider1 = HazelcastServerCachingProvider.createCachingProvider(h1);
    CachingProvider cachingProvider2 = HazelcastServerCachingProvider.createCachingProvider(h2);
    CacheManager cacheManager1 = cachingProvider1.getCacheManager();
    CacheManager cacheManager2 = cachingProvider2.getCacheManager();
    CacheConfig cacheConfig = newCacheConfig(cacheName, CustomCacheMergePolicy.class.getName());
    Cache cache1 = cacheManager1.createCache(cacheName, cacheConfig);
    Cache cache2 = cacheManager2.createCache(cacheName, cacheConfig);

    // TODO: we assume the cluster is still split at this point and while the gets/puts below run.
    // This assumption seems fragile due to its time sensitivity.
    String key = generateKeyOwnedBy(h1);
    cache1.put(key, "value");
    cache2.put(key, Integer.valueOf(1));

    assertOpenEventually(lifeCycleListener.latch);
    assertClusterSizeEventually(2, h1);
    assertClusterSizeEventually(2, h2);

    Cache cacheTest = cacheManager2.getCache(cacheName);
    assertNotNull(cacheTest.get(key));
    assertTrue(cacheTest.get(key) instanceof Integer);
}
@After
public void tearDown() throws Exception {
    if (cacheManager != null) {
        cacheManager.close();
    }
    if (cachingProvider != null) {
        cachingProvider.close();
    }
}
@Override
public void deleteSchema() {
    cache.removeAll();
    manager.destroyCache(super.getPersistentClass().getSimpleName());
    persistentDataStore.deleteSchema();
    LOG.info("Deleted schema on persistent store and destroyed cache for persistent bean {}.",
            super.getPersistentClass().getSimpleName());
}
public CacheService() {
    try {
        cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap());
    } catch (CacheException e) {
        LOGGER.error("Can't init cache manager. " + e.getMessage());
    }
    localCache = new HashMap<String, Object>();
    localCacheTime = System.currentTimeMillis();
}
@Test
public void testCachesDestroyFromOtherManagers() {
    CacheManager cacheManager = cachingProvider1.getCacheManager();
    CacheManager cacheManager2 = cachingProvider2.getCacheManager();

    MutableConfiguration configuration = new MutableConfiguration();
    final Cache c1 = cacheManager.createCache("c1", configuration);
    final Cache c2 = cacheManager2.createCache("c2", configuration);
    c1.put("key", "value");
    c2.put("key", "value");

    cacheManager.close();

    assertTrueAllTheTime(new AssertTask() {
        @Override
        public void run() throws Exception {
            c2.get("key");
        }
    }, 10);
}
/** {@inheritDoc} */
@Override
public synchronized void close(URI uri, ClassLoader classLoader) {
    URI managerURI = uri == null ? getDefaultURI() : uri;
    ClassLoader managerClassLoader = classLoader == null ? getDefaultClassLoader() : classLoader;

    HashMap<URI, CacheManager> cacheManagersByURI = cacheManagersByClassLoader.get(managerClassLoader);
    if (cacheManagersByURI != null) {
        CacheManager cacheManager = cacheManagersByURI.remove(managerURI);
        if (cacheManager != null) {
            cacheManager.close();
        }
        if (cacheManagersByURI.size() == 0) {
            cacheManagersByClassLoader.remove(managerClassLoader);
        }
    }
}
@Test
public void testPassThroughCacheMergePolicy() {
    String cacheName = randomMapName();
    Config config = newConfig();
    HazelcastInstance h1 = Hazelcast.newHazelcastInstance(config);
    HazelcastInstance h2 = Hazelcast.newHazelcastInstance(config);

    TestMemberShipListener memberShipListener = new TestMemberShipListener(1);
    h2.getCluster().addMembershipListener(memberShipListener);
    TestLifeCycleListener lifeCycleListener = new TestLifeCycleListener(1);
    h2.getLifecycleService().addLifecycleListener(lifeCycleListener);

    closeConnectionBetween(h1, h2);

    assertOpenEventually(memberShipListener.latch);
    assertClusterSizeEventually(1, h1);
    assertClusterSizeEventually(1, h2);

    CachingProvider cachingProvider1 = HazelcastServerCachingProvider.createCachingProvider(h1);
    CachingProvider cachingProvider2 = HazelcastServerCachingProvider.createCachingProvider(h2);
    CacheManager cacheManager1 = cachingProvider1.getCacheManager();
    CacheManager cacheManager2 = cachingProvider2.getCacheManager();
    CacheConfig cacheConfig = newCacheConfig(cacheName, PassThroughCacheMergePolicy.class.getName());
    Cache cache1 = cacheManager1.createCache(cacheName, cacheConfig);
    Cache cache2 = cacheManager2.createCache(cacheName, cacheConfig);

    String key = generateKeyOwnedBy(h1);
    cache1.put(key, "value");
    cache2.put(key, "passThroughValue");

    assertOpenEventually(lifeCycleListener.latch);
    assertClusterSizeEventually(2, h1);
    assertClusterSizeEventually(2, h2);

    Cache cacheTest = cacheManager2.getCache(cacheName);
    assertEquals("passThroughValue", cacheTest.get(key));
}
@Override
public void createSchema() {
    if (manager.getCache(super.getPersistentClass().getSimpleName(), keyClass, persistentClass) == null) {
        cacheEntryList.clear();
        cache = manager.createCache(persistentClass.getSimpleName(), cacheConfig).unwrap(ICache.class);
    }
    cache.registerCacheEntryListener(new MutableCacheEntryListenerConfiguration<>(
            JCacheCacheFactoryBuilder.factoryOfEntryListener(new JCacheCacheEntryListener<K, T>(cacheEntryList)),
            null, true, true));
    persistentDataStore.createSchema();
    LOG.info("Created schema on persistent store and initialized cache for persistent bean {}.",
            super.getPersistentClass().getSimpleName());
}