@Test
public void testRemovedEntry_shouldNotBeReached_afterMigration() throws Exception {
    String mapName = randomMapName();
    TestHazelcastInstanceFactory factory = new TestHazelcastInstanceFactory(2);
    MapStoreTest.SimpleMapStore<Integer, Integer> store = new MapStoreTest.SimpleMapStore<Integer, Integer>();
    store.store.put(1, 0);
    Config config = createConfig(mapName, store);

    HazelcastInstance node1 = factory.newHazelcastInstance(config);
    IMap<Integer, Integer> map = node1.getMap(mapName);
    map.put(1, 1);
    map.delete(1);

    HazelcastInstance node2 = factory.newHazelcastInstance(config);
    map = node2.getMap(mapName);
    Integer value = map.get(1);

    factory.shutdownAll();

    assertNull(value);
}
@Test(timeout = 30000)
public void testPartitionPostpone() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    assertClusterSizeEventually(3, h1);

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    KeyValueSource<Integer, Integer> kvs = KeyValueSource.fromMap(m1);
    KeyValueSource<Integer, Integer> wrapper = new MapKeyValueSourceAdapter<Integer, Integer>(kvs);
    Job<Integer, Integer> job = tracker.newJob(wrapper);
    ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new TestMapper()).submit();

    Map<String, List<Integer>> result = future.get();

    assertEquals(100, result.size());
    for (List<Integer> value : result.values()) {
        assertEquals(1, value.size());
    }
}
@Test(expected = HazelcastInstanceNotActiveException.class)
public void testShutDownNodeWhenOtherWaitingOnConditionAwait() throws InterruptedException {
    final TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    final HazelcastInstance instance = nodeFactory.newHazelcastInstance(new Config());
    nodeFactory.newHazelcastInstance(new Config());
    final String name = "testShutDownNodeWhenOtherWaitingOnConditionAwait";
    final ILock lock = instance.getLock(name);
    final ICondition condition = lock.newCondition("s");
    final CountDownLatch latch = new CountDownLatch(1);

    new Thread(new Runnable() {
        public void run() {
            try {
                latch.await(1, TimeUnit.MINUTES);
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            instance.getLifecycleService().shutdown();
        }
    }).start();

    lock.lock();
    try {
        latch.countDown();
        condition.await();
    } catch (InterruptedException e) {
    }
    lock.unlock();
}
@Test(timeout = 30000)
public void testDataSerializableIntermediateObject() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker jobTracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = jobTracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Integer> future = job.mapper(new TestMapper())
            .combiner(new DataSerializableIntermediateCombinerFactory())
            .reducer(new DataSerializableIntermediateReducerFactory())
            .submit(new DataSerializableIntermediateCollator());

    // Precalculate result
    int expectedResult = 0;
    for (int i = 0; i < 100; i++) {
        expectedResult += i;
    }
    expectedResult = (int) ((double) expectedResult / 100);

    assertEquals(expectedResult, (int) future.get());
}
@Test
public void test_whenMemberAdded() {
    TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
    HazelcastInstance hz = factory.newHazelcastInstance();
    InternalPartitionServiceImpl partitionService = getNode(hz).partitionService;
    final int partitionCount = partitionService.getPartitionCount();
    warmUpPartitionsAndDrainEvents(hz, partitionCount);

    final AtomicInteger count = addEventCountingPartitionListener(partitionService);

    factory.newHazelcastInstance();
    assertClusterSizeEventually(2, hz);

    assertTrueEventually(new AssertTask() {
        @Override
        public void run() throws Exception {
            int currentCount = count.get();
            assertTrue("Expecting events equal or greater than partition-count! Count: " + currentCount,
                    currentCount >= partitionCount);
        }
    });
}
@Test(timeout = 30000)
public void testMapperReducerCollator() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Integer> future = job.mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit(new TestCollator());

    int result = future.get();

    // Precalculate result
    int expectedResult = 0;
    for (int i = 0; i < 100; i++) {
        expectedResult += i;
    }

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResult, result);
    }
}
@Test(timeout = 30000)
public void testMapperReducer() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, Integer>> future =
            job.mapper(new GroupingTestMapper()).reducer(new TestReducerFactory()).submit();

    Map<String, Integer> result = future.get();

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 100; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResults[i], (int) result.get(String.valueOf(i)));
    }
}
@Test(timeout = 100000)
public void testKeyOwnerDies() throws Exception {
    final TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final Config config = new Config();
    final HazelcastInstance keyOwner = nodeFactory.newHazelcastInstance(config);
    final HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    final HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
    warmUpPartitions(keyOwner, instance1, instance2);

    final String key = generateKeyOwnedBy(keyOwner);
    final ILock lock1 = instance1.getLock(key);
    lock1.lock();

    final CountDownLatch latch = new CountDownLatch(1);
    new Thread(new Runnable() {
        public void run() {
            final ILock lock = instance2.getLock(key);
            lock.lock();
            latch.countDown();
        }
    }).start();

    Thread.sleep(1000);
    keyOwner.getLifecycleService().shutdown();

    Assert.assertTrue(lock1.isLocked());
    Assert.assertTrue(lock1.isLockedByCurrentThread());
    Assert.assertTrue(lock1.tryLock());
    lock1.unlock();
    lock1.unlock();

    Assert.assertTrue(latch.await(10, TimeUnit.SECONDS));
}
@Test
public void test_rollingRestart() {
    final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
    final int nodeCount = 3;
    final HazelcastInstance[] instances = new HazelcastInstance[nodeCount];
    instances[0] = factory.newHazelcastInstance();
    if (partitionAssignmentType == PartitionAssignmentType.DURING_STARTUP) {
        warmUpPartitions(instances[0]);
    }
    for (int i = 1; i < nodeCount; i++) {
        instances[i] = factory.newHazelcastInstance();
    }
    if (partitionAssignmentType == PartitionAssignmentType.AT_THE_END) {
        warmUpPartitions(instances);
    }

    changeClusterStateEventually(instances[0], clusterState);

    Address address = getNode(instances[0]).getThisAddress();
    instances[0].shutdown();
    instances[0] = factory.newHazelcastInstance(address);

    for (HazelcastInstance instance : instances) {
        assertClusterSizeEventually(nodeCount, instance);
        assertEquals(clusterState, instance.getCluster().getClusterState());
    }

    changeClusterStateEventually(instances[0], ClusterState.ACTIVE);
}
@Test(timeout = 30000, expected = CancellationException.class)
public void testInProcessCancellation() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    assertClusterSizeEventually(3, h1);

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new TimeConsumingMapper()).submit();

    future.cancel(true);

    try {
        Map<String, List<Integer>> result = future.get();
        fail();
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    }
}
@Test(timeout = 60000)
public void testMapReduceWithCustomKeyValueSource() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    assertClusterSizeEventually(3, h1);
    assertClusterSizeEventually(3, h2);
    assertClusterSizeEventually(3, h3);

    JobTracker jobTracker = h1.getJobTracker("default");
    Job<String, Integer> job = jobTracker.newJob(new CustomKeyValueSource());
    ICompletableFuture<Map<String, Integer>> completableFuture = job.chunkSize(10)
            .mapper(new CustomMapper())
            .combiner(new CustomCombinerFactory())
            .reducer(new CustomReducerFactory())
            .submit();

    Map<String, Integer> result = completableFuture.get();

    assertEquals(1000, result.size());

    List<Map.Entry<String, Integer>> entrySet = new ArrayList<Map.Entry<String, Integer>>(result.entrySet());
    Collections.sort(entrySet, ENTRYSET_COMPARATOR);

    int count = 0;
    for (Map.Entry<String, Integer> entry : entrySet) {
        assertEquals(String.valueOf(count), entry.getKey());
        assertEquals(count++ * 6, (int) entry.getValue());
    }
}
@Test(timeout = 30000)
public void testNullFromObjectCombiner() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker jobTracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = jobTracker.newJob(KeyValueSource.fromMap(m1));
    JobCompletableFuture<Map<String, BigInteger>> future = job.chunkSize(1)
            .mapper(new GroupingTestMapper())
            .combiner(new ObjectCombinerFactory())
            .reducer(new ObjectReducerFactory())
            .submit();

    int[] expectedResults = new int[4];
    for (int i = 0; i < 100; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    Map<String, BigInteger> map = future.get();
    for (int i = 0; i < 4; i++) {
        assertEquals(BigInteger.valueOf(expectedResults[i]), map.get(String.valueOf(i)));
    }
}
@Test(timeout = 120000)
public void testIssue1142ExceptionWhenLoadAllReturnsNull() {
    Config config = getConfig();
    String mapname = "testIssue1142ExceptionWhenLoadAllReturnsNull";
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setImplementation(new MapStoreAdapter<String, String>() {
        @Override
        public Set<String> loadAllKeys() {
            Set keys = new HashSet();
            keys.add("key");
            return keys;
        }

        public Map loadAll(Collection keys) {
            return null;
        }
    });
    config.getMapConfig(mapname).setMapStoreConfig(mapStoreConfig);

    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    HazelcastInstance instance = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
    final IMap map = instance.getMap(mapname);
    for (int i = 0; i < 300; i++) {
        map.put(i, i);
    }

    assertEquals(300, map.size());
}
@Test
public void testLockConditionSignalAllShutDownKeyOwner() throws InterruptedException {
    final TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
    final Config config = new Config();
    final String name = "testLockConditionSignalAllShutDownKeyOwner";
    final HazelcastInstance instance = nodeFactory.newHazelcastInstance(config);
    final AtomicInteger count = new AtomicInteger(0);
    final int size = 50;
    int k = 0;
    final HazelcastInstance keyOwner = nodeFactory.newHazelcastInstance(config);
    while (!keyOwner.getCluster().getLocalMember()
            .equals(instance.getPartitionService().getPartition(++k).getOwner())) {
        Thread.sleep(10);
    }

    final ILock lock = instance.getLock(k);
    final ICondition condition = lock.newCondition(name);

    final CountDownLatch awaitLatch = new CountDownLatch(size);
    final CountDownLatch finalLatch = new CountDownLatch(size);
    for (int i = 0; i < size; i++) {
        new Thread(new Runnable() {
            public void run() {
                lock.lock();
                try {
                    awaitLatch.countDown();
                    condition.await();
                    Thread.sleep(5);
                    if (lock.isLockedByCurrentThread()) {
                        count.incrementAndGet();
                    }
                } catch (InterruptedException ignored) {
                } finally {
                    lock.unlock();
                    finalLatch.countDown();
                }
            }
        }).start();
    }

    final ILock lock1 = keyOwner.getLock(k);
    final ICondition condition1 = lock1.newCondition(name);
    awaitLatch.await(1, TimeUnit.MINUTES);
    lock1.lock();
    condition1.signalAll();
    lock1.unlock();
    keyOwner.getLifecycleService().shutdown();

    finalLatch.await(2, TimeUnit.MINUTES);
    Assert.assertEquals(size, count.get());
}
@Test(timeout = 30000)
public void testAsyncMapperReducer() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    final Map<String, Integer> listenerResults = new HashMap<String, Integer>();
    final Semaphore semaphore = new Semaphore(1);
    semaphore.acquire();

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, Integer>> future = job.mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit();

    future.andThen(new ExecutionCallback<Map<String, Integer>>() {
        @Override
        public void onResponse(Map<String, Integer> response) {
            try {
                listenerResults.putAll(response);
            } finally {
                semaphore.release();
            }
        }

        @Override
        public void onFailure(Throwable t) {
            semaphore.release();
        }
    });

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 100; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    semaphore.acquire();

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResults[i], (int) listenerResults.get(String.valueOf(i)));
    }
}
@Test(timeout = 30000)
public void testAsyncMapperReducerCollator() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    final int[] result = new int[1];
    final Semaphore semaphore = new Semaphore(1);
    semaphore.acquire();

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Integer> future = job.mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit(new TestCollator());

    future.andThen(new ExecutionCallback<Integer>() {
        @Override
        public void onResponse(Integer response) {
            try {
                result[0] = response.intValue();
            } finally {
                semaphore.release();
            }
        }

        @Override
        public void onFailure(Throwable t) {
            semaphore.release();
        }
    });

    // Precalculate result
    int expectedResult = 0;
    for (int i = 0; i < 100; i++) {
        expectedResult += i;
    }

    semaphore.acquire();

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResult, result[0]);
    }
}
@Test
public void testMapRecordIdleEvictionOnMigration() throws InterruptedException {
    Config cfg = new Config();
    final String name = "testMapRecordIdleEvictionOnMigration";
    MapConfig mc = cfg.getMapConfig(name);
    int maxIdleSeconds = 10;
    int size = 100;
    final int nsize = size / 5;
    mc.setMaxIdleSeconds(maxIdleSeconds);

    TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(3);
    HazelcastInstance instance1 = factory.newHazelcastInstance(cfg);
    final IMap map = instance1.getMap(name);
    final CountDownLatch latch = new CountDownLatch(size - nsize);
    map.addEntryListener(new EntryAdapter() {
        public void entryEvicted(EntryEvent event) {
            latch.countDown();
        }
    }, false);

    for (int i = 0; i < size; i++) {
        map.put(i, i);
    }

    final Thread thread = new Thread(new Runnable() {
        public void run() {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    for (int i = 0; i < nsize; i++) {
                        map.get(i);
                    }
                    Thread.sleep(1000);
                } catch (HazelcastInstanceNotActiveException e) {
                    return;
                } catch (InterruptedException e) {
                    return;
                }
            }
        }
    });
    thread.start();

    HazelcastInstance instance2 = factory.newHazelcastInstance(cfg);
    HazelcastInstance instance3 = factory.newHazelcastInstance(cfg);

    assertTrue(latch.await(1, TimeUnit.MINUTES));
    Assert.assertEquals(nsize, map.size());

    thread.interrupt();
    thread.join(5000);
}
@Test
public void testNearCacheEvictionByUsingMapTTLEviction() throws InterruptedException {
    final int instanceCount = 3;
    final int ttl = 1;
    final int size = 1000;

    final Config cfg = new Config();
    final String mapName = "_testNearCacheEvictionByUsingMapTTLEviction_";
    final NearCacheConfig nearCacheConfig = new NearCacheConfig();
    nearCacheConfig.setInvalidateOnChange(true);
    nearCacheConfig.setInMemoryFormat(InMemoryFormat.OBJECT);
    cfg.getMapConfig(mapName).setNearCacheConfig(nearCacheConfig);
    final MapConfig mapConfig = cfg.getMapConfig(mapName);
    mapConfig.setTimeToLiveSeconds(ttl);

    final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(instanceCount);
    final HazelcastInstance instance1 = factory.newHazelcastInstance(cfg);
    final HazelcastInstance instance2 = factory.newHazelcastInstance(cfg);
    final HazelcastInstance instance3 = factory.newHazelcastInstance(cfg);

    final IMap map1 = instance1.getMap(mapName);
    final IMap map2 = instance2.getMap(mapName);
    final IMap map3 = instance3.getMap(mapName);

    // observe evictions
    final CountDownLatch latch = new CountDownLatch(size);
    map1.addEntryListener(new EntryAdapter() {
        public void entryEvicted(EntryEvent event) {
            latch.countDown();
        }
    }, false);

    // populate the map and the near caches; note that there is a time window in which
    // entry "i" may already be evicted, so a get() can bring a null value into the near cache
    for (int i = 0; i < size; i++) {
        map1.put(i, i);
        map1.get(i);
        map2.get(i);
        map3.get(i);
    }

    // wait for the eviction operations to complete
    assertOpenEventually(latch);

    // map sizes should be zero after eviction
    assertEquals(0, map1.size());
    assertEquals(0, map2.size());
    assertEquals(0, map3.size());

    // near cache sizes should be zero after eviction
    assertEquals(0, countNotNullValuesInNearCache(mapName, instance1));
    assertEquals(0, countNotNullValuesInNearCache(mapName, instance2));
    assertEquals(0, countNotNullValuesInNearCache(mapName, instance3));
}
@Test(timeout = 100000)
public void testKeyOwnerDiesOnCondition() throws Exception {
    final TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final Config config = new Config();
    final HazelcastInstance keyOwner = nodeFactory.newHazelcastInstance(config);
    final HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    final HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
    int k = 0;
    final AtomicInteger atomicInteger = new AtomicInteger(0);
    while (keyOwner.getCluster().getLocalMember()
            .equals(instance1.getPartitionService().getPartition(k++).getOwner())) {
        Thread.sleep(10);
    }

    final int key = k;
    final ILock lock1 = instance1.getLock(key);
    final String name = "testKeyOwnerDiesOnCondition";
    final ICondition condition1 = lock1.newCondition(name);

    Thread t = new Thread(new Runnable() {
        public void run() {
            final ILock lock = instance2.getLock(key);
            final ICondition condition = lock.newCondition(name);
            lock.lock();
            try {
                condition.await();
            } catch (InterruptedException e) {
                e.printStackTrace();
            } finally {
                lock.unlock();
            }
            atomicInteger.incrementAndGet();
        }
    });
    t.start();
    Thread.sleep(1000);

    lock1.lock();
    keyOwner.getLifecycleService().shutdown();

    condition1.signal();

    lock1.unlock();
    Thread.sleep(1000);
    t.join();

    Assert.assertEquals(1, atomicInteger.get());
}
@Test(timeout = 60000)
public void testMapperReducerChunked() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    assertClusterSizeEventually(3, h1);

    final IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 10000; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    JobCompletableFuture<Map<String, Integer>> future = job.chunkSize(10)
            .mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit();

    final TrackableJob trackableJob = tracker.getTrackableJob(future.getJobId());
    final JobProcessInformation processInformation = trackableJob.getJobProcessInformation();
    Map<String, Integer> result = future.get();

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 10000; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResults[i], (int) result.get(String.valueOf(i)));
    }

    assertTrueEventually(new AssertTask() {
        @Override
        public void run() {
            if (processInformation.getProcessedRecords() < 10000) {
                System.err.println(processInformation.getProcessedRecords());
            }
            assertEquals(10000, processInformation.getProcessedRecords());
        }
    });
}
@Test(timeout = 120000)
public void testGetAllKeys() throws Exception {
    TestEventBasedMapStore testMapStore = new TestEventBasedMapStore();
    Map store = testMapStore.getStore();
    Set keys = new HashSet();
    int size = 1000;
    for (int i = 0; i < size; i++) {
        store.put(i, "value" + i);
        keys.add(i);
    }
    Config config = newConfig(testMapStore, 2);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance(config);
    IMap map1 = h1.getMap("default");
    IMap map2 = h2.getMap("default");
    // checkIfMapLoaded("default", h1);
    // checkIfMapLoaded("default", h2);
    assertEquals("value1", map1.get(1));
    assertEquals("value1", map2.get(1));
    assertEquals(1000, map1.size());
    assertEquals(1000, map2.size());

    HazelcastInstance h3 = nodeFactory.newHazelcastInstance(config);
    IMap map3 = h3.getMap("default");
    // checkIfMapLoaded("default", h3);
    assertEquals("value1", map1.get(1));
    assertEquals("value1", map2.get(1));
    assertEquals("value1", map3.get(1));
    assertEquals(1000, map1.size());
    assertEquals(1000, map2.size());
    assertEquals(1000, map3.size());

    h3.shutdown();
    assertEquals("value1", map1.get(1));
    assertEquals("value1", map2.get(1));
    assertEquals(1000, map1.size());
    assertEquals(1000, map2.size());
}
@Test
public void testOneQuorumsFailsOneQuorumSuccessForDifferentMaps() throws Exception {
    TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(3);

    String fourNodeQuorum = randomString();
    QuorumConfig fourNodeQuorumConfig = new QuorumConfig(fourNodeQuorum, true);
    fourNodeQuorumConfig.setQuorumFunctionImplementation(new QuorumFunction() {
        @Override
        public boolean apply(Collection<Member> members) {
            return members.size() == 4;
        }
    });

    String threeNodeQuorum = randomString();
    QuorumConfig threeNodeQuorumConfig = new QuorumConfig(threeNodeQuorum, true);
    threeNodeQuorumConfig.setQuorumFunctionImplementation(new QuorumFunction() {
        @Override
        public boolean apply(Collection<Member> members) {
            return members.size() == 3;
        }
    });

    MapConfig fourNodeMapConfig = new MapConfig("fourNode");
    fourNodeMapConfig.setQuorumName(fourNodeQuorum);
    MapConfig threeNodeMapConfig = new MapConfig("threeNode");
    threeNodeMapConfig.setQuorumName(threeNodeQuorum);

    Config config = new Config();
    config.addQuorumConfig(threeNodeQuorumConfig);
    config.addQuorumConfig(fourNodeQuorumConfig);
    config.addMapConfig(fourNodeMapConfig);
    config.addMapConfig(threeNodeMapConfig);

    HazelcastInstance h1 = factory.newHazelcastInstance(config);
    HazelcastInstance h2 = factory.newHazelcastInstance(config);
    HazelcastInstance h3 = factory.newHazelcastInstance(config);

    IMap<Object, Object> fourNode = h1.getMap("fourNode");
    IMap<Object, Object> threeNode = h1.getMap("threeNode");
    threeNode.put(generateKeyOwnedBy(h1), "bar");
    try {
        fourNode.put(generateKeyOwnedBy(h1), "bar");
        fail();
    } catch (Exception e) {
    }
}
@Test(timeout = 120000)
public void testMapStoreNotCalledFromEntryProcessorBackup() throws Exception {
    final String mapName = "testMapStoreNotCalledFromEntryProcessorBackup_" + randomString();
    final int instanceCount = 2;
    Config config = getConfig();

    // Configure map with one backup and dummy map store
    MapConfig mapConfig = config.getMapConfig(mapName);
    mapConfig.setBackupCount(1);
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    MapStoreWithStoreCount mapStore = new MapStoreWithStoreCount(1, 120);
    mapStoreConfig.setImplementation(mapStore);
    mapConfig.setMapStoreConfig(mapStoreConfig);

    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(instanceCount);
    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);

    final IMap<String, String> map = instance1.getMap(mapName);
    final String key = "key";
    final String value = "value";

    // executeOnKey
    map.executeOnKey(key, new ValueSetterEntryProcessor(value));
    mapStore.awaitStores();

    assertEquals(value, map.get(key));
    assertEquals(1, mapStore.getCount());
}
@Test(timeout = 120000)
public void testIssue583MapReplaceShouldTriggerMapStore() {
    final ConcurrentMap<String, Long> store = new ConcurrentHashMap<String, Long>();
    final MapStore<String, Long> myMapStore = new SimpleMapStore<String, Long>(store);
    Config config = getConfig();
    config.getMapConfig("myMap")
            .setMapStoreConfig(new MapStoreConfig().setImplementation(myMapStore));
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance hc = nodeFactory.newHazelcastInstance(config);
    IMap<String, Long> myMap = hc.getMap("myMap");

    myMap.put("one", 1L);
    assertEquals(1L, myMap.get("one").longValue());
    assertEquals(1L, store.get("one").longValue());

    myMap.putIfAbsent("two", 2L);
    assertEquals(2L, myMap.get("two").longValue());
    assertEquals(2L, store.get("two").longValue());

    myMap.putIfAbsent("one", 5L);
    assertEquals(1L, myMap.get("one").longValue());
    assertEquals(1L, store.get("one").longValue());

    myMap.replace("one", 1L, 111L);
    assertEquals(111L, myMap.get("one").longValue());
    assertEquals(111L, store.get("one").longValue());

    myMap.replace("one", 1L);
    assertEquals(1L, myMap.get("one").longValue());
    assertEquals(1L, store.get("one").longValue());
}
@Test(timeout = 120000)
public void testOneMemberFlush() throws Exception {
    TestMapStore testMapStore = new TestMapStore(1, 1, 1);
    testMapStore.setLoadAllKeys(false);
    int size = 100;
    Config config = newConfig(testMapStore, 200);
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config);
    IMap map = h1.getMap("default");
    assertEquals(0, map.size());
    for (int i = 0; i < size; i++) {
        map.put(i, i);
    }
    assertEquals(size, map.size());
    assertEquals(0, testMapStore.getStore().size());
    assertEquals(size, map.getLocalMapStats().getDirtyEntryCount());

    map.flush();
    assertEquals(size, testMapStore.getStore().size());
    assertEquals(0, map.getLocalMapStats().getDirtyEntryCount());
    assertEquals(size, map.size());

    for (int i = 0; i < size / 2; i++) {
        map.remove(i);
    }
    assertEquals(size / 2, map.size());
    assertEquals(size, testMapStore.getStore().size());

    map.flush();
    assertEquals(size / 2, testMapStore.getStore().size());
    assertEquals(size / 2, map.size());
}
@Test(timeout = 120000)
public void testInitialLoadModeEagerMultipleThread() {
    final String mapName = "default";
    final int instanceCount = 2;
    final int size = 10000;
    final TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(instanceCount);
    final CountDownLatch countDownLatch = new CountDownLatch(instanceCount - 1);

    final Config config = getConfig();
    GroupConfig groupConfig = new GroupConfig("testEager");
    config.setGroupConfig(groupConfig);
    MapStoreConfig mapStoreConfig = new MapStoreConfig();
    mapStoreConfig.setEnabled(true);
    mapStoreConfig.setImplementation(new SimpleMapLoader(size, true));
    mapStoreConfig.setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER);
    config.getMapConfig(mapName).setMapStoreConfig(mapStoreConfig);

    HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    Runnable runnable = new Runnable() {
        public void run() {
            HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
            final IMap<Object, Object> map = instance2.getMap(mapName);
            assertEquals(size, map.size());
            countDownLatch.countDown();
        }
    };
    new Thread(runnable).start();

    assertOpenEventually(countDownLatch, 120);
    IMap map = instance1.getMap(mapName);
    assertEquals(size, map.size());
}
@Test(expected = IllegalArgumentException.class)
public void testNegativeTtlThrowsException() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
    HazelcastInstance instance = nodeFactory.newHazelcastInstance();
    ReplicatedMap<Integer, Integer> map = instance.getReplicatedMap(randomName());
    map.put(1, 1, -1, TimeUnit.DAYS);
}
private void testHitsAndLastAccessTimeAreSetWithSingleNode(Config config) throws Exception {
    final TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
    final HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    final ReplicatedMap<Integer, Integer> map = instance1.getReplicatedMap(randomMapName());
    final int operations = 100;
    execute(new Runnable() {
        @Override
        public void run() {
            for (int i = 0; i < operations; i++) {
                map.put(i, i);
            }
        }
    }, ADDED, operations, 1, map);

    for (int i = 0; i < operations; i++) {
        map.containsKey(i);
    }
    for (int i = 0; i < operations; i++) {
        final ReplicatedRecord<Integer, Integer> replicatedRecord = getReplicatedRecord(map, i);
        assertEquals(1, replicatedRecord.getHits());
        assertTrue("Last access time should be set for " + i, replicatedRecord.getLastAccessTime() > 0);
    }
}
private HazelcastInstance newInstance() {
    final Config config = new Config();
    final MapConfig mapConfig = new MapConfig(MAP_NAME);
    mapConfig.setBackupCount(2);
    config.addMapConfig(mapConfig);
    return instanceFactory.newHazelcastInstance(config);
}
private void testHitsAreZeroInitiallyWithSingleNode(Config config) throws Exception {
    final TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
    final HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
    final ReplicatedMap<Integer, Integer> map = instance1.getReplicatedMap(randomMapName());
    final int operations = 100;
    execute(new Runnable() {
        @Override
        public void run() {
            for (int i = 0; i < operations; i++) {
                map.put(i, i);
            }
        }
    }, ADDED, operations, 1, map);

    for (int i = 0; i < operations; i++) {
        final ReplicatedRecord<Integer, Integer> replicatedRecord = getReplicatedRecord(map, i);
        assertEquals(0, replicatedRecord.getHits());
    }
}