@Test(timeout = 300000) public void testTwoMemberWriteThrough2() throws Exception { int items = 1000; TestMapStore testMapStore = new TestMapStore(items, 0, 0); Config config = newConfig(testMapStore, 0); TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3); HazelcastInstance h1 = nodeFactory.newHazelcastInstance(config); HazelcastInstance h2 = nodeFactory.newHazelcastInstance(config); IMap map1 = h1.getMap("default"); IMap map2 = h2.getMap("default"); for (int i = 0; i < items; i++) { map1.put(i, "value" + i); } assertTrue("store operations did not complete within 30 seconds", testMapStore.latchStore.await(30, TimeUnit.SECONDS)); assertEquals(items, testMapStore.getStore().size()); assertEquals(items, map1.size()); assertEquals(items, map2.size()); testMapStore.assertAwait(10); // expect N loads (one per put), N stores and 1 loadAllKeys call: 2N + 1 in total assertEquals(items * 2 + 1, testMapStore.callCount.get()); }
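// Regression test for issue 991: a MapLoader that returns null from load, loadAll and loadAllKeys must not break the map; get() on a missing key returns null and a subsequent put/get works normally.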
@Test(timeout = 120000) public void testIssue991EvictedNullIssue() throws InterruptedException { MapStoreConfig mapStoreConfig = new MapStoreConfig(); mapStoreConfig.setEnabled(true); mapStoreConfig.setImplementation( new MapLoader<String, String>() { @Override public String load(String key) { return null; } @Override public Map<String, String> loadAll(Collection<String> keys) { return null; } @Override public Set<String> loadAllKeys() { return null; } }); Config config = getConfig(); config.getMapConfig("testIssue991EvictedNullIssue").setMapStoreConfig(mapStoreConfig); HazelcastInstance hc = createHazelcastInstance(config); IMap<Object, Object> map = hc.getMap("testIssue991EvictedNullIssue"); map.get("key"); assertNull(map.get("key")); map.put("key", "value"); Thread.sleep(2000); assertEquals("value", map.get("key")); }
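// An entry processor must not trigger MapStore.store() on the backup replica: with two members and one backup, the store is expected to be invoked exactly once.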
@Test(timeout = 120000) public void testMapStoreNotCalledFromEntryProcessorBackup() throws Exception { final String mapName = "testMapStoreNotCalledFromEntryProcessorBackup_" + randomString(); final int instanceCount = 2; Config config = getConfig(); // Configure map with one backup and dummy map store MapConfig mapConfig = config.getMapConfig(mapName); mapConfig.setBackupCount(1); MapStoreConfig mapStoreConfig = new MapStoreConfig(); MapStoreWithStoreCount mapStore = new MapStoreWithStoreCount(1, 120); mapStoreConfig.setImplementation(mapStore); mapConfig.setMapStoreConfig(mapStoreConfig); TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(instanceCount); HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config); HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config); final IMap<String, String> map = instance1.getMap(mapName); final String key = "key"; final String value = "value"; // executeOnKey map.executeOnKey(key, new ValueSetterEntryProcessor(value)); mapStore.awaitStores(); assertEquals(value, map.get(key)); assertEquals(1, mapStore.getCount()); }
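// An entry that was deleted must not reappear (for example by being re-loaded from the MapStore) after a second member joins and partitions migrate.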
@Test public void testRemovedEntry_shouldNotBeReached_afterMigration() throws Exception { String mapName = randomMapName(); TestHazelcastInstanceFactory factory = new TestHazelcastInstanceFactory(2); MapStoreTest.SimpleMapStore<Integer, Integer> store = new MapStoreTest.SimpleMapStore<Integer, Integer>(); store.store.put(1, 0); Config config = createConfig(mapName, store); HazelcastInstance node1 = factory.newHazelcastInstance(config); IMap<Integer, Integer> map = node1.getMap(mapName); map.put(1, 1); map.delete(1); HazelcastInstance node2 = factory.newHazelcastInstance(config); map = node2.getMap(mapName); Integer value = map.get(1); factory.shutdownAll(); assertNull(value); }
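// Write-behind with a store that blocks on its first store call: the persisted value must eventually converge to the latest in-memory value for every key.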
@Test(timeout = 120000) public void testSlowStore() throws Exception { final TestMapStore store = new WaitingOnFirstTestMapStore(); Config config = getConfig(); MapStoreConfig mapStoreConfig = new MapStoreConfig(); mapStoreConfig.setEnabled(true); mapStoreConfig.setWriteDelaySeconds(1); mapStoreConfig.setImplementation(store); config.getMapConfig("default").setMapStoreConfig(mapStoreConfig); HazelcastInstance h1 = createHazelcastInstance(config); final IMap<Integer, Integer> map = h1.getMap("testSlowStore"); int count = 1000; for (int i = 0; i < count; i++) { map.put(i, 1); } Thread.sleep(2000); // sleep so that the second round of puts is scheduled into a different write-behind second for (int i = 0; i < count; i++) { map.put(i, 2); } for (int i = 0; i < count; i++) { final int index = i; assertTrueEventually( new AssertTask() { @Override public void run() throws Exception { final Integer valueInMap = map.get(index); final Integer valueInStore = (Integer) store.getStore().get(index); assertEquals(valueInMap, valueInStore); } }); } }
@Test(timeout = 30000) public void testNullFromObjectCombiner() throws Exception { TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3); HazelcastInstance h1 = nodeFactory.newHazelcastInstance(); HazelcastInstance h2 = nodeFactory.newHazelcastInstance(); HazelcastInstance h3 = nodeFactory.newHazelcastInstance(); IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME); for (int i = 0; i < 100; i++) { m1.put(i, i); } JobTracker jobTracker = h1.getJobTracker("default"); Job<Integer, Integer> job = jobTracker.newJob(KeyValueSource.fromMap(m1)); JobCompletableFuture<Map<String, BigInteger>> future = job.chunkSize(1) .mapper(new GroupingTestMapper()) .combiner(new ObjectCombinerFactory()) .reducer(new ObjectReducerFactory()) .submit(); int[] expectedResults = new int[4]; for (int i = 0; i < 100; i++) { int index = i % 4; expectedResults[index] += i; } Map<String, BigInteger> map = future.get(); for (int i = 0; i < 4; i++) { assertEquals(BigInteger.valueOf(expectedResults[i]), map.get(String.valueOf(i))); } }
@BeforeTest public void setup() throws Exception { String mappingPath = getClass().getClassLoader().getResource("test_mapping.xml").getPath(); ServerConfig.mappingsPath = mappingPath.replace("/test_mapping.xml", ""); ServerConfig.globalErrors = getClass().getClassLoader().getResource("global-errors/test_global_errors.xml").getPath(); ServerConfig.customJarPath = null; HazelcastConfigInstance.configInstance = mock(HazelcastConfigInstance.class); @SuppressWarnings("unchecked") IMap<String, com.apifest.MappingConfig> mappingsMap = mock(IMap.class); MappingConfig config = mock(MappingConfig.class); doReturn(mock(BasicAction.class)).when(config).getAction(any(MappingAction.class)); mappingsMap.put("v0.1", config); doReturn(mappingsMap).when(HazelcastConfigInstance.configInstance).getMappingConfigs(); IMap<Integer, String> globalErrorsMap = mock(IMap.class); doReturn(globalErrorsMap).when(HazelcastConfigInstance.configInstance).getGlobalErrors(); // mock loggers AddSenderIdInBodyAction.log = mock(Logger.class); RemoveBalanceFilter.log = mock(Logger.class); ServerConfig.getMappingsPath(); }
@Test public void testQueryDuringAndAfterMigrationWithIndex() throws Exception { Config cfg = new Config(); final HazelcastInstance h1 = nodeFactory.newHazelcastInstance(cfg); IMap imap = h1.getMap("employees"); imap.addIndex("name", false); imap.addIndex("active", false); int size = 500; for (int i = 0; i < size; i++) { imap.put(String.valueOf(i), new Employee("joe" + i, i % 60, ((i & 1) == 1), (double) i)); } nodeFactory.newInstances(cfg, 3); final IMap employees = h1.getMap("employees"); assertTrueAllTheTime( new AssertTask() { @Override public void run() throws Exception { Collection<Employee> values = employees.values(new SqlPredicate("active and name LIKE 'joe15%'")); for (Employee employee : values) { assertTrue(employee.isActive() && employee.getName().startsWith("joe15")); } assertEquals(6, values.size()); } }, 3); }
@Test(timeout = MINUTE) public void testQueryDuringAndAfterMigration() throws Exception { HazelcastInstance h1 = nodeFactory.newHazelcastInstance(); int count = 500; IMap imap = h1.getMap("employees"); for (int i = 0; i < count; i++) { imap.put(String.valueOf(i), new Employee("joe" + i, i % 60, ((i & 1) == 1), (double) i)); } nodeFactory.newInstances(new Config(), 3); final IMap employees = h1.getMap("employees"); assertTrueAllTheTime( new AssertTask() { @Override public void run() throws Exception { Collection<Employee> values = employees.values(new SqlPredicate("active and name LIKE 'joe15%'")); for (Employee employee : values) { assertTrue(employee.isActive()); } assertEquals(6, values.size()); } }, 3); }
@Test public void submitCallablePartitionAware_WithExecutionCallback() throws Exception { IExecutorService service = client.getExecutorService(randomString()); String mapName = randomString(); IMap map = client.getMap(mapName); String key = HazelcastTestSupport.generateKeyOwnedBy(server); Member member = server.getCluster().getLocalMember(); Callable<String> callable = new MapPutPartitionAwareCallable<String, String>(mapName, key); final AtomicReference<String> result = new AtomicReference<String>(); final CountDownLatch responseLatch = new CountDownLatch(1); service.submit( callable, new ExecutionCallback<String>() { public void onResponse(String response) { result.set(response); responseLatch.countDown(); } public void onFailure(Throwable t) {} }); assertOpenEventually("responseLatch", responseLatch); assertEquals(member.getUuid(), result.get()); assertTrue(map.containsKey(member.getUuid())); }
@Test public void testMapRecordEviction() throws InterruptedException { int size = 100000; Config cfg = new Config(); MapConfig mc = cfg.getMapConfig("testMapRecordEviction"); mc.setTimeToLiveSeconds(1); final CountDownLatch latch = new CountDownLatch(size); mc.addEntryListenerConfig( new EntryListenerConfig() .setImplementation( new EntryAdapter() { public void entryEvicted(EntryEvent event) { latch.countDown(); } }) .setLocal(true)); TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2); final HazelcastInstance[] instances = factory.newInstances(cfg); IMap map = instances[0].getMap("testMapRecordEviction"); for (int i = 0; i < size; i++) { map.put(i, i); } assertTrue(latch.await(5, TimeUnit.MINUTES)); assertEquals(0, map.size()); }
@Test public void submitRunnablePartitionAware_withExecutionCallback() throws Exception { IExecutorService service = client.getExecutorService(randomString()); String mapName = randomString(); String key = HazelcastTestSupport.generateKeyOwnedBy(server); Member member = server.getCluster().getLocalMember(); Runnable runnable = new MapPutPartitionAwareRunnable<String>(mapName, key); final CountDownLatch responseLatch = new CountDownLatch(1); service.submit( runnable, new ExecutionCallback() { @Override public void onResponse(Object response) { responseLatch.countDown(); } @Override public void onFailure(Throwable t) {} }); IMap map = client.getMap(mapName); assertOpenEventually("responseLatch", responseLatch); assertTrue(map.containsKey(member.getUuid())); }
@Test public void submitRunnableToAllMembers_withMultiExecutionCallback() throws Exception { IExecutorService service = client.getExecutorService(randomString()); final CountDownLatch responseLatch = new CountDownLatch(CLUSTER_SIZE); final CountDownLatch completeLatch = new CountDownLatch(1); String mapName = randomString(); Runnable runnable = new MapPutRunnable(mapName); service.submitToAllMembers( runnable, new MultiExecutionCallback() { public void onResponse(Member member, Object value) { responseLatch.countDown(); } public void onComplete(Map<Member, Object> values) { completeLatch.countDown(); } }); IMap map = client.getMap(mapName); assertOpenEventually("responseLatch", responseLatch); assertOpenEventually("completeLatch", completeLatch); assertEquals(CLUSTER_SIZE, map.size()); }
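// With write coalescing disabled, every individual update must reach the MapStore: 500 stores from the puts plus 500 deletes from the removes, leaving the store empty.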
@Test public void testAllUpdatesReflectedToMapStore() throws Exception { int nodeCount = 3; final MapStoreWithCounter mapStore = new MapStoreWithCounter<Integer, String>(); TestMapUsingMapStoreBuilder builder = TestMapUsingMapStoreBuilder.create() .withMapStore(mapStore) .withNodeCount(nodeCount) .withNodeFactory(createHazelcastInstanceFactory(nodeCount)) .withBackupCount(0) .withWriteCoalescing(false) .withWriteDelaySeconds(3); IMap<Object, Object> map = builder.build(); for (int i = 0; i < 500; i++) { map.put(i, randomString()); } for (int i = 0; i < 500; i++) { map.remove(i); } assertTrueEventually( new AssertTask() { @Override public void run() throws Exception { final int storeCount = mapStore.countStore.get(); final int deleteCount = mapStore.countDelete.get(); assertEquals(1000, storeCount + deleteCount); assertTrue(mapStore.store.isEmpty()); } }); }
@Test(timeout = 30000) public void testMapperReducerCollator() throws Exception { TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3); HazelcastInstance h1 = nodeFactory.newHazelcastInstance(); HazelcastInstance h2 = nodeFactory.newHazelcastInstance(); HazelcastInstance h3 = nodeFactory.newHazelcastInstance(); IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME); for (int i = 0; i < 100; i++) { m1.put(i, i); } JobTracker tracker = h1.getJobTracker("default"); Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1)); ICompletableFuture<Integer> future = job.mapper(new GroupingTestMapper()) .reducer(new TestReducerFactory()) .submit(new TestCollator()); int result = future.get(); // Precalculate the expected result (sum of 0..99) int expectedResult = 0; for (int i = 0; i < 100; i++) { expectedResult += i; } assertEquals(expectedResult, result); }
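// A transactional put with TTL must not be visible outside the transaction before commit, must be visible after commit, and must expire once the TTL has elapsed.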
@Test public void testPutWithTTL() { final String mapName = randomString(); final int ttlSeconds = 1; final String key = "key"; final String value = "Value"; final IMap map = client.getMap(mapName); final TransactionContext context = client.newTransactionContext(); context.beginTransaction(); final TransactionalMap<Object, Object> txnMap = context.getMap(mapName); txnMap.put(key, value, ttlSeconds, TimeUnit.SECONDS); Object resultFromClientWhileTxnInProgress = map.get(key); context.commitTransaction(); assertNull(resultFromClientWhileTxnInProgress); assertEquals(value, map.get(key)); // wait for the TTL to expire sleepSeconds(ttlSeconds + 1); assertNull(map.get(key)); }
@Test(timeout = 30000) public void testPartitionPostpone() throws Exception { TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3); final HazelcastInstance h1 = nodeFactory.newHazelcastInstance(); final HazelcastInstance h2 = nodeFactory.newHazelcastInstance(); final HazelcastInstance h3 = nodeFactory.newHazelcastInstance(); assertClusterSizeEventually(3, h1); IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME); for (int i = 0; i < 100; i++) { m1.put(i, i); } JobTracker tracker = h1.getJobTracker("default"); KeyValueSource<Integer, Integer> kvs = KeyValueSource.fromMap(m1); KeyValueSource<Integer, Integer> wrapper = new MapKeyValueSourceAdapter<Integer, Integer>(kvs); Job<Integer, Integer> job = tracker.newJob(wrapper); ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new TestMapper()).submit(); Map<String, List<Integer>> result = future.get(); assertEquals(100, result.size()); for (List<Integer> value : result.values()) { assertEquals(1, value.size()); } }
@Test public void testKeysetAndValuesWithPredicates() throws Exception { final String mapName = randomString(); IMap map = client.getMap(mapName); final SampleObjects.Employee emp1 = new SampleObjects.Employee("abc-123-xvz", 34, true, 10D); final SampleObjects.Employee emp2 = new SampleObjects.Employee("abc-123-xvz", 20, true, 10D); map.put(emp1, emp1); final TransactionContext context = client.newTransactionContext(); context.beginTransaction(); final TransactionalMap txMap = context.getMap(mapName); assertNull(txMap.put(emp2, emp2)); assertEquals(2, txMap.size()); assertEquals(2, txMap.keySet().size()); assertEquals(0, txMap.keySet(new SqlPredicate("age = 10")).size()); assertEquals(0, txMap.values(new SqlPredicate("age = 10")).size()); assertEquals(2, txMap.keySet(new SqlPredicate("age >= 10")).size()); assertEquals(2, txMap.values(new SqlPredicate("age >= 10")).size()); context.commitTransaction(); assertEquals(2, map.size()); assertEquals(2, map.values().size()); }
@Test(timeout = 30000) public void testDataSerializableIntermediateObject() throws Exception { TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3); HazelcastInstance h1 = nodeFactory.newHazelcastInstance(); HazelcastInstance h2 = nodeFactory.newHazelcastInstance(); HazelcastInstance h3 = nodeFactory.newHazelcastInstance(); IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME); for (int i = 0; i < 100; i++) { m1.put(i, i); } JobTracker jobTracker = h1.getJobTracker("default"); Job<Integer, Integer> job = jobTracker.newJob(KeyValueSource.fromMap(m1)); ICompletableFuture<Integer> future = job.mapper(new TestMapper()) .combiner(new DataSerializableIntermediateCombinerFactory()) .reducer(new DataSerializableIntermediateReducerFactory()) .submit(new DataSerializableIntermediateCollator()); // Precalculate result int expectedResult = 0; for (int i = 0; i < 100; i++) { expectedResult += i; } expectedResult = (int) ((double) expectedResult / 100); assertEquals(expectedResult, (int) future.get()); }
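// replace(key, oldValue, newValue) inside a transaction must only apply when the current value matches: key1 is updated, key2 keeps its old value.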
@Test public void testTnxMapReplaceKeyValue() throws Exception { final String mapName = randomString(); final String key1 = "key1"; final String oldValue1 = "old1"; final String newValue1 = "new1"; final String key2 = "key2"; final String oldValue2 = "old2"; IMap map = client.getMap(mapName); map.put(key1, oldValue1); map.put(key2, oldValue2); final TransactionContext context = client.newTransactionContext(); context.beginTransaction(); final TransactionalMap txMap = context.getMap(mapName); txMap.replace(key1, oldValue1, newValue1); txMap.replace(key2, "NOT_OLD_VALUE", "NEW_VALUE_CANT_BE_THIS"); context.commitTransaction(); assertEquals(newValue1, map.get(key1)); assertEquals(oldValue2, map.get(key2)); }
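// Blocking traffic from instance2 for longer than the heartbeat timeout and then unblocking it should fire heartbeatResumed for the connection to that member.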
@Test public void testHeartbeatResumedEvent() throws InterruptedException { hazelcastFactory.newHazelcastInstance(); HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig()); final HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance(); // make sure client is connected to instance2 String keyOwnedByInstance2 = generateKeyOwnedBy(instance2); IMap<String, String> map = client.getMap(randomString()); map.put(keyOwnedByInstance2, randomString()); HazelcastClientInstanceImpl clientImpl = getHazelcastClientInstanceImpl(client); ClientConnectionManager connectionManager = clientImpl.getConnectionManager(); final CountDownLatch countDownLatch = new CountDownLatch(1); connectionManager.addConnectionHeartbeatListener( new ConnectionHeartbeatListener() { @Override public void heartbeatResumed(Connection connection) { assertEquals( instance2.getCluster().getLocalMember().getAddress(), connection.getEndPoint()); countDownLatch.countDown(); } @Override public void heartbeatStopped(Connection connection) {} }); blockMessagesFromInstance(instance2, client); sleepMillis(HEARTBEAT_TIMEOUT_MILLIS * 2); unblockMessagesFromInstance(instance2, client); assertOpenEventually(countDownLatch); }
@Test(timeout = 20000) public void test_clusterMapSize() throws Exception { CookieStore cookieStore = new BasicCookieStore(); IMap<String, Object> map = hz.getMap(DEFAULT_MAP_NAME); executeRequest("write", serverPort1, cookieStore); assertEquals(1, map.size()); }
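// Ignored pending the linked issue: entries preloaded through the MapLoader should keep indexes up to date so that predicate queries return correct results after the map is reloaded.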
// ignored due to: https://github.com/hazelcast/hazelcast/issues/5035 @Ignore @Test public void testMapLoaderLoadUpdatingIndex() throws Exception { final int nodeCount = 3; String mapName = randomString(); SampleIndexableObjectMapLoader loader = new SampleIndexableObjectMapLoader(); Config config = createMapConfig(mapName, loader); NodeBuilder nodeBuilder = new NodeBuilder(nodeCount, config).build(); HazelcastInstance node = nodeBuilder.getRandomNode(); IMap<Integer, SampleIndexableObject> map = node.getMap(mapName); for (int i = 0; i < 10; i++) { map.put(i, new SampleIndexableObject("My-" + i, i)); } final SqlPredicate predicate = new SqlPredicate("name='My-5'"); assertPredicateResultCorrect(map, predicate); map.destroy(); loader.preloadValues = true; node = nodeBuilder.getRandomNode(); map = node.getMap(mapName); assertLoadAllKeysCount(loader, 1); assertPredicateResultCorrect(map, predicate); }
@Test public void testHazelcastInstances() { assertNotNull(map1); assertNotNull(map2); assertNotNull(multiMap); assertNotNull(replicatedMap); assertNotNull(queue); assertNotNull(topic); assertNotNull(set); assertNotNull(list); assertNotNull(executorService); assertNotNull(idGenerator); assertNotNull(atomicLong); assertNotNull(atomicReference); assertNotNull(countDownLatch); assertNotNull(semaphore); assertNotNull(lock); assertEquals("map1", map1.getName()); assertEquals("map2", map2.getName()); assertEquals("testMultimap", multiMap.getName()); assertEquals("replicatedMap", replicatedMap.getName()); assertEquals("testQ", queue.getName()); assertEquals("testTopic", topic.getName()); assertEquals("set", set.getName()); assertEquals("list", list.getName()); assertEquals("idGenerator", idGenerator.getName()); assertEquals("atomicLong", atomicLong.getName()); assertEquals("atomicReference", atomicReference.getName()); assertEquals("countDownLatch", countDownLatch.getName()); assertEquals("semaphore", semaphore.getName()); }
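// With EAGER initial load mode, a member that joins from another thread while loading is in progress must still observe the fully loaded map.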
@Test(timeout = 120000) public void testInitialLoadModeEagerMultipleThread() { final String mapName = "default"; final int instanceCount = 2; final int size = 10000; final TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(instanceCount); final CountDownLatch countDownLatch = new CountDownLatch(instanceCount - 1); final Config config = getConfig(); GroupConfig groupConfig = new GroupConfig("testEager"); config.setGroupConfig(groupConfig); MapStoreConfig mapStoreConfig = new MapStoreConfig(); mapStoreConfig.setEnabled(true); mapStoreConfig.setImplementation(new SimpleMapLoader(size, true)); mapStoreConfig.setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER); config.getMapConfig(mapName).setMapStoreConfig(mapStoreConfig); HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config); Runnable runnable = new Runnable() { public void run() { HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config); final IMap<Object, Object> map = instance2.getMap(mapName); assertEquals(size, map.size()); countDownLatch.countDown(); } }; new Thread(runnable).start(); assertOpenEventually(countDownLatch, 120); IMap map = instance1.getMap(mapName); assertEquals(size, map.size()); }
@Test(timeout = 30000, expected = CancellationException.class) public void testInProcessCancellation() throws Exception { TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3); final HazelcastInstance h1 = nodeFactory.newHazelcastInstance(); final HazelcastInstance h2 = nodeFactory.newHazelcastInstance(); final HazelcastInstance h3 = nodeFactory.newHazelcastInstance(); assertClusterSizeEventually(3, h1); IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME); for (int i = 0; i < 100; i++) { m1.put(i, i); } JobTracker tracker = h1.getJobTracker("default"); Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1)); ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new TimeConsumingMapper()).submit(); future.cancel(true); try { future.get(); fail("expected the cancelled job to throw CancellationException"); } catch (Exception e) { throw e; } }
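// Regression test for issue 1142: a MapStore whose loadAll returns null for known keys must not cause exceptions; subsequent puts and size() still work.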
@Test(timeout = 120000) public void testIssue1142ExceptionWhenLoadAllReturnsNull() { Config config = getConfig(); String mapname = "testIssue1142ExceptionWhenLoadAllReturnsNull"; MapStoreConfig mapStoreConfig = new MapStoreConfig(); mapStoreConfig.setImplementation( new MapStoreAdapter<String, String>() { @Override public Set<String> loadAllKeys() { Set<String> keys = new HashSet<String>(); keys.add("key"); return keys; } @Override public Map<String, String> loadAll(Collection<String> keys) { return null; } }); config.getMapConfig(mapname).setMapStoreConfig(mapStoreConfig); TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2); HazelcastInstance instance = nodeFactory.newHazelcastInstance(config); HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config); final IMap map = instance.getMap(mapname); for (int i = 0; i < 300; i++) { map.put(i, i); } assertEquals(300, map.size()); }
@Test(timeout = 30000) public void testMapperReducer() throws Exception { TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3); HazelcastInstance h1 = nodeFactory.newHazelcastInstance(); HazelcastInstance h2 = nodeFactory.newHazelcastInstance(); HazelcastInstance h3 = nodeFactory.newHazelcastInstance(); IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME); for (int i = 0; i < 100; i++) { m1.put(i, i); } JobTracker tracker = h1.getJobTracker("default"); Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1)); ICompletableFuture<Map<String, Integer>> future = job.mapper(new GroupingTestMapper()).reducer(new TestReducerFactory()).submit(); Map<String, Integer> result = future.get(); // Precalculate results int[] expectedResults = new int[4]; for (int i = 0; i < 100; i++) { int index = i % 4; expectedResults[index] += i; } for (int i = 0; i < 4; i++) { assertEquals(expectedResults[i], (int) result.get(String.valueOf(i))); } }
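// Write-behind ordering: each put is immediately followed by a remove of the same key, so once all pending stores and deletes are flushed the store must be empty.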
@Test(timeout = 120000) public void testMapStoreWriteRemoveOrder() { final String mapName = randomMapName("testMapStoreWriteDeleteOrder"); final int numIterations = 10; final int writeDelaySeconds = 3; // create map store implementation final RecordingMapStore store = new RecordingMapStore(0, 1); // create hazelcast config final Config config = newConfig(mapName, store, writeDelaySeconds); // start hazelcast instance final HazelcastInstance hzInstance = createHazelcastInstance(config); final IMap<String, String> map = hzInstance.getMap(mapName); // put and then immediately remove each entry for (int k = 0; k < numIterations; k++) { String key = String.valueOf(k + 10); // 2 digits for sorting in output String value = "v:" + key; // add entry map.put(key, value); // sleep briefly so the remove is scheduled after the put sleepMillis(1); // remove entry map.remove(key); } // wait for store to finish store.awaitStores(); // wait for remove to finish store.awaitRemoves(); assertEquals(0, store.getStore().size()); }
@Test public void testPut() throws InterruptedException { template.sendBodyAndHeader("direct:put", "my-foo", HazelcastConstants.OBJECT_ID, "4711"); assertTrue(map.containsKey("4711")); assertEquals("my-foo", map.get("4711")); }