@Test(timeout = 30000)
public void testDataSerializableIntermediateObject() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker jobTracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = jobTracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Integer> future = job
            .mapper(new TestMapper())
            .combiner(new DataSerializableIntermediateCombinerFactory())
            .reducer(new DataSerializableIntermediateReducerFactory())
            .submit(new DataSerializableIntermediateCollator());

    // Precalculate result
    int expectedResult = 0;
    for (int i = 0; i < 100; i++) {
        expectedResult += i;
    }
    expectedResult = (int) ((double) expectedResult / 100);

    assertEquals(expectedResult, (int) future.get());
}
@Test(timeout = 30000)
public void testMapperReducerCollator() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Integer> future = job
            .mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit(new TestCollator());

    int result = future.get();

    // Precalculate result: the collator sums all group sums, i.e. the sum of 0..99
    int expectedResult = 0;
    for (int i = 0; i < 100; i++) {
        expectedResult += i;
    }

    assertEquals(expectedResult, result);
}
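The helper classes referenced above (GroupingTestMapper, TestReducerFactory, TestCollator) are defined elsewhere in the test suite. Below is a minimal sketch of plausible implementations, inferred only from the expected-result arithmetic in these tests (values grouped by key % 4, summed per group, group totals summed by the collator); the real classes may differ.

// Sketch only - assumed implementations, inferred from the assertions in the surrounding tests.
// Assumes the com.hazelcast.mapreduce types Mapper, Context, ReducerFactory, Reducer and Collator.
public static class GroupingTestMapper implements Mapper<Integer, Integer, String, Integer> {

    // -1 means "emit every group"; otherwise only the bucket key % 4 == moduleKey is emitted
    // (the int-arg constructor matches the new GroupingTestMapper(2) usage further below)
    private int moduleKey = -1;

    public GroupingTestMapper() {
    }

    public GroupingTestMapper(int moduleKey) {
        this.moduleKey = moduleKey;
    }

    @Override
    public void map(Integer key, Integer value, Context<String, Integer> context) {
        if (moduleKey == -1 || (key % 4) == moduleKey) {
            context.emit(String.valueOf(key % 4), value);
        }
    }
}

public static class TestReducerFactory implements ReducerFactory<String, Integer, Integer> {

    @Override
    public Reducer<Integer, Integer> newReducer(String key) {
        return new TestReducer();
    }
}

public static class TestReducer extends Reducer<Integer, Integer> {

    private int sum;

    @Override
    public void reduce(Integer value) {
        sum += value;
    }

    @Override
    public Integer finalizeReduce() {
        return sum;
    }
}

public static class TestCollator implements Collator<Map.Entry<String, Integer>, Integer> {

    @Override
    public Integer collate(Iterable<Map.Entry<String, Integer>> values) {
        int sum = 0;
        for (Map.Entry<String, Integer> entry : values) {
            sum += entry.getValue();
        }
        return sum;
    }
}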
@Test(timeout = 30000)
public void testNullFromObjectCombiner() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker jobTracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = jobTracker.newJob(KeyValueSource.fromMap(m1));
    JobCompletableFuture<Map<String, BigInteger>> future = job
            .chunkSize(1)
            .mapper(new GroupingTestMapper())
            .combiner(new ObjectCombinerFactory())
            .reducer(new ObjectReducerFactory())
            .submit();

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 100; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    Map<String, BigInteger> map = future.get();
    for (int i = 0; i < 4; i++) {
        assertEquals(BigInteger.valueOf(expectedResults[i]), map.get(String.valueOf(i)));
    }
}
@Test(timeout = 30000)
public void testPartitionPostpone() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();
    assertClusterSizeEventually(3, h1);

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    KeyValueSource<Integer, Integer> kvs = KeyValueSource.fromMap(m1);
    KeyValueSource<Integer, Integer> wrapper = new MapKeyValueSourceAdapter<Integer, Integer>(kvs);
    Job<Integer, Integer> job = tracker.newJob(wrapper);
    ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new TestMapper()).submit();

    Map<String, List<Integer>> result = future.get();

    assertEquals(100, result.size());
    for (List<Integer> value : result.values()) {
        assertEquals(1, value.size());
    }
}
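TestMapper is likewise not shown in this section. The following is a plausible minimal sketch consistent with the assertions above (each of the 100 entries surfaces under its own stringified key with exactly one value); it is not necessarily the suite's actual implementation.

// Sketch only - assumed identity-style mapper: one emitted key per input entry.
public static class TestMapper implements Mapper<Integer, Integer, String, Integer> {

    @Override
    public void map(Integer key, Integer value, Context<String, Integer> context) {
        context.emit(String.valueOf(key), value);
    }
}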
@Test(timeout = 30000)
public void testMapperReducer() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, Integer>> future = job
            .mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit();

    Map<String, Integer> result = future.get();

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 100; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResults[i], (int) result.get(String.valueOf(i)));
    }
}
@Test(timeout = 30000, expected = CancellationException.class)
public void testInProcessCancellation() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();
    assertClusterSizeEventually(3, h1);

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new TimeConsumingMapper()).submit();

    future.cancel(true);

    try {
        future.get();
        fail();
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    }
}
@Test(timeout = 60000)
public void testMapReduceWithCustomKeyValueSource() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();
    assertClusterSizeEventually(3, h1);
    assertClusterSizeEventually(3, h2);
    assertClusterSizeEventually(3, h3);

    JobTracker jobTracker = h1.getJobTracker("default");
    Job<String, Integer> job = jobTracker.newJob(new CustomKeyValueSource());
    ICompletableFuture<Map<String, Integer>> completableFuture = job
            .chunkSize(10)
            .mapper(new CustomMapper())
            .combiner(new CustomCombinerFactory())
            .reducer(new CustomReducerFactory())
            .submit();

    Map<String, Integer> result = completableFuture.get();

    assertEquals(1000, result.size());

    List<Map.Entry<String, Integer>> entrySet = new ArrayList<Map.Entry<String, Integer>>(result.entrySet());
    Collections.sort(entrySet, ENTRYSET_COMPARATOR);

    int count = 0;
    for (Map.Entry<String, Integer> entry : entrySet) {
        assertEquals(String.valueOf(count), entry.getKey());
        assertEquals(count++ * 6, (int) entry.getValue());
    }
}
@Override
public <SuppliedValue, Result> Result aggregate(
        Supplier<K, V, SuppliedValue> supplier,
        Aggregation<K, SuppliedValue, Result> aggregation) {
    HazelcastInstance hazelcastInstance = getContext().getHazelcastInstance();
    JobTracker jobTracker = hazelcastInstance.getJobTracker("hz::aggregation-multimap-" + name);
    return aggregate(supplier, aggregation, jobTracker);
}
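For context, a hedged usage sketch of the aggregation entry point implemented above, assuming the Hazelcast 3.x com.hazelcast.mapreduce.aggregation API (Supplier.all(), Aggregations.integerSum()); the MultiMap name "scores" is made up.

// Usage sketch (assumed API, made-up names): sums all Integer values of a MultiMap
// through the map-reduce backed aggregation shown above.
MultiMap<String, Integer> scores = hazelcastInstance.getMultiMap("scores");
int total = scores.aggregate(Supplier.<String, Integer>all(), Aggregations.integerSum());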
@Test(timeout = 30000)
public void testAsyncMapperReducer() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    final Map<String, Integer> listenerResults = new HashMap<String, Integer>();
    final Semaphore semaphore = new Semaphore(1);
    semaphore.acquire();

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, Integer>> future = job
            .mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit();

    future.andThen(new ExecutionCallback<Map<String, Integer>>() {
        @Override
        public void onResponse(Map<String, Integer> response) {
            try {
                listenerResults.putAll(response);
            } finally {
                semaphore.release();
            }
        }

        @Override
        public void onFailure(Throwable t) {
            semaphore.release();
        }
    });

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 100; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    semaphore.acquire();

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResults[i], (int) listenerResults.get(String.valueOf(i)));
    }
}
@Test(timeout = 30000)
public void testAsyncMapperReducerCollator() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    final int[] result = new int[1];
    final Semaphore semaphore = new Semaphore(1);
    semaphore.acquire();

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Integer> future = job
            .mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit(new TestCollator());

    future.andThen(new ExecutionCallback<Integer>() {
        @Override
        public void onResponse(Integer response) {
            try {
                result[0] = response.intValue();
            } finally {
                semaphore.release();
            }
        }

        @Override
        public void onFailure(Throwable t) {
            semaphore.release();
        }
    });

    // Precalculate result
    int expectedResult = 0;
    for (int i = 0; i < 100; i++) {
        expectedResult += i;
    }

    semaphore.acquire();

    assertEquals(expectedResult, result[0]);
}
@Test(timeout = 60000)
public void testMapperReducerChunked() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();
    assertClusterSizeEventually(3, h1);

    final IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 10000; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    JobCompletableFuture<Map<String, Integer>> future = job
            .chunkSize(10)
            .mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit();

    final TrackableJob trackableJob = tracker.getTrackableJob(future.getJobId());
    final JobProcessInformation processInformation = trackableJob.getJobProcessInformation();
    Map<String, Integer> result = future.get();

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 10000; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResults[i], (int) result.get(String.valueOf(i)));
    }

    assertTrueEventually(new AssertTask() {
        @Override
        public void run() {
            if (processInformation.getProcessedRecords() < 10000) {
                System.err.println(processInformation.getProcessedRecords());
            }
            assertEquals(10000, processInformation.getProcessedRecords());
        }
    });
}
@Test(timeout = 30000)
public void testKeyedAsyncMapper() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    final Map<String, List<Integer>> listenerResults = new HashMap<String, List<Integer>>();
    final Semaphore semaphore = new Semaphore(1);
    semaphore.acquire();

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, List<Integer>>> future = job
            .onKeys(50)
            .mapper(new TestMapper())
            .submit();

    future.andThen(new ExecutionCallback<Map<String, List<Integer>>>() {
        @Override
        public void onResponse(Map<String, List<Integer>> response) {
            try {
                listenerResults.putAll(response);
            } finally {
                semaphore.release();
            }
        }

        @Override
        public void onFailure(Throwable t) {
            semaphore.release();
        }
    });

    semaphore.acquire();

    assertEquals(1, listenerResults.size());
    for (List<Integer> value : listenerResults.values()) {
        assertEquals(1, value.size());
    }
}
@Test(timeout = 60000)
public void testMapReduceWithList() throws Exception {
    Config config = buildConfig();

    HazelcastInstance h1 = hazelcastFactory.newHazelcastInstance(config);
    HazelcastInstance h2 = hazelcastFactory.newHazelcastInstance(config);
    HazelcastInstance h3 = hazelcastFactory.newHazelcastInstance(config);
    assertClusterSizeEventually(3, h1);
    assertClusterSizeEventually(3, h2);
    assertClusterSizeEventually(3, h3);

    HazelcastInstance client = hazelcastFactory.newHazelcastClient();

    int expectedResult = 0;
    IList<Integer> list = h1.getList("default");
    for (int o = 0; o < 100; o++) {
        list.add(o);
        expectedResult += o;
    }

    JobTracker jobTracker = client.getJobTracker("default");
    Job<String, Integer> job = jobTracker.newJob(KeyValueSource.fromList(list));
    ICompletableFuture<Map<String, Integer>> future = job
            .chunkSize(10)
            .mapper(new ListSetMapReduceTest.ListSetMapper())
            .combiner(new ListSetMapReduceTest.ListSetCombinerFactory())
            .reducer(new ListSetMapReduceTest.ListSetReducerFactory())
            .submit();

    Map<String, Integer> result = future.get();

    assertEquals(1, result.size());
    for (Map.Entry<String, Integer> entry : result.entrySet()) {
        assertEquals(list.getName(), entry.getKey());
        assertEquals(expectedResult, (int) entry.getValue());
    }
}
@Test(timeout = 30000, expected = ExecutionException.class)
public void testExceptionDistributionWithCollator() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();
    assertClusterSizeEventually(3, h1);

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, List<Integer>>> future = job
            .mapper(new ExceptionThrowingMapper())
            .submit(new Collator<Map.Entry<String, List<Integer>>, Map<String, List<Integer>>>() {
                @Override
                public Map<String, List<Integer>> collate(Iterable<Map.Entry<String, List<Integer>>> values) {
                    return null;
                }
            });

    try {
        future.get();
        fail();
    } catch (Exception e) {
        e.printStackTrace();
        assertTrue(e.getCause() instanceof NullPointerException);
        throw e;
    }
}
@Test(timeout = 30000)
public void testMapperComplexMapping() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new GroupingTestMapper(2)).submit();

    Map<String, List<Integer>> result = future.get();

    // The mapper parameterized with 2 emits a single key group holding 25 of the 100 values
    assertEquals(1, result.size());
    assertEquals(25, result.values().iterator().next().size());
}