@Test(timeout = 30000)
public void testMapperReducerCollator() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Integer> future = job.mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit(new TestCollator());

    int result = future.get();

    // Precalculate result
    int expectedResult = 0;
    for (int i = 0; i < 100; i++) {
        expectedResult += i;
    }

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResult, result);
    }
}
@Test(timeout = 30000)
public void testPartitionPostpone() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    assertClusterSizeEventually(3, h1);

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    KeyValueSource<Integer, Integer> kvs = KeyValueSource.fromMap(m1);
    KeyValueSource<Integer, Integer> wrapper = new MapKeyValueSourceAdapter<Integer, Integer>(kvs);
    Job<Integer, Integer> job = tracker.newJob(wrapper);
    ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new TestMapper()).submit();

    Map<String, List<Integer>> result = future.get();

    assertEquals(100, result.size());
    for (List<Integer> value : result.values()) {
        assertEquals(1, value.size());
    }
}
@Test(timeout = 30000)
public void testNullFromObjectCombiner() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker jobTracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = jobTracker.newJob(KeyValueSource.fromMap(m1));
    JobCompletableFuture<Map<String, BigInteger>> future = job.chunkSize(1)
            .mapper(new GroupingTestMapper())
            .combiner(new ObjectCombinerFactory())
            .reducer(new ObjectReducerFactory())
            .submit();

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 100; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    Map<String, BigInteger> map = future.get();
    for (int i = 0; i < 4; i++) {
        assertEquals(BigInteger.valueOf(expectedResults[i]), map.get(String.valueOf(i)));
    }
}
@Test(timeout = 30000)
public void testDataSerializableIntermediateObject() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker jobTracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = jobTracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Integer> future = job.mapper(new TestMapper())
            .combiner(new DataSerializableIntermediateCombinerFactory())
            .reducer(new DataSerializableIntermediateReducerFactory())
            .submit(new DataSerializableIntermediateCollator());

    // Precalculate result
    int expectedResult = 0;
    for (int i = 0; i < 100; i++) {
        expectedResult += i;
    }
    expectedResult = (int) ((double) expectedResult / 100);

    assertEquals(expectedResult, (int) future.get());
}
@Test(timeout = 30000)
public void testMapperReducer() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, Integer>> future = job.mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit();

    Map<String, Integer> result = future.get();

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 100; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResults[i], (int) result.get(String.valueOf(i)));
    }
}
@Test(timeout = 30000, expected = CancellationException.class)
public void testInProcessCancellation() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    assertClusterSizeEventually(3, h1);

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new TimeConsumingMapper()).submit();

    // Cancel while the time-consuming mapper is still running; get() must then
    // fail with the CancellationException expected by the @Test annotation
    future.cancel(true);

    try {
        Map<String, List<Integer>> result = future.get();
        fail();
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    }
}
@Override
public boolean open(NodeEngine nodeEngine) {
    // Fail the first two open attempts so the partition is postponed and retried
    // (exercised by testPartitionPostpone), then delegate to the wrapped source
    if (openCount < 2) {
        openCount++;
        return false;
    }
    return keyValueSource.open(nodeEngine);
}
@Test(timeout = 30000)
public void testAsyncMapperReducer() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    final Map<String, Integer> listenerResults = new HashMap<String, Integer>();
    final Semaphore semaphore = new Semaphore(1);
    semaphore.acquire();

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, Integer>> future = job.mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit();

    future.andThen(new ExecutionCallback<Map<String, Integer>>() {
        @Override
        public void onResponse(Map<String, Integer> response) {
            try {
                listenerResults.putAll(response);
            } finally {
                semaphore.release();
            }
        }

        @Override
        public void onFailure(Throwable t) {
            semaphore.release();
        }
    });

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 100; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    // Block until the callback has delivered the result
    semaphore.acquire();

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResults[i], (int) listenerResults.get(String.valueOf(i)));
    }
}
@Test(timeout = 30000)
public void testAsyncMapperReducerCollator() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    final int[] result = new int[1];
    final Semaphore semaphore = new Semaphore(1);
    semaphore.acquire();

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Integer> future = job.mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit(new TestCollator());

    future.andThen(new ExecutionCallback<Integer>() {
        @Override
        public void onResponse(Integer response) {
            try {
                result[0] = response.intValue();
            } finally {
                semaphore.release();
            }
        }

        @Override
        public void onFailure(Throwable t) {
            semaphore.release();
        }
    });

    // Precalculate result
    int expectedResult = 0;
    for (int i = 0; i < 100; i++) {
        expectedResult += i;
    }

    // Block until the callback has delivered the collated result
    semaphore.acquire();

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResult, result[0]);
    }
}
@Test(timeout = 60000)
public void testMapperReducerChunked() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    assertClusterSizeEventually(3, h1);

    final IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 10000; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    JobCompletableFuture<Map<String, Integer>> future = job.chunkSize(10)
            .mapper(new GroupingTestMapper())
            .reducer(new TestReducerFactory())
            .submit();

    final TrackableJob trackableJob = tracker.getTrackableJob(future.getJobId());
    final JobProcessInformation processInformation = trackableJob.getJobProcessInformation();
    Map<String, Integer> result = future.get();

    // Precalculate results
    int[] expectedResults = new int[4];
    for (int i = 0; i < 10000; i++) {
        int index = i % 4;
        expectedResults[index] += i;
    }

    for (int i = 0; i < 4; i++) {
        assertEquals(expectedResults[i], (int) result.get(String.valueOf(i)));
    }

    assertTrueEventually(new AssertTask() {
        @Override
        public void run() {
            if (processInformation.getProcessedRecords() < 10000) {
                System.err.println(processInformation.getProcessedRecords());
            }
            assertEquals(10000, processInformation.getProcessedRecords());
        }
    });
}
@Test(timeout = 30000)
public void testKeyedAsyncMapper() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    final Map<String, List<Integer>> listenerResults = new HashMap<String, List<Integer>>();
    final Semaphore semaphore = new Semaphore(1);
    semaphore.acquire();

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, List<Integer>>> future = job.onKeys(50)
            .mapper(new TestMapper())
            .submit();

    future.andThen(new ExecutionCallback<Map<String, List<Integer>>>() {
        @Override
        public void onResponse(Map<String, List<Integer>> response) {
            try {
                listenerResults.putAll(response);
            } finally {
                semaphore.release();
            }
        }

        @Override
        public void onFailure(Throwable t) {
            semaphore.release();
        }
    });

    // Block until the callback has delivered the result for the single key
    semaphore.acquire();

    assertEquals(1, listenerResults.size());
    for (List<Integer> value : listenerResults.values()) {
        assertEquals(1, value.size());
    }
}
@Test(timeout = 30000, expected = ExecutionException.class)
public void testExceptionDistributionWithCollator() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    final HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    final HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    assertClusterSizeEventually(3, h1);

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new ExceptionThrowingMapper())
            .submit(new Collator<Map.Entry<String, List<Integer>>, Map<String, List<Integer>>>() {
                @Override
                public Map<String, List<Integer>> collate(Iterable<Map.Entry<String, List<Integer>>> values) {
                    return null;
                }
            });

    try {
        Map<String, List<Integer>> result = future.get();
        fail();
    } catch (Exception e) {
        e.printStackTrace();
        assertTrue(e.getCause() instanceof NullPointerException);
        throw e;
    }
}
@Test(timeout = 30000)
public void testMapperComplexMapping() throws Exception {
    TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3);
    HazelcastInstance h1 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h2 = nodeFactory.newHazelcastInstance();
    HazelcastInstance h3 = nodeFactory.newHazelcastInstance();

    IMap<Integer, Integer> m1 = h1.getMap(MAP_NAME);
    for (int i = 0; i < 100; i++) {
        m1.put(i, i);
    }

    JobTracker tracker = h1.getJobTracker("default");
    Job<Integer, Integer> job = tracker.newJob(KeyValueSource.fromMap(m1));
    ICompletableFuture<Map<String, List<Integer>>> future = job.mapper(new GroupingTestMapper(2)).submit();

    Map<String, List<Integer>> result = future.get();

    assertEquals(1, result.size());
    assertEquals(25, result.values().iterator().next().size());
}
public static <K1, V1> KeyValueSource<K1, V1> fromMap(IMap<K1, V1> map) {
    return KeyValueSource.fromMap(map);
}

@Override
public K key() {
    return keyValueSource.key();
}

@Override
public Map.Entry<K, V> element() {
    return keyValueSource.element();
}

public static <V1> KeyValueSource<String, V1> fromList(IList<V1> list) {
    return KeyValueSource.fromList(list);
}

@Override
public boolean reset() {
    return keyValueSource.reset();
}

public static <V1> KeyValueSource<String, V1> fromSet(ISet<V1> set) {
    return KeyValueSource.fromSet(set);
}

@Override
public void close() throws IOException {
    keyValueSource.close();
}

@Override
public boolean isAllKeysSupported() {
    return keyValueSource.isAllKeysSupported();
}

public static <K1, V1> KeyValueSource<K1, V1> fromMultiMap(MultiMap<K1, V1> multiMap) {
    return KeyValueSource.fromMultiMap(multiMap);
}

@Override
public Collection<K> getAllKeys0() {
    return keyValueSource.getAllKeys0();
}

@Override
public boolean hasNext() {
    return keyValueSource.hasNext();
}