public NonBlockingIdentityHashMap<Long, TestKey> getMapMultithreaded()
    throws InterruptedException, ExecutionException {
  final int threadCount = _items.keySet().size();
  final NonBlockingIdentityHashMap<Long, TestKey> map =
      new NonBlockingIdentityHashMap<Long, TestKey>();

  // Use a barrier to open the gate for all threads at once, avoiding a rolling start
  // that would give no actual concurrency.
  final CyclicBarrier barrier = new CyclicBarrier(threadCount);
  final ExecutorService ex = Executors.newFixedThreadPool(threadCount);
  final CompletionService<Integer> co = new ExecutorCompletionService<Integer>(ex);
  for (Integer type : _items.keySet()) {
    // A linked list of things to insert.
    List<TestKey> items = _items.get(type);
    TestKeyFeederThread feeder = new TestKeyFeederThread(type, items, map, barrier);
    co.submit(feeder);
  }

  // Wait for all threads to return.
  int itemCount = 0;
  for (int retCount = 0; retCount < threadCount; retCount++) {
    final Future<Integer> result = co.take();
    itemCount += result.get();
  }
  ex.shutdown();
  return map;
}
public static void shutdown() {
  logger_.info("Shutting down ...");
  synchronized (MessagingService.class) {
    /* Stop listening on any socket */
    for (SelectionKey skey : listenSockets_.values()) {
      SelectorManager.getSelectorManager().cancel(skey);
    }
    listenSockets_.clear();

    /* Shut down the threads in the event queues */
    messageDeserializationExecutor_.shutdownNow();
    messageSerializerExecutor_.shutdownNow();
    messageDeserializerExecutor_.shutdownNow();
    streamExecutor_.shutdownNow();

    /* Shut down the cache tables */
    taskCompletionMap_.shutdown();
    callbackMap_.shutdown();

    /* Interrupt the selector manager thread */
    SelectorManager.getSelectorManager().interrupt();

    poolTable_.clear();
    verbHandlers_.clear();
    bShutdown_ = true;
  }
  logger_.debug("Shutdown invocation complete.");
}
@Override
public Boolean call() throws Exception {
  long timeoutMillis = 5000;
  try {
    getServersFile();
    getZkRunning();
    while (true) {
      while (!restartQueue.isEmpty()) {
        LOG.debug("Restart queue size [" + restartQueue.size() + "]");
        RestartHandler handler = restartQueue.poll();
        Future<ScriptContext> runner = pool.submit(handler);
        ScriptContext scriptContext = runner.get(); // blocking call
        if (scriptContext.getExitCode() != 0) restartQueue.add(handler);
      }
      try {
        Thread.sleep(timeoutMillis);
      } catch (InterruptedException e) {
        // Ignore and poll the restart queue again.
      }
    }
  } catch (Exception e) {
    e.printStackTrace();
    LOG.error(e);
    pool.shutdown();
    throw e;
  }
}
/** Run a basic task. */
@Test
public void testBasicTask() throws Exception {
  Callable<String> task = new BasicTestTask();
  ExecutorService executor = createSingleNodeExecutorService("testBasicTask");
  Future future = executor.submit(task);
  assertEquals(future.get(), BasicTestTask.RESULT);
}
// Concurrent insertion followed by an iterator test.
public static void testNonBlockingIdentityHashMapIterator() throws InterruptedException {
  final int ITEM_COUNT1 = 1000;
  final int THREAD_COUNT = 5;
  final int PER_CNT = ITEM_COUNT1 / THREAD_COUNT;
  final int ITEM_COUNT = PER_CNT * THREAD_COUNT; // fix round-off for odd thread counts
  NonBlockingIdentityHashMap<Long, TestKey> nbhml =
      new NonBlockingIdentityHashMap<Long, TestKey>();

  // Use a barrier to open the gate for all threads at once, avoiding a rolling start
  // that would give no actual concurrency.
  final CyclicBarrier barrier = new CyclicBarrier(THREAD_COUNT);
  final ExecutorService ex = Executors.newFixedThreadPool(THREAD_COUNT);
  final CompletionService<Object> co = new ExecutorCompletionService<Object>(ex);
  for (int i = 0; i < THREAD_COUNT; i++) {
    co.submit(new NBHMLFeeder(nbhml, PER_CNT, barrier, i * PER_CNT));
  }
  for (int retCount = 0; retCount < THREAD_COUNT; retCount++) {
    co.take();
  }
  ex.shutdown();

  assertEquals("values().size()", ITEM_COUNT, nbhml.values().size());
  assertEquals("entrySet().size()", ITEM_COUNT, nbhml.entrySet().size());
  int itemCount = 0;
  for (TestKey K : nbhml.values()) itemCount++;
  assertEquals("values().iterator() count", ITEM_COUNT, itemCount);
}
public static void main(String[] args) throws Exception {
  ExecutorService exec = Executors.newCachedThreadPool();
  for (int i = 0; i < 5; i++) exec.execute(new Task());
  exec.execute(new Task2());
  Timer timer = new Timer();
  timer.scheduleAtFixedRate(
      new TimerTask() {
        boolean prod = true;

        public void run() {
          if (prod) { // Alternate between notify() and notifyAll()
            System.out.print("\nnotify() ");
            Task.blocker.prod();
            prod = false;
          } else {
            System.out.print("\nnotifyAll() ");
            Task.blocker.prodAll();
            prod = true;
          }
        }
      },
      400,
      400); // Run every 0.4 seconds
  TimeUnit.SECONDS.sleep(5); // Run for a while...
  timer.cancel();
  System.out.println("\nTimer canceled");
  TimeUnit.MILLISECONDS.sleep(500);
  System.out.print("Task2.blocker.prodAll() ");
  Task2.blocker.prodAll();
  TimeUnit.MILLISECONDS.sleep(500);
  System.out.println("\nShutting down");
  exec.shutdownNow(); // Interrupt all tasks
}
/** Execute a task that executes another task inside it (nested execution). */
@Test(timeout = 10000)
public void testNestedExecution() throws Exception {
  Callable<String> task = new NestedExecutorTask();
  ExecutorService executor = createSingleNodeExecutorService("testNestedExecution");
  Future future = executor.submit(task);
  future.get();
}
public static <T> List<T> executeAndGetResult(
    Collection<? extends Callable<T>> tasks, long waitingTimeInMillis) {
  ExecutorService executor = Executors.newFixedThreadPool(tasks.size());
  List<Future<T>> temporaryResults = new ArrayList<Future<T>>(tasks.size());
  try {
    for (Callable<T> task : tasks) {
      Future<T> future = executor.submit(task);
      temporaryResults.add(future);
    }
    // shutdown() must come before awaitTermination(); otherwise the pool never reaches the
    // terminated state and this call simply waits out the full timeout.
    executor.shutdown();
    executor.awaitTermination(waitingTimeInMillis, TimeUnit.MILLISECONDS);
  } catch (InterruptedException e) {
    throw new RuntimeException(e);
  } finally {
    executor.shutdown();
  }
  List<T> finalResults = new ArrayList<T>(temporaryResults.size());
  for (Future<T> result : temporaryResults) {
    try {
      finalResults.add(result.get());
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    } catch (ExecutionException e) {
      throw new RuntimeException(e);
    }
  }
  return finalResults;
}
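// Hedged aside (not part of the original snippet): assuming it is acceptable to block until all
// results are available, ExecutorService.invokeAll() expresses the same idea more directly, since
// it submits, waits, and returns the futures in one call. The method name below is hypothetical.
public static <T> List<T> executeAndGetResultViaInvokeAll(
    Collection<? extends Callable<T>> tasks, long waitingTimeInMillis) throws Exception {
  ExecutorService executor = Executors.newFixedThreadPool(tasks.size());
  try {
    List<T> results = new ArrayList<T>(tasks.size());
    // invokeAll() blocks until every task completes or the timeout elapses; tasks that time out
    // are cancelled, and get() on their futures throws CancellationException.
    for (Future<T> f : executor.invokeAll(tasks, waitingTimeInMillis, TimeUnit.MILLISECONDS)) {
      results.add(f.get());
    }
    return results;
  } finally {
    executor.shutdown();
  }
}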
@Test
public void testInvokeAllTimeoutCancelled() throws Exception {
  ExecutorService executor = createSingleNodeExecutorService("testInvokeAll");
  assertFalse(executor.isShutdown());

  // Only one task
  ArrayList<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();
  tasks.add(new CancellationAwareTask(0));
  List<Future<Boolean>> futures = executor.invokeAll(tasks, 5, TimeUnit.SECONDS);
  assertEquals(futures.size(), 1);
  assertEquals(futures.get(0).get(), Boolean.TRUE);

  // More tasks
  tasks.clear();
  for (int i = 0; i < COUNT; i++) {
    tasks.add(new CancellationAwareTask(i < 2 ? 0 : 20000));
  }
  futures = executor.invokeAll(tasks, 5, TimeUnit.SECONDS);
  assertEquals(futures.size(), COUNT);
  for (int i = 0; i < COUNT; i++) {
    if (i < 2) {
      assertEquals(futures.get(i).get(), Boolean.TRUE);
    } else {
      boolean excepted = false;
      try {
        futures.get(i).get();
      } catch (CancellationException e) {
        excepted = true;
      }
      assertTrue(excepted);
    }
  }
}
public static void main(String[] args) {
  ExecutorService pool = Executors.newFixedThreadPool(2);
  // Two vectors that refer to each other, forming a cyclic object graph.
  final Vector[] vecs = {
    new Vector(), new Vector(),
  };
  vecs[0].add(vecs[1]);
  vecs[1].add(vecs[0]);
  for (int i = 0; i < 2; i++) {
    final int threadNumber = i;
    pool.submit(
        new Callable() {
          public Object call() throws Exception {
            // Repeatedly serialize the cyclic graph concurrently with the other thread.
            for (int i = 0; i < 1000 * 1000; i++) {
              ObjectOutputStream out = new ObjectOutputStream(new NullOutputStream());
              out.writeObject(vecs[threadNumber]);
              out.close();
            }
            System.out.println("done");
            return null;
          }
        });
  }
  pool.shutdown(); // Let the JVM exit once both tasks finish.
}
private int[] computeClusterNumberRange(int min, int max, int hop, Optimizer optimizer)
    throws ExecutionException, InterruptedException {
  ExecutorService executor;
  if (parallelWorkers > 1) {
    executor = Executors.newFixedThreadPool(2);
  } else {
    executor = Executors.newFixedThreadPool(1);
  }
  ConcurrentMap<Integer, Double> sharedScores = new ConcurrentHashMap<>();
  SearchSpaceBoundaryFinder_old upperBoundFinder =
      new SearchSpaceUpperBoundaryFinder_old(min, max, hop, optimizer, sharedScores);
  Future<Integer> futureUpperBound = executor.submit(upperBoundFinder);
  SearchSpaceBoundaryFinder_old lowerBoundFinder =
      new SearchSpaceLowerBoundaryFinder_old(min, max, hop, optimizer, sharedScores);
  Future<Integer> futureLowerBound = executor.submit(lowerBoundFinder);
  executor.shutdown();

  int[] r = new int[2];
  Integer realMin = futureLowerBound.get();
  Integer realMax = futureUpperBound.get();
  r[0] = realMin == null ? -1 : realMin;
  r[1] = realMax == null ? -1 : realMax;
  scores.putAll(lowerBoundFinder.getSplitsAndScores());
  // scores.putAll(upperBoundFinder.getSplitsAndScores());
  return r;
}
public <T> Collection<T> getParallelResults(List<Node<T>> nodes) throws InterruptedException {
  ExecutorService exec = Executors.newCachedThreadPool();
  Queue<T> resultQueue = new ConcurrentLinkedQueue<T>();
  parallelRecursive(exec, nodes, resultQueue);
  exec.shutdown();
  exec.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
  return resultQueue;
}
public static void main(String[] args) throws Exception {
  ExecutorService exec = Executors.newCachedThreadPool();
  for (int i = 0; i < N_ELEMENTS; i++)
    for (int j = 0; j < N_GENES; j++) GRID[i][j] = new AtomicInteger(rand.nextInt(1000));
  for (int i = 0; i < N_EVOLVERS; i++) exec.execute(new Evolver());
  TimeUnit.SECONDS.sleep(5);
  exec.shutdownNow();
}
@Test
public void testPostregisteredExecutionCallbackCompletableFuture() throws Exception {
  HazelcastInstanceProxy proxy = (HazelcastInstanceProxy) createHazelcastInstance();
  Field originalField = HazelcastInstanceProxy.class.getDeclaredField("original");
  originalField.setAccessible(true);
  HazelcastInstanceImpl hz = (HazelcastInstanceImpl) originalField.get(proxy);
  NodeEngine nodeEngine = hz.node.nodeEngine;
  ExecutionService es = nodeEngine.getExecutionService();

  final CountDownLatch latch1 = new CountDownLatch(1);
  final CountDownLatch latch2 = new CountDownLatch(1);
  final ExecutorService executorService = Executors.newSingleThreadExecutor();
  try {
    Future future =
        executorService.submit(
            new Callable<String>() {
              @Override
              public String call() {
                try {
                  return "success";
                } finally {
                  latch1.countDown();
                }
              }
            });
    final ICompletableFuture completableFuture = es.asCompletableFuture(future);
    latch1.await(30, TimeUnit.SECONDS);

    final AtomicReference reference = new AtomicReference();
    completableFuture.andThen(
        new ExecutionCallback() {
          @Override
          public void onResponse(Object response) {
            reference.set(response);
            latch2.countDown();
          }

          @Override
          public void onFailure(Throwable t) {
            reference.set(t);
            latch2.countDown();
          }
        });
    latch2.await(30, TimeUnit.SECONDS);
    if (reference.get() instanceof Throwable) {
      ((Throwable) reference.get()).printStackTrace();
    }
    assertEquals("success", reference.get());
  } finally {
    executorService.shutdown();
  }
}
public static void main(String[] args) {
  Random rand = new Random(47);
  ExecutorService exec = Executors.newCachedThreadPool();
  DelayQueue<DelayedTask> queue = new DelayQueue<DelayedTask>();
  // Fill with tasks that have random delays:
  for (int i = 0; i < 20; i++) queue.put(new DelayedTask(rand.nextInt(5000)));
  // Set the stopping point:
  queue.add(new DelayedTask.EndSentinel(5000, exec));
  exec.execute(new DelayedTaskConsumer(queue));
}
private void executeConcurrentRandomReadAndWriteOperations()
    throws InterruptedException, ExecutionException {
  for (int i = 0; i < THREAD_COUNT; i++) {
    futures.add(executorService.submit(new Writer()));
  }
  for (int i = 0; i < THREAD_COUNT; i++) {
    futures.add(executorService.submit(new Reader()));
  }
  for (Future<Void> future : futures) future.get();
}
public static void main(String[] args) throws Exception {
  if (args.length > 0) size = new Integer(args[0]);
  if (args.length > 1) delay = new Integer(args[1]);
  ExecutorService exec = Executors.newCachedThreadPool();
  Exchanger<List<Fat>> xc = new Exchanger<List<Fat>>();
  List<Fat> producerList = new CopyOnWriteArrayList<Fat>(),
      consumerList = new CopyOnWriteArrayList<Fat>();
  exec.execute(new ExchangerProducer<Fat>(xc, BasicGenerator.create(Fat.class), producerList));
  exec.execute(new ExchangerConsumer<Fat>(xc, consumerList));
  TimeUnit.SECONDS.sleep(delay);
  exec.shutdownNow();
}
public static void main(String[] args) throws Exception {
  ExecutorService exec = Executors.newCachedThreadPool();
  Restaurant restaurant = new Restaurant(exec, 5, 2);
  exec.execute(restaurant);
  if (args.length > 0) // Optional argument
    TimeUnit.SECONDS.sleep(new Integer(args[0]));
  else {
    print("Press 'Enter' to quit");
    System.in.read();
  }
  exec.shutdownNow();
}
private synchronized void shutdown() {
  if (!exe.isShutdown()) {
    freeCUObjectsMemory();
  }
  exe.shutdown();
  try {
    System.err.println(
        "cuda device " + Id + " freed ? " + exe.awaitTermination(10, TimeUnit.SECONDS));
  } catch (InterruptedException e) {
    e.printStackTrace();
  }
}
public Restaurant(ExecutorService e, int nWaitPersons, int nChefs) {
  exec = e;
  for (int i = 0; i < nWaitPersons; i++) {
    WaitPerson waitPerson = new WaitPerson(this);
    waitPersons.add(waitPerson);
    exec.execute(waitPerson);
  }
  for (int i = 0; i < nChefs; i++) {
    Chef chef = new Chef(this);
    chefs.add(chef);
    exec.execute(chef);
  }
}
/** Test for issue 129. Repeatedly runs tasks and checks the isDone() status after get(). */
@Test
public void testIsDoneMethod2() throws Exception {
  ExecutorService executor = createSingleNodeExecutorService("isDoneMethod2");
  for (int i = 0; i < COUNT; i++) {
    Callable<String> task1 = new BasicTestTask();
    Callable<String> task2 = new BasicTestTask();
    Future future1 = executor.submit(task1);
    Future future2 = executor.submit(task2);
    assertEquals(future2.get(), BasicTestTask.RESULT);
    assertTrue(future2.isDone());
    assertEquals(future1.get(), BasicTestTask.RESULT);
    assertTrue(future1.isDone());
  }
}
public static boolean init() {
  synchronized (cudaEngines) {
    System.err.println("---------Initializing Cuda----------------");
    try {
      extractAndLoadNativeLibs();
      JCudaDriver.setExceptionsEnabled(true);
      JCudaDriver.cuInit(0);
      compileKernelsPtx();
      // Obtain the number of devices
      int deviceCountArray[] = {0};
      JCudaDriver.cuDeviceGetCount(deviceCountArray);
      availableDevicesNb = deviceCountArray[0];
      if (availableDevicesNb == 0) return false;
      availableDevicesNb = NB_OF_DEVICE_TO_USE; // TODO
      initialization = Executors.newCachedThreadPool();
      System.out.println("Found " + availableDevicesNb + " GPU devices");
      for (int i = 0 /*-NB_OF_DEVICE_TO_USE*/; i < availableDevicesNb; i++) {
        final int index = i;
        Future<?> initJob =
            initialization.submit(
                new Runnable() {
                  public void run() {
                    System.err.println("Initializing device n°" + index);
                    cudaEngines.put(index, new CudaEngine(index));
                  }
                });
        initJob.get();
      }
      // Shut down only after all devices have been initialized; shutting down inside the
      // loop would reject the remaining init jobs.
      initialization.shutdown();
    } catch (InterruptedException
        | ExecutionException
        | IOException
        | CudaException
        | UnsatisfiedLinkError e) {
      e.printStackTrace();
      System.err.println("---------Cannot initialize Cuda !!! ----------------");
      return false;
    }
    Runtime.getRuntime()
        .addShutdownHook(
            new Thread() {
              @Override
              public void run() {
                CudaEngine.stop();
              }
            });
    System.out.println("---------Cuda Initialized----------------");
    return true;
  }
}
/** Shutting down the cluster should behave like an ExecutorService shutdown. */
@Test(expected = RejectedExecutionException.class)
public void testClusterShutdown() throws Exception {
  ExecutorService executor = createSingleNodeExecutorService("testClusterShutdown");
  shutdownNodeFactory();
  Thread.sleep(2000);

  assertNotNull(executor);
  assertTrue(executor.isShutdown());
  assertTrue(executor.isTerminated());

  // New tasks must be rejected
  Callable<String> task = new BasicTestTask();
  executor.submit(task);
}
@Override
public Board solve(Board in) {
  if (useBacktracking) {
    return solve_mdfs(in);
  } else {
    if (nThreads == 1) {
      return solve_ldfs(in);
    } else {
      ExecutorService pool = Executors.newFixedThreadPool(nThreads);
      Board ret = solve_pndfs(in, pool);
      pool.shutdown();
      return ret;
    }
  }
}
public static void main(String[] args) throws Exception {
  ExecutorService exec = Executors.newCachedThreadPool();
  // If the line is too long, customers will leave:
  CustomerLine customers = new CustomerLine(MAX_LINE_SIZE);
  exec.execute(new CustomerGenerator(customers));
  // Manager will add and remove tellers as necessary:
  exec.execute(new TellerManager(exec, customers, ADJUSTMENT_PERIOD));
  if (args.length > 0) // Optional argument
    TimeUnit.SECONDS.sleep(new Integer(args[0]));
  else {
    System.out.println("Press 'Enter' to quit");
    System.in.read();
  }
  exec.shutdownNow();
}
public static void execute(Collection<? extends Runnable> tasks, long waitingTimeInMillis) {
  ExecutorService executor = Executors.newFixedThreadPool(tasks.size());
  for (Runnable task : tasks) {
    executor.execute(task);
  }
  try {
    // shutdown() must come before awaitTermination(); otherwise the pool never reaches the
    // terminated state and this call simply waits out the full timeout.
    executor.shutdown();
    executor.awaitTermination(waitingTimeInMillis, TimeUnit.MILLISECONDS);
  } catch (InterruptedException e) {
    throw new RuntimeException(e);
  } finally {
    executor.shutdown();
  }
}
/** {@inheritDoc} */
@Override
public void loadCache(GridBiInClosure<K, V> c, @Nullable Object... args) throws GridException {
  ExecutorService exec =
      new ThreadPoolExecutor(
          threadsCnt,
          threadsCnt,
          0L,
          MILLISECONDS,
          new ArrayBlockingQueue<Runnable>(batchQueueSize),
          new BlockingRejectedExecutionHandler());

  Iterator<I> iter = inputIterator(args);
  Collection<I> buf = new ArrayList<>(batchSize);

  try {
    while (iter.hasNext()) {
      if (Thread.currentThread().isInterrupted()) {
        U.warn(log, "Working thread was interrupted while loading data.");
        break;
      }

      buf.add(iter.next());

      if (buf.size() == batchSize) {
        exec.submit(new Worker(c, buf, args));
        buf = new ArrayList<>(batchSize);
      }
    }

    if (!buf.isEmpty()) exec.submit(new Worker(c, buf, args));
  } catch (RejectedExecutionException ignored) {
    // Because of custom RejectedExecutionHandler.
    assert false : "RejectedExecutionException was thrown while it shouldn't.";
  } finally {
    exec.shutdown();

    try {
      exec.awaitTermination(Long.MAX_VALUE, MILLISECONDS);
    } catch (InterruptedException ignored) {
      U.warn(log, "Working thread was interrupted while waiting for put operations to complete.");
      Thread.currentThread().interrupt();
    }
  }
}
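// Hedged aside: the snippet above relies on a custom BlockingRejectedExecutionHandler so that
// submissions block instead of failing when the bounded queue is full. That class is not shown
// here; a minimal sketch of such a handler (hypothetical, not the project's actual code) could
// look like this:
class BlockingRejectedExecutionHandlerSketch implements RejectedExecutionHandler {
  @Override
  public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
    try {
      if (!executor.isShutdown()) {
        // Block until the work queue has room, instead of throwing RejectedExecutionException.
        executor.getQueue().put(r);
      }
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new RejectedExecutionException("Interrupted while waiting for queue space", e);
    }
  }
}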
/**
 * Somewhat multi-threaded depth-first search. Performs a DFS of the subtrees from the current
 * node in parallel.
 *
 * @param b The board to search
 * @param pool The thread pool to submit jobs to
 * @return The board with the highest evaluation, or null if no board can continue
 */
private Board solve_pdfs(Board b, ExecutorService pool) {
  List<Future<Board>> rets = new ArrayList<>(BOARD_WIDTH);
  Board best = null;
  int best_score = -1;

  for (Direction d : directions) {
    Board n = new Board(b);
    if (n.move(tileSequence, d)) {
      rets.add(pool.submit(new ParallelDFS(n)));
    }
  }

  for (Future<Board> ret : rets) {
    try {
      Board c = ret.get();
      if (c != null) {
        int score = evaluate(c);
        if (score > best_score) {
          best = c;
          best_score = score;
        }
      }
    } catch (InterruptedException | ExecutionException e) {
      System.err.println("Error: " + e.getMessage());
    }
  }
  return best;
}
/** {@inheritDoc} */
@Override
protected void maybeStartStream() throws IOException {
  // connector
  final StreamConnector connector = getStreamConnector();
  if (connector == null) return;

  synchronized (this) {
    if (started) return;

    threadPool.execute(
        new Runnable() {
          @Override
          public void run() {
            try {
              Sctp.init();
              runOnDtlsTransport(connector);
            } catch (IOException e) {
              logger.error(e, e);
            } finally {
              try {
                Sctp.finish();
              } catch (IOException e) {
                logger.error("Failed to shutdown SCTP stack", e);
              }
            }
          }
        });

    started = true;
  }
}
/**
 * Executes the specified runnable on the worker thread of the view. Execution is performed
 * sequentially, in the same order in which the runnables were passed to this method.
 */
@Override
public void execute(Runnable worker) {
  if (executor == null) {
    // Lazily create the worker executor; assumes execute() is always called from the same thread.
    executor = Executors.newSingleThreadExecutor();
  }
  executor.execute(worker);
}
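// Hedged aside (assumption, not from the original class): if execute() can be reached from
// multiple threads, the unsynchronized lazy initialization above is racy. Creating the
// single-thread executor eagerly, as sketched below, avoids the race while preserving the
// sequential execution guarantee. The field name is hypothetical.
private final ExecutorService workerExecutor = Executors.newSingleThreadExecutor();

@Override
public void execute(Runnable worker) {
  workerExecutor.execute(worker);
}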