public static void main(String[] args) {
  ExecutorService service = Executors.newFixedThreadPool(10);
  String[] downloadLinksArray = {
    "http://releases.ubuntu.com/14.04.2/ubuntu-14.04.2-desktop-amd64.iso",
    "http://releases.ubuntu.com/14.10/ubuntu-14.10-desktop-amd64.iso",
    "http://releases.ubuntu.com/15.04/ubuntu-15.04-desktop-amd64.iso",
    "http://asdfadsfasdfasdfafdsfasdfa.com"
  };
  for (int i = 0; i < downloadLinksArray.length; i++) {
    String url = downloadLinksArray[i];
    Runnable worker = new MyRunnable(url, "download" + i + ".iso");
    service.execute(worker);
  }
  service.shutdown();
  // Wait until all threads are finished
  while (!service.isTerminated()) {}
  System.out.println("\nFinished all threads");
}
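/*
 * A minimal alternative sketch for the wait above: instead of spinning on
 * isTerminated(), block with awaitTermination(). Assumes the same "service"
 * executor and MyRunnable workers; the one-hour bound is a made-up example value.
 */
service.shutdown();
try {
  if (!service.awaitTermination(1, TimeUnit.HOURS)) {
    service.shutdownNow(); // timed out: interrupt whatever is still running
  }
} catch (InterruptedException e) {
  service.shutdownNow();
  Thread.currentThread().interrupt(); // preserve the interrupt status
}
System.out.println("\nFinished all threads");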
/*
 * Function to create the thread pool and assign a task to each worker thread.
 */
public void init() throws IOException {
  ExecutorService executor = Executors.newFixedThreadPool(noOfThreads); // creating a pool of threads
  ArrayList<String> inputFileNameList = new ArrayList<String>();
  File directory = new File(zipFilename);
  // get all the files from a directory
  File[] fList = directory.listFiles();
  for (File file : fList) {
    inputFileNameList.add(zipFilename + "/" + file.getName());
  }
  // Assign noOfThreads to the total number of files
  this.noOfThreads = fList.length;
  // System.out.println(inputFileNameList.get(0));
  int fileNo = 0;
  for (int i = 0; i < noOfThreads; i++) {
    Runnable worker = new Worker(inputFileNameList.get(fileNo));
    executor.execute(worker); // calling execute method of ExecutorService
    fileNo++;
  }
  executor.shutdown();
  while (!executor.isTerminated()) {}
  System.out.println("Finished all threads");
}
/** Start the thread pool. */
public static void startThreadPoolIfNecessary() {
  if (mExecutorService == null || mExecutorService.isShutdown() || mExecutorService.isTerminated()) {
    mExecutorService = Executors.newFixedThreadPool(3);
  }
}
/** Execute the actual compilation for each of the given files. */
private void compile() {
  // let everyone know what we're doing
  task.log("Starting Compile ");

  // generate the command line
  Commandline command = generateCompileCommand();

  // get all the files that we should compile
  // this will run checks for things like incremental compiling
  File objectDirectory = configuration.getObjectDirectory();
  File[] filesToCompile = helper.getFilesThatNeedCompiling(objectDirectory);
  task.log("" + filesToCompile.length + " files to be compiled.");

  // Do the compile
  // We have to support parallel builds by ourselves, so we throw a bunch of compile tasks
  // into a queue and process with an executor and then wait for them all to finish
  ExecutorService executor = Executors.newFixedThreadPool(configuration.getThreadCount());

  // create a runnable task for the compilation of each file and submit it
  for (File sourceFile : filesToCompile) {
    executor.submit(new CompileTask(sourceFile, objectDirectory, command));
  }

  // run the executor over the queue
  executor.shutdown();
  while (!executor.isTerminated()) {
    try {
      executor.awaitTermination(500, TimeUnit.MILLISECONDS);
    } catch (InterruptedException ie) {
      /* just carry on */
    }
  }

  task.log("Compile complete");
}
protected synchronized void initializeQueue(String host, String queueName, Integer port)
    throws InterruptedException {
  final String bind = "tcp://" + host + ":" + port;
  _log.warn("binding to " + bind);
  if (_context == null) {
    _context = ZMQ.context(1);
  }
  if (_socket != null) {
    _executorService.shutdownNow();
    _heartbeatService.shutdownNow();
    Thread.sleep(1 * 1000);
    _log.warn("_executorService.isTerminated=" + _executorService.isTerminated());
    _socket.close();
    _executorService = Executors.newFixedThreadPool(1);
    _heartbeatService = Executors.newFixedThreadPool(1);
  }
  _socket = _context.socket(ZMQ.PUB);
  _socket.connect(bind);
  _executorService.execute(new SendThread(_socket, queueName));
  _heartbeatService.execute(new HeartbeatThread(HEARTBEAT_INTERVAL));
  _log.debug("Inference output queue is sending to " + bind);
  _initialized = true;
}
private static void checkInit() {
  if (EXECUTOR == null || EXECUTOR.isTerminated()) {
    EXECUTOR = Executors.newCachedThreadPool();
    handlerMain = new Handler(Looper.getMainLooper());
    Log.d(TAG, "check init cores:" + NUMBER_OF_CORES);
  }
}
public String GetKnnAsString(String featureVector) {
  // System.err.println(featureVector);
  // System.err.println(trainingVectors.size());
  FeatureVector fv = new FeatureVector(featureVector, 32);
  PriorityBlockingQueue<CategoryDistances> pbq = new PriorityBlockingQueue<>();
  ExecutorService pool = Executors.newFixedThreadPool(NumWorkers);
  String outp = "";
  for (FeatureVector elem : trainingVectors) {
    pool.execute(new EuclideanWorker(elem, fv, pbq));
  }
  pool.shutdown();
  // Busy-wait until every EuclideanWorker has finished
  while (!pool.isTerminated()) {}
  for (int i = 0; i < K; i++) {
    CategoryDistances cd = pbq.poll();
    if (cd == null) {
      break;
    }
    outp += cd.toString() + VectorDelim;
  }
  // System.out.println(outp);
  return outp.substring(0, outp.length() - 1);
}
@Override
public void cleanup() {
  for (Entry<HiveEndPoint, HiveWriter> entry : allWriters.entrySet()) {
    try {
      HiveWriter w = entry.getValue();
      LOG.info("Flushing writer to {}", w);
      w.flush(false);
      LOG.info("Closing writer to {}", w);
      w.close();
    } catch (Exception ex) {
      LOG.warn("Error while closing writer to " + entry.getKey() + ". Exception follows.", ex);
      if (ex instanceof InterruptedException) {
        Thread.currentThread().interrupt();
      }
    }
  }

  ExecutorService[] toShutdown = {callTimeoutPool};
  for (ExecutorService execService : toShutdown) {
    execService.shutdown();
    try {
      while (!execService.isTerminated()) {
        execService.awaitTermination(options.getCallTimeOut(), TimeUnit.MILLISECONDS);
      }
    } catch (InterruptedException ex) {
      LOG.warn("shutdown interrupted on " + execService, ex);
    }
  }
  callTimeoutPool = null;
  super.cleanup();
  LOG.info("Hive Bolt stopped");
}
public void downloadTeamInfo(League[] leagues) throws Exception {
  int numLoaders = Setup.numThreads;
  ExecutorService executor = Executors.newFixedThreadPool(numLoaders);
  for (final League l : leagues) {
    Runnable r =
        new Runnable() {
          @Override
          public void run() {
            String yearsToPass = l.startYear;
            if (!(l.country.toLowerCase().equals("norge")
                || l.country.toLowerCase().equals("sverige"))) {
              int tmp = Integer.parseInt(yearsToPass);
              yearsToPass = yearsToPass + "-" + (tmp + 1);
            }
            try {
              downloadTeamInfo(l.country, l.name, yearsToPass, l.division);
            } catch (Exception e) {
              Log.e(e);
            }
          }
        };
    executor.execute(r);
  }
  executor.shutdown();
  while (!executor.isTerminated()) {
    Thread.sleep(100);
  }
}
/**
 * This is the only method exposed as the API call. It loads multiple configuration files from
 * the specified URIs and invokes a background {@link SearchAgent} callable task, which in turn
 * collects the requested properties from the various streams.
 *
 * @param resources String containing space-separated URIs where data will be extracted from.
 * @param attributes String containing space-separated keywords to look for.
 * @return List containing the loaded SearchObjectCache records.
 */
public static List<SearchObjectCache> search(String resources, String attributes, int agent)
    throws InterruptedException {
  // "crawling data sources"
  ExecutorService service = Executors.newFixedThreadPool(10);
  List<SearchObjectCache> records = new ArrayList<SearchObjectCache>();
  java.util.List<String> sources = Arrays.asList(resources.split(" "));
  List<Future<SearchObjectCache>> tasks = new ArrayList<Future<SearchObjectCache>>();
  try {
    // for(String source:sources){
    sources.stream().forEach(source -> add(service, tasks, agent, source, attributes));
    for (Future<SearchObjectCache> task : tasks) {
      records.addAll((List<SearchObjectCache>) task.get());
    }
    service.shutdown();
    service.awaitTermination(5, TimeUnit.SECONDS);
  } catch (Exception ex) {
    logger.info("error loading resources ");
    ex.printStackTrace();
    logger.log(Level.SEVERE, null, ex);
  } finally {
    if (!service.isTerminated()) {
      logger.info("Cancel non-finished tasks");
    }
    service.shutdownNow();
  }
  logger.info("Task is completed, let's check result");
  logger.info("Document search completed\n");
  return records;
}
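/*
 * Hypothetical usage sketch for search() above; the URIs, keywords and agent
 * id are made-up placeholders, not part of the original code.
 */
public static void main(String[] args) throws InterruptedException {
  List<SearchObjectCache> results =
      search("file:///data/conf-a.xml file:///data/conf-b.xml", "timeout retries", 1);
  System.out.println("records found: " + results.size());
}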
public static void main(String[] args) throws ExecutionException {
  Random random = new Random();
  int[][] myArray = new int[4][100];
  for (int i = 0; i < myArray.length; i++) {
    for (int j = 0; j < myArray[i].length; j++) {
      myArray[i][j] = random.nextInt(10);
    }
  }
  long before = System.currentTimeMillis();
  ExecutorService executor = Executors.newFixedThreadPool(myArray.length);
  List<Future<Integer>> myResults = new ArrayList<>();
  for (int i = 0; i < myArray.length; i++) {
    Callable<Integer> callable = new SearchCallable(myArray[i]);
    Future<Integer> callResult = executor.submit(callable);
    myResults.add(callResult);
  }
  executor.shutdown();
  while (!executor.isTerminated()) {
    /* NOP */
  }
  long max = 0;
  for (Future<Integer> future : myResults) {
    try {
      max = Math.max(max, future.get());
    } catch (InterruptedException e) {
      System.out.println(e.getMessage());
    }
  }
  System.out.println("The result is " + max);
  long after = System.currentTimeMillis();
  System.out.println(after - before);
}
protected void runAllGoldReportsInParallel(int threads) throws Exception {
  initializeTestEnvironment();

  final List<Throwable> errors = Collections.synchronizedList(new ArrayList<Throwable>());
  final ExecutorService threadPool =
      new ThreadPoolExecutor(
          threads,
          threads,
          0L,
          TimeUnit.MILLISECONDS,
          new LinkedBlockingQueue<Runnable>(),
          new TestThreadFactory(),
          new ThreadPoolExecutor.AbortPolicy());
  threadPool.submit(new ExecuteReportRunner("reports", ReportProcessingMode.legacy, errors));
  threadPool.submit(new ExecuteReportRunner("reports", ReportProcessingMode.current, errors));
  threadPool.submit(new ExecuteReportRunner("reports", ReportProcessingMode.migration, errors));
  threadPool.submit(new ExecuteReportRunner("reports-4.0", ReportProcessingMode.current, errors));
  threadPool.submit(new ExecuteReportRunner("reports-4.0", ReportProcessingMode.migration, errors));
  threadPool.shutdown();
  while (!threadPool.isTerminated()) {
    threadPool.awaitTermination(5, TimeUnit.MINUTES);
  }
  if (!errors.isEmpty()) {
    for (int i = 0; i < errors.size(); i++) {
      final Throwable throwable = errors.get(i);
      throwable.printStackTrace();
      LogFactory.getLog(GoldTestBase.class).error("Failed", throwable);
    }
    Assert.fail();
  }
}
@Override
public void parse(String dir) {
  File rootDir = new File(dir);
  if (rootDir.isDirectory()) {
    this.zipfiles.addAll(
        Arrays.asList(
            rootDir.listFiles(
                new FilenameFilter() {
                  @Override
                  public boolean accept(File dir, String name) {
                    return name.endsWith(".zip");
                  }
                })));
  } else {
    Logger.getLogger(ReutersParser.class.getName())
        .log(Level.SEVERE, "Error: Pass in the root directory of the RCV-1 corpus");
  }
  Logger.getLogger(ReutersParser.class.getName())
      .log(Level.INFO, "Parsed in {0} zip files", zipfiles.size());
  while (!zipfiles.isEmpty()) {
    pool.execute(new ReutersRCV1(zipfiles.poll(), controller));
  }
  pool.shutdown();
  try {
    while (!pool.isTerminated()) {
      pool.awaitTermination(5, TimeUnit.SECONDS);
    }
  } catch (InterruptedException ex) {
    Logger.getLogger(ReutersParser.class.getName()).log(Level.SEVERE, null, ex);
  }
}
@Test(groups = {"ueber", "performance"})
public void threadSafeTest() throws Exception {
  final MessageSenderManager msm = new MessageSenderManager();
  msm.setSenderPoolSize(SENDER_COUNT);
  msm.setSenderClass(SENDER_CLASS_NAME);
  msm.init();

  final Map<Runnable, Throwable> threads = new HashMap<Runnable, Throwable>();
  final ExecutorService es = Executors.newFixedThreadPool(THREAD_COUNT);
  for (int i = 0; i < SENDER_TASK_COUNT; i++) {
    final Runnable senderTask = new SenderTask(msm, threads);
    threads.put(senderTask, null);
    es.submit(senderTask);
  }
  es.shutdown();
  es.awaitTermination(
      (long) (SENDER_TASK_COUNT * THREAD_SLEEP_MILLIS * 1.2), TimeUnit.MILLISECONDS);
  assertTrue(es.isTerminated());
  assertTrue(es.isShutdown());

  final Iterator<Runnable> it = threads.keySet().iterator();
  while (it.hasNext()) {
    final Throwable t = threads.get(it.next());
    if (t != null) {
      fail("One of the threads threw following exception: " + t.getMessage());
    }
  }
  msm.close();
}
@Override
public void run(final Network network) {
  super.run(network);
  log.info("Putting landmarks on network...");
  long now = System.currentTimeMillis();
  landmarks = landmarker.identifyLandmarks(landmarkCount, network);
  log.info("done in " + (System.currentTimeMillis() - now) + " ms");

  log.info("Initializing landmarks data");
  for (Node node : network.getNodes().values()) {
    this.nodeData.put(node, new LandmarksData(this.landmarkCount));
  }

  int nOfThreads = this.numberOfThreads;
  if (nOfThreads > this.landmarks.length) {
    nOfThreads = this.landmarks.length;
  }
  if (nOfThreads < 2) {
    nOfThreads = 2; // always use at least two threads
  }
  log.info(
      "Calculating distance from each node to each of the "
          + this.landmarkCount
          + " landmarks using "
          + nOfThreads
          + " threads...");
  now = System.currentTimeMillis();

  ExecutorService executor = Executors.newFixedThreadPool(nOfThreads);
  for (int i = 0; i < this.landmarks.length; i++) {
    executor.execute(new Calculator(i, this.landmarks[i], this.nodeData, this.costFunction));
  }
  executor.shutdown();
  while (!executor.isTerminated()) {
    log.info("wait for landmarks Calculator to finish...");
    try {
      executor.awaitTermination(10, TimeUnit.MINUTES);
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
  }

  for (Node node : network.getNodes().values()) {
    LandmarksData r = getNodeData(node);
    r.updateMinMaxTravelTimes();
  }
  for (Node node : network.getNodes().values()) {
    LandmarksData r = getNodeData(node);
    for (int i = 0; i < this.landmarks.length; i++) {
      if (r.getMinLandmarkTravelTime(i) > r.getMaxLandmarkTravelTime(i)) {
        log.info("Min > max for node " + node.getId() + " and landmark " + i);
      }
    }
  }
  log.info("done in " + (System.currentTimeMillis() - now) + " ms");
}
/**
 * Calculates the Accessible Surface Areas for the atoms given in the constructor, using the
 * parameters given. Beware that the parallel implementation is quite memory hungry. It scales
 * well as long as there is enough memory available.
 *
 * @return an array with ASA values corresponding to each atom of the input array
 */
public double[] calculateAsas() {
  double[] asas = new double[atoms.length];

  if (nThreads <= 1) { // (i.e. it will also be 1 thread if 0 or a negative number is specified)
    for (int i = 0; i < atoms.length; i++) {
      asas[i] = calcSingleAsa(i);
    }
  } else {
    // NOTE the multithreaded calculation does not scale up well in some systems,
    // why? I guess some memory/garbage collection problem? I tried increasing Xmx in pc8201
    // but it didn't help

    // Following scaling tests are for 3hbx, calculating ASA of full asym unit (6 chains):

    // SCALING test done in merlinl01 (12 cores, Xeon X5670 @ 2.93GHz, 24GB RAM)
    //  1 threads, time:  8.8s -- x1.0
    //  2 threads, time:  4.4s -- x2.0
    //  3 threads, time:  2.9s -- x3.0
    //  4 threads, time:  2.2s -- x3.9
    //  5 threads, time:  1.8s -- x4.9
    //  6 threads, time:  1.6s -- x5.5
    //  7 threads, time:  1.4s -- x6.5
    //  8 threads, time:  1.3s -- x6.9

    // SCALING test done in pc8201 (4 cores, Core2 Quad Q9550 @ 2.83GHz, 8GB RAM)
    //  1 threads, time: 17.2s -- x1.0
    //  2 threads, time:  9.7s -- x1.8
    //  3 threads, time:  7.7s -- x2.2
    //  4 threads, time:  7.9s -- x2.2

    // SCALING test done in eppic01 (16 cores, Xeon E5-2650 0 @ 2.00GHz, 128GB RAM)
    //  1 threads, time: 10.7s -- x1.0
    //  2 threads, time:  5.6s -- x1.9
    //  3 threads, time:  3.6s -- x3.0
    //  4 threads, time:  2.8s -- x3.9
    //  5 threads, time:  2.3s -- x4.8
    //  6 threads, time:  1.8s -- x6.0
    //  7 threads, time:  1.6s -- x6.8
    //  8 threads, time:  1.3s -- x8.0
    //  9 threads, time:  1.3s -- x8.5
    // 10 threads, time:  1.1s -- x10.0
    // 11 threads, time:  1.0s -- x10.9
    // 12 threads, time:  0.9s -- x11.4

    ExecutorService threadPool = Executors.newFixedThreadPool(nThreads);
    for (int i = 0; i < atoms.length; i++) {
      threadPool.submit(new AsaCalcWorker(i, asas));
    }
    threadPool.shutdown();
    // Busy-wait until all AsaCalcWorker tasks have written their results into asas
    while (!threadPool.isTerminated()) {}
  }
  return asas;
}
/** Start the thread pool. */
public static void startThreadPoolIfNecessary() {
  if (sExecutorService == null || sExecutorService.isShutdown() || sExecutorService.isTerminated()) {
    sExecutorService = Executors.newFixedThreadPool(3);
    // sExecutorService = Executors.newSingleThreadExecutor();
  }
}
public void train() {
  int n = txt_rdr.getSize();
  assert (n == ans.size() && n != 0);

  ExecutorService fre_executor = Executors.newFixedThreadPool(NTHREADS);
  ExecutorService dict_executor = Executors.newFixedThreadPool(NTHREADS);

  for (int i = 0; i < n; i++) {
    Runnable task = new FreRunnable(i);
    fre_executor.execute(task);
  }
  fre_executor.shutdown();
  while (!fre_executor.isTerminated()) {}

  for (String s : f_rec.getRecordedStrings()) {
    Runnable task = new DictRunnable(s);
    dict_executor.execute(task);
  }
  dict_executor.shutdown();
  while (!dict_executor.isTerminated()) {}
}
/** Test of shutdown method, of class GCDExecutorService. */
@Test(expected = RejectedExecutionException.class)
public void testShutdown() {
  fixture.shutdown();
  assertTrue(fixture.isShutdown());
  assertTrue(fixture.isTerminated());
  fixture.execute(
      new Runnable() {
        public void run() {}
      });
}
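/*
 * For reference, a minimal sketch of the same contract against a plain JDK
 * executor (not the GCDExecutorService under test): once an executor that never
 * received work is shut down, it reports isTerminated() and rejects new tasks.
 * This is an illustrative sketch, not part of the original test class.
 */
ExecutorService es = Executors.newSingleThreadExecutor();
es.shutdown();
System.out.println(es.isShutdown());   // true
System.out.println(es.isTerminated()); // true: no task was ever submitted
es.execute(() -> {}); // throws RejectedExecutionException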
public static void main(String[] args) {
  int numThreads = 5;
  ExecutorService executor = Executors.newFixedThreadPool(numThreads);
  Test t = new Test();
  for (int i = 0; i < numThreads; i++) {
    executor.execute(t);
  }
  executor.shutdown();
  while (!executor.isTerminated()) {}
}
private void processConceptNetData() throws URISyntaxException {
  // File folder = new File("/research/conceptnet5/parsed");
  File folder =
      new File("C:/Users/Sam Sarjant/Documents/workspace/ConceptNet/data/assertions/parsed");
  try {
    PrintWriter log =
        new PrintWriter(new BufferedWriter(new FileWriter("importerLog.txt", true)));
    String line;

    // Pre-process IsA
    File isaFile = new File(folder, "IsA.txt");
    try {
      BufferedReader br = new BufferedReader(new FileReader(isaFile));
      System.out.println("processing:" + isaFile.getName());
      while ((line = br.readLine()) != null) {
        String[] split = line.split("\t");
        String relationName = split[0];
        String nodename1 = split[1];
        String nodename2 = split[2];
        ArrayList<DAGNode> n1 = resolveAmbiguity(nodename1);
        ArrayList<DAGNode> n2 = resolveAmbiguity(nodename2);
        if (n1 == null && n2 != null) {
          // If this node is not resolvable, add isa edge as backup
          if (!IsAEdges_.containsKey(nodename1))
            IsAEdges_.put(nodename1, new ArrayList<DAGNode>());
          IsAEdges_.get(nodename1).addAll(n2);
        } else if (n1 != null && n2 != null) {
          _resolvedCount++;
        }
      }
      br.close();
    } catch (Exception e) {
      e.printStackTrace();
    }

    // Read the rest
    ExecutorService executor = Executors.newFixedThreadPool(6);
    File[] files = folder.listFiles();
    for (File file : files) {
      if (!file.isFile() || file.getName().endsWith("IsA.txt"))
        continue;
      Runnable worker = new ProcessCN5Thread(file);
      executor.execute(worker);
    }
    executor.shutdown();
    while (!executor.isTerminated()) {}
    log.close();
  } catch (IOException e1) {
    e1.printStackTrace();
  }
}
public void run() {
  try {
    ExecutorService service = Executors.newSingleThreadExecutor();
    Future<MyLocationBundle> futureBundle = service.submit(c);
    service.shutdownNow();
    service.isTerminated(); // return value ignored in the original
    Logger.e(futureBundle.get().toString());
  } catch (Exception e) {
  }
}
public static void main(String args[]) {
  ExecutorService executor = Executors.newCachedThreadPool();
  for (int i = 0; i < 100; i++) {
    executor.execute(new AddPennyTask());
  }
  executor.shutdown();
  while (!executor.isTerminated()) {}
  System.out.println("What is the balance? " + account.getBalance());
}
/**
 * Blocks the execution of the current thread until all worker threads end their execution.
 *
 * @param executorService the ExecutorService instance
 * @throws Throwable to JUnit
 */
private static void awaitTermination(ExecutorService executorService) throws Throwable {
  executorService.shutdown();
  while (!executorService.isTerminated()) {
    executorService.awaitTermination(30, TimeUnit.SECONDS);
  }
  if (error != null) {
    throw error;
  }
}
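/*
 * A minimal, hypothetical usage sketch for the helper above; the pool, the
 * task body and the shared "error" field are placeholders, not part of the
 * original test class.
 */
ExecutorService pool = Executors.newFixedThreadPool(4);
for (int i = 0; i < 8; i++) {
  pool.submit(() -> {
    // do some test work; on failure a worker would record the Throwable
    // into the shared "error" field checked by awaitTermination()
  });
}
awaitTermination(pool); // blocks until the pool terminates, then rethrows any recorded error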
@PreDestroy
public void shutdown() {
  running.setValue(false);
  executor.shutdownNow();
  while (!executor.isTerminated()) {
    try {
      Thread.sleep(100);
    } catch (InterruptedException e) {
      log.error("interrupted while waiting for threads to finish.");
    }
  }
}
public static void main(String[] args) {
  ExecutorService executor = Executors.newFixedThreadPool(5);
  for (int i = 0; i < 10; i++) {
    Runnable worker = new WorkerThread("" + i);
    executor.execute(worker);
  }
  executor.shutdown();
  while (!executor.isTerminated()) {}
  System.out.println("Finished all threads");
}
public static void main(String[] args) {
  int threadCount = 128;
  ExecutorService executor = Executors.newFixedThreadPool(threadCount);
  try {
    URL url;
    InputStream is = null;
    BufferedReader br;
    BufferedReader br2;
    url =
        new URL(
            "http://s3.amazonaws.com/aws-publicdatasets/trec/kba/kba-streamcorpus-2013-v0_2_0-english-and-unknown-language/index.html");
    is = url.openStream(); // throws an IOException
    br = new BufferedReader(new InputStreamReader(is));
    String s = "";
    int i = 0;
    boolean finished = false;
    while (!finished) {
      // Thread t1 = myThread(br.readLine());
      Thread t = null;
      Pattern p = Pattern.compile("a href=\"([^\"]+)\"");
      final Matcher m1 = p.matcher(br.readLine());
      if (m1.find()) {
        String linkStr = m1.group(1);
        System.out.println(linkStr);
        final String dir = linkStr.substring(0, linkStr.indexOf('/'));
        System.out.println(dir);
        Runnable worker =
            new Thread(i++ + " " + linkStr) {
              public void run() {
                downloadDir(dir);
              }
            };
        executor.execute(worker);
      } else if (i > 1) {
        finished = true;
      }
    }
    executor.shutdown();
    while (!executor.isTerminated()) {}
    System.out.println("Finished all threads");
  } catch (Exception ioe) {
    ioe.printStackTrace();
  }
}
/** Test of awaitTermination method, of class GCDExecutorService. */
@Test
public void testAwaitTermination() throws Exception {
  int count = 100;
  Object lock = new Object();
  queueUpSomeTasks(lock, count);
  List<Runnable> outstandingTasks = fixture.shutdownNow();
  assertEquals(count, outstandingTasks.size());
  assertTrue(fixture.isShutdown());
  synchronized (lock) {
    lock.notifyAll();
  }
  assertTrue(fixture.awaitTermination(5, TimeUnit.SECONDS));
  assertTrue(fixture.isTerminated());
}
public static void main(String[] args) {
  ExecutorService executors = Executors.newFixedThreadPool(100);
  for (int i = 0; i < 5000; i++) {
    Runnable task = new Task();
    executors.submit(task);
  }
  executors.shutdown();
  // The original looped while isTerminated() was true, which either skips the
  // message or prints it forever; wait for termination first, then report once.
  while (!executors.isTerminated()) {}
  System.out.println("All work is done...");
}
/** Shutting down the cluster should act as the ExecutorService shutdown. */
@Test(expected = RejectedExecutionException.class)
public void testClusterShutdown() throws Exception {
  ExecutorService executor = createSingleNodeExecutorService("testClusterShutdown");
  shutdownNodeFactory();
  Thread.sleep(2000);

  assertNotNull(executor);
  assertTrue(executor.isShutdown());
  assertTrue(executor.isTerminated());

  // New tasks must be rejected
  Callable<String> task = new BasicTestTask();
  executor.submit(task);
}