public ViewStatusResultAggregator run() { ViewStatusResultAggregator aggregator = new ViewStatusResultAggregatorImpl(); CompletionService<PerViewStatusResult> completionService = new ExecutorCompletionService<PerViewStatusResult>(_executor); // submit task to executor to run partitioned by security type for (String securityType : _valueRequirementBySecType.keySet()) { Collection<String> valueRequirements = _valueRequirementBySecType.get(securityType); completionService.submit( new ViewStatusCalculationTask( _toolContext, _portfolioId, _user, securityType, valueRequirements, _marketDataSpecification)); } try { // process all completed task for (int i = 0; i < _valueRequirementBySecType.size(); i++) { Future<PerViewStatusResult> futureTask = completionService.take(); PerViewStatusResult perViewStatusResult = futureTask.get(); for (ViewStatusKey viewStatusKey : perViewStatusResult.keySet()) { aggregator.putStatus(viewStatusKey, perViewStatusResult.get(viewStatusKey)); } } } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } catch (ExecutionException ex) { throw new OpenGammaRuntimeException("Error running View status report", ex.getCause()); } return aggregator; }
// Blocks until `count` previously submitted tasks have completed, taking them
// off the completion service one by one. If interrupted, stops early and
// restores the thread's interrupt flag. Note the post-decrement: `count` ends
// at -1 after a full drain, and reflects remaining work if interrupted.
void waitForCompletion() { try { while (count-- > 0) completionService.take(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } }
// Concurrency stress test: nine manipulators mutate entry points in parallel
// for each delete index, then rules are fired for ~1s and results checked.
@Test
public void testMultipleConcurrentEPs3() {
  final KieSession ksession = getKieSessionWith3Segments();
  List<String> results = new ArrayList<String>();
  ksession.setGlobal("results", results);
  EPManipulator3[] epManipulators = new EPManipulator3[9];
  for (int i = 0; i < 9; i++) {
    epManipulators[i] = new EPManipulator3(ksession, i + 1);
  }
  // 11 iterations so that deleteIndex % 10 revisits index 0 once at the end.
  for (int deleteIndex = 0; deleteIndex < 11; deleteIndex++) {
    boolean success = true;
    CompletionService<Boolean> ecs = new ExecutorCompletionService<Boolean>(executor);
    for (int i = 0; i < 9; i++) {
      ecs.submit(epManipulators[i].setDeleteIndex(deleteIndex % 10));
    }
    // Collect all nine results in completion order; every task must succeed.
    for (int i = 1; i < 10; i++) {
      try {
        success = ecs.take().get() && success;
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
    assertTrue(success);
    // Fire rules on a background thread, give it one second, then halt.
    new Thread() {
      public void run() {
        ksession.fireUntilHalt();
      }
    }.start();
    try {
      Thread.sleep(1000);
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
    ksession.halt();
    if (deleteIndex % 10 == 0) {
      // With nothing deleted, all three rules are expected to have fired.
      assertEquals(3, results.size());
      assertTrue(results.containsAll(asList("R1", "R2", "R3")));
    } else {
      // Any deletion should prevent all activations.
      if (!results.isEmpty()) {
        fail(
            "Results should be empty with deleteIndex = "
                + deleteIndex
                + "; got "
                + results.size()
                + " items");
      }
    }
    results.clear();
  }
}
/**
 * Consumes the results of {@code NUM_TASKS} previously submitted tasks,
 * printing each result in completion order.
 *
 * <p>(The previous Javadoc claimed this method submits tasks; it only drains
 * and prints results.)
 *
 * @param completionService the service the tasks were submitted to
 * @throws InterruptedException if interrupted while waiting for a result
 * @throws ExecutionException if any task terminated with an exception
 */
private static void processResults(final CompletionService<String> completionService)
    throws InterruptedException, ExecutionException {
  for (int i = 0; i < NUM_TASKS; ++i) {
    final String result = completionService.take().get();
    System.out.println(result);
  }
}
@Override
public void run() {
  try {
    ServerSocket server = new ServerSocket(listenPort);
    // Accept loop: receive one uploaded file per connection, then mirror it
    // to the FTP server.
    // NOTE(review): the loop blocks on cs.take() immediately after each
    // submit, so connections are effectively handled one at a time, and the
    // server socket is never closed — confirm both are intended.
    while (true) {
      Socket socket = server.accept();
      cs.submit(new HandleThread(socket));
      try {
        UploadObject result = cs.take().get();
        // Upload the received file to the FTP server after the handler
        // finishes. (Original author's note: since this is multi-threaded,
        // a callback-based design might be preferable.)
        try {
          FtpUploadFiles ftpUploadFiles = FtpUploadFiles.getInstance();
          PathManage pathManage = FilePathConstants.pathMap.get(result.getFileType());
          // Temp-data files are not mirrored to FTP.
          if (pathManage != null && !pathManage.getFileType().equals(FileType.TempDataType)) {
            String tpFtpDir = pathManage.getFtpPath();
            if (result.getCreateDir() != null) {
              tpFtpDir += result.getCreateDir();
            }
            ftpUploadFiles.uploadMmsFile(result.getPath(), result.getNewLocalFile(), tpFtpDir);
          }
        } catch (Exception e) {
          e.printStackTrace();
        }
      } catch (InterruptedException e) {
        e.printStackTrace();
      } catch (ExecutionException e) {
        e.printStackTrace();
      }
    }
  } catch (IOException e) {
    e.printStackTrace();
  }
}
public static void main(String[] args) throws InterruptedException, ExecutionException { final ExecutorService exec = Executors.newFixedThreadPool(10); CompletionService serv = new ExecutorCompletionService(exec); for (int index = 0; index < 5; index++) { final int NO = index; Callable downImg = new Callable() { @Override public Object call() throws Exception { Thread.sleep((long) (Math.random() * 10000)); return "Downloaded Image " + NO; } }; serv.submit(downImg); } // Thread.sleep(2 * 1000); System.out.println("Show web content"); for (int i = 0; i < 5; i++) { Future task = serv.take(); Object img = task.get(); System.out.println(img); } System.out.println("End"); exec.shutdown(); }
/** * This is the main loop of the program. For every file in the filenames list, the file gets * decoded and downsampled to a 4410 hz mono wav file. Then key and bpm detectors are run, the * result is logged in a txt file and written to the tag if possible. */ public void run() throws ExecutionException { int nThreads = Runtime.getRuntime().availableProcessors(); ExecutorService threadPool = Executors.newFixedThreadPool(nThreads); CompletionService<Boolean> pool; pool = new ExecutorCompletionService<Boolean>(threadPool); nextFile: for (String filename : filenames) { // new worker thread pool.submit(new WorkTrack(filename)); } for (int i = 0; i < filenames.size(); i++) { Boolean result; // Compute the result try { result = pool.take().get(); } catch (InterruptedException e) { Logger.getLogger(TrackAnalyzer.class.getName()).log(Level.SEVERE, null, e); } } threadPool.shutdown(); if (!Utils.isEmpty(c.writeList)) { try { writeListWriter.close(); } catch (IOException ex) { Logger.getLogger(TrackAnalyzer.class.getName()).log(Level.SEVERE, null, ex); } } System.exit(0); }
public NonBlockingIdentityHashMap<Long, TestKey> getMapMultithreaded() throws InterruptedException, ExecutionException { final int threadCount = _items.keySet().size(); final NonBlockingIdentityHashMap<Long, TestKey> map = new NonBlockingIdentityHashMap<Long, TestKey>(); // use a barrier to open the gate for all threads at once to avoid rolling start and no actual // concurrency final CyclicBarrier barrier = new CyclicBarrier(threadCount); final ExecutorService ex = Executors.newFixedThreadPool(threadCount); final CompletionService<Integer> co = new ExecutorCompletionService<Integer>(ex); for (Integer type : _items.keySet()) { // A linked-list of things to insert List<TestKey> items = _items.get(type); TestKeyFeederThread feeder = new TestKeyFeederThread(type, items, map, barrier); co.submit(feeder); } // wait for all threads to return int itemCount = 0; for (int retCount = 0; retCount < threadCount; retCount++) { final Future<Integer> result = co.take(); itemCount += result.get(); } ex.shutdown(); return map; }
// for a list of tasks to be submitted public <T> T submitCallable(List<Callable<T>> cList) throws InterruptedException, ExecutionException { for (Callable<T> c : cList) { cs.submit(c); } return (T) cs.take().get(); }
public static void main(String[] args) { Macros.SHIFT_DURATION = 24 * 60; Schedule schedule = new Schedule(); schedule.addJob(new Job("J1", 10 * 24, 200, Job.JOB_NORMAL)); schedule.addJob(new Job("J2", 10 * 24, 200, Job.JOB_NORMAL)); schedule.addJob(new Job("J3", 10 * 24, 200, Job.JOB_NORMAL)); schedule.addJob(new Job("J4", 10 * 24, 200, Job.JOB_NORMAL)); schedule.addJob(new Job("J5", 10 * 24, 200, Job.JOB_NORMAL)); schedule.addJob(new Job("J6", 10 * 24, 200, Job.JOB_NORMAL)); // machine = new Machine(0,1); pmoList = schedule.getPMOpportunities(); ExecutorService threadPool = Executors.newSingleThreadExecutor(); CompletionService<SimulationResult> pool = new ExecutorCompletionService<SimulationResult>(threadPool); pool.submit( new SimulationThread( schedule, getCombolist((long) (Math.pow(2, 31) - 1)), pmoList, false, machine)); try { SimulationResult result = pool.take().get(); System.out.println(result.cost); } catch (InterruptedException | ExecutionException e) { // TODO Auto-generated catch block e.printStackTrace(); } }
// Concurrent insertion & then iterator test. public static void testNonBlockingIdentityHashMapIterator() throws InterruptedException { final int ITEM_COUNT1 = 1000; final int THREAD_COUNT = 5; final int PER_CNT = ITEM_COUNT1 / THREAD_COUNT; final int ITEM_COUNT = PER_CNT * THREAD_COUNT; // fix roundoff for odd thread counts NonBlockingIdentityHashMap<Long, TestKey> nbhml = new NonBlockingIdentityHashMap<Long, TestKey>(); // use a barrier to open the gate for all threads at once to avoid rolling // start and no actual concurrency final CyclicBarrier barrier = new CyclicBarrier(THREAD_COUNT); final ExecutorService ex = Executors.newFixedThreadPool(THREAD_COUNT); final CompletionService<Object> co = new ExecutorCompletionService<Object>(ex); for (int i = 0; i < THREAD_COUNT; i++) { co.submit(new NBHMLFeeder(nbhml, PER_CNT, barrier, i * PER_CNT)); } for (int retCount = 0; retCount < THREAD_COUNT; retCount++) { co.take(); } ex.shutdown(); assertEquals("values().size()", ITEM_COUNT, nbhml.values().size()); assertEquals("entrySet().size()", ITEM_COUNT, nbhml.entrySet().size()); int itemCount = 0; for (TestKey K : nbhml.values()) itemCount++; assertEquals("values().iterator() count", ITEM_COUNT, itemCount); }
// Stress test: four daemon threads concurrently insert, fire, retract and
// re-fire 1000 facts each against the same session; all must succeed.
@Test(timeout = 10000)
public void testConcurrentInsertions() {
  String str =
      "import MultithreadTest.Bean\n"
          + "\n"
          + "rule \"R\"\n"
          + "when\n"
          + "    $a : Bean( seed != 1 )\n"
          + "then\n"
          + "end";
  KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
  final StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
  // Daemon threads so a hung worker cannot keep the JVM alive past the timeout.
  Executor executor =
      Executors.newCachedThreadPool(
          new ThreadFactory() {
            public Thread newThread(Runnable r) {
              Thread t = new Thread(r);
              t.setDaemon(true);
              return t;
            }
          });
  final int OBJECT_NR = 1000;
  final int THREAD_NR = 4;
  CompletionService<Boolean> ecs = new ExecutorCompletionService<Boolean>(executor);
  for (int i = 0; i < THREAD_NR; i++) {
    ecs.submit(
        new Callable<Boolean>() {
          public Boolean call() throws Exception {
            try {
              // Insert, fire, retract, fire — any exception marks the task failed.
              FactHandle[] facts = new FactHandle[OBJECT_NR];
              for (int i = 0; i < OBJECT_NR; i++) facts[i] = ksession.insert(new Bean(i));
              ksession.fireAllRules();
              for (FactHandle fact : facts) ksession.retract(fact);
              ksession.fireAllRules();
              return true;
            } catch (Exception e) {
              e.printStackTrace();
              return false;
            }
          }
        });
  }
  // All four workers must report success.
  boolean success = true;
  for (int i = 0; i < THREAD_NR; i++) {
    try {
      success = ecs.take().get() && success;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  assertTrue(success);
  ksession.dispose();
}
/**
 * Drives a multi-threaded reactor build for one task segment: schedules the
 * dependency-free projects first, then, as each build finishes, schedules any
 * projects it unblocked, until every build has completed or the reactor halts.
 */
private void multiThreadedProjectTaskSegmentBuild(
    ConcurrencyDependencyGraph analyzer,
    ReactorContext reactorContext,
    MavenSession rootSession,
    CompletionService<ProjectSegment> service,
    TaskSegment taskSegment,
    Map<MavenProject, ProjectSegment> projectBuildList,
    ThreadOutputMuxer muxer) {
  // schedule independent projects
  for (MavenProject mavenProject : analyzer.getRootSchedulableBuilds()) {
    ProjectSegment projectSegment = projectBuildList.get(mavenProject);
    logger.debug("Scheduling: " + projectSegment.getProject());
    Callable<ProjectSegment> cb =
        createBuildCallable(rootSession, projectSegment, reactorContext, taskSegment, muxer);
    service.submit(cb);
  }
  // for each finished project
  for (int i = 0; i < analyzer.getNumberOfBuilds(); i++) {
    try {
      ProjectSegment projectBuild = service.take().get();
      if (reactorContext.getReactorBuildStatus().isHalted()) {
        break;
      }
      // A finished project may unblock downstream projects; schedule them now.
      final List<MavenProject> newItemsThatCanBeBuilt =
          analyzer.markAsFinished(projectBuild.getProject());
      for (MavenProject mavenProject : newItemsThatCanBeBuilt) {
        ProjectSegment scheduledDependent = projectBuildList.get(mavenProject);
        logger.debug("Scheduling: " + scheduledDependent);
        Callable<ProjectSegment> cb =
            createBuildCallable(
                rootSession, scheduledDependent, reactorContext, taskSegment, muxer);
        service.submit(cb);
      }
    } catch (InterruptedException e) {
      // Record the interruption as a build failure and stop scheduling.
      rootSession.getResult().addException(e);
      break;
    } catch (ExecutionException e) {
      // TODO MNG-5766 changes likely made this redundant
      rootSession.getResult().addException(e);
      break;
    }
  }
  // cancel outstanding builds (if any) - this can happen if an exception is thrown in above
  // block
  // NOTE(review): poll() only returns already-completed futures, so builds
  // still running are drained as they finish rather than actively cancelled —
  // confirm this matches the comment's intent.
  Future<ProjectSegment> unprocessed;
  while ((unprocessed = service.poll()) != null) {
    try {
      unprocessed.get();
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    } catch (ExecutionException e) {
      throw new RuntimeException(e);
    }
  }
}
// Three rules join String/Integer/Long facts across three entry points in a
// rotating pattern; three concurrent manipulators feed the entry points, and
// each rule is expected to fire once with the value "2".
@Test(timeout = 10000)
public void testMultipleConcurrentEPs2() {
  String str =
      "global java.util.List results\n"
          + "\n"
          + "rule \"R0\" when\n"
          + "    $s : String( ) from entry-point EP0\n"
          + "    $i : Integer( toString().equals($s) ) from entry-point EP1\n"
          + "    $l : Long( intValue() == $i ) from entry-point EP2\n"
          + "then\n"
          + "    results.add( $s );\n"
          + "end\n"
          + "\n"
          + "rule \"R1\" when\n"
          + "    $s : String( ) from entry-point EP1\n"
          + "    $i : Integer( toString().equals($s) ) from entry-point EP2\n"
          + "    $l : Long( intValue() == $i ) from entry-point EP0\n"
          + "then\n"
          + "    results.add( $s );\n"
          + "end\n"
          + "\n"
          + "rule \"R2\" when\n"
          + "    $s : String( ) from entry-point EP2\n"
          + "    $i : Integer( toString().equals($s) ) from entry-point EP0\n"
          + "    $l : Long( intValue() == $i ) from entry-point EP1\n"
          + "then\n"
          + "    results.add( $s );\n"
          + "end\n";
  KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
  KieSession ksession = kbase.newStatefulKnowledgeSession();
  List<String> results = new ArrayList<String>();
  ksession.setGlobal("results", results);
  // Run three manipulators concurrently, one per entry point index.
  boolean success = true;
  CompletionService<Boolean> ecs = new ExecutorCompletionService<Boolean>(executor);
  for (int i = 0; i < 3; i++) {
    ecs.submit(new EPManipulator2(ksession, i));
  }
  for (int i = 0; i < 3; i++) {
    try {
      success = ecs.take().get() && success;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  assertTrue(success);
  ksession.fireAllRules();
  System.out.println(results);
  // Each of the three rules should have fired exactly once with "2".
  assertEquals(3, results.size());
  for (String s : results) {
    assertEquals("2", s);
  }
}
/**
 * Worker loop: waits for a data-sink to be injected, then repeatedly takes
 * completed crawler documents from the completion service, wraps each
 * successful one in a new ICommand and pushes it into the sink. Exits on
 * interruption or when stopped.
 */
@Override
public void run() {
  try {
    // waiting until the data-sink was set from outside
    synchronized (this) {
      while (this.sink == null) this.wait();
    }
    // waiting for new commands
    while (!this.stopped && !Thread.interrupted()) {
      try {
        // fetch the next complete crawler document
        ICrawlerDocument crawlerDoc = execCompletionService.take().get();
        if (crawlerDoc != null && crawlerDoc.getStatus() == ICrawlerDocument.Status.OK) {
          // create a new ICommand
          final IDocumentFactory cmdFactory = this.docFactories.get(ICommand.class.getName());
          final ICommand cmd = cmdFactory.createDocument(ICommand.class);
          cmd.setLocation(crawlerDoc.getLocation());
          cmd.setProfileOID(this.getProfileID());
          cmd.setDepth(0);
          cmd.setResult(ICommand.Result.Passed, null);
          /* Sending event via command-tracker!
           *
           * Calling this function should also created a valid command OID for us
           */
          this.commandTracker.commandCreated(this.getClass().getName(), cmd);
          if (cmd.getOID() <= 0) {
            // ORM failed to assign an OID; the command cannot be enqueued.
            this.logger.warn(
                String.format(
                    "Command with location '%s' has an invalid OID '%d'. ORM mapping seems not to work. Command is not enqueued.",
                    cmd.getLocation(), Integer.valueOf(cmd.getOID())));
          } else {
            cmd.setCrawlerDocument(crawlerDoc);
            // put it into the data-sink
            this.sink.putData(cmd);
          }
        }
      } catch (Exception e) {
        // InterruptedException means a normal shutdown request; anything
        // else is logged and the loop continues.
        if (!(e instanceof InterruptedException)) {
          this.logger.error(
              String.format(
                  "%s: Unexpected '%s' while waiting for new commands to enqueue.",
                  this.getName(), e.getClass().getName()),
              e);
        } else {
          this.logger.info("Thread stopped successfully.");
          break;
        }
      }
    }
  } catch (Exception e) {
    this.logger.error(
        String.format("%s: Unexpected '%s'.", this.getName(), e.getClass().getName()), e);
  }
}
/**
 * Waits for completed requests. Once the first request has been taken, the method will wait
 * WAIT_TIMEOUT ms longer to collect more completed requests.
 *
 * @return Collected feeds or null if the method has been interrupted during the first waiting
 *     period.
 */
private List<Feed> collectCompletedRequests() {
  List<Feed> results = new LinkedList<Feed>();
  DownloadRequester requester = DownloadRequester.getInstance();
  int tasks = 0;
  try {
    // Block until at least one download has completed, then start parsing it.
    DownloadRequest request = completedRequests.take();
    parserService.submit(new FeedParserTask(request));
    tasks++;
  } catch (InterruptedException e) {
    return null;
  }
  tasks += pollCompletedDownloads();
  isCollectingRequests = true;
  if (requester.isDownloadingFeeds()) {
    // wait for completion of more downloads
    long startTime = System.currentTimeMillis();
    long currentTime = startTime;
    while (requester.isDownloadingFeeds() && (currentTime - startTime) < WAIT_TIMEOUT) {
      try {
        if (BuildConfig.DEBUG)
          Log.d(TAG, "Waiting for " + (startTime + WAIT_TIMEOUT - currentTime) + " ms");
        sleep(startTime + WAIT_TIMEOUT - currentTime);
      } catch (InterruptedException e) {
        // An interrupt here is used as a "new download finished" nudge:
        // pick up whatever completed and keep waiting until the timeout.
        if (BuildConfig.DEBUG) Log.d(TAG, "interrupted while waiting for more downloads");
        tasks += pollCompletedDownloads();
      } finally {
        currentTime = System.currentTimeMillis();
      }
    }
    tasks += pollCompletedDownloads();
  }
  isCollectingRequests = false;
  // Gather one parsed feed per submitted parser task; failed parses yield null
  // and are skipped.
  // NOTE(review): InterruptedException here is only printed, not re-asserted —
  // confirm the thread's interrupt contract allows that.
  for (int i = 0; i < tasks; i++) {
    try {
      Feed f = parserService.take().get();
      if (f != null) {
        results.add(f);
      }
    } catch (InterruptedException e) {
      e.printStackTrace();
    } catch (ExecutionException e) {
      e.printStackTrace();
    }
  }
  return results;
}
@Override public void run() { if (BuildConfig.DEBUG) Log.d(TAG, "downloadCompletionThread was started"); while (!isInterrupted()) { try { Downloader downloader = downloadExecutor.take().get(); if (BuildConfig.DEBUG) Log.d(TAG, "Received 'Download Complete' - message."); removeDownload(downloader); DownloadStatus status = downloader.getResult(); boolean successful = status.isSuccessful(); final int type = status.getFeedfileType(); if (successful) { if (type == Feed.FEEDFILETYPE_FEED) { handleCompletedFeedDownload(downloader.getDownloadRequest()); } else if (type == FeedImage.FEEDFILETYPE_FEEDIMAGE) { handleCompletedImageDownload(status, downloader.getDownloadRequest()); } else if (type == FeedMedia.FEEDFILETYPE_FEEDMEDIA) { handleCompletedFeedMediaDownload(status, downloader.getDownloadRequest()); } } else { numberOfDownloads.decrementAndGet(); if (!status.isCancelled()) { if (status.getReason() == DownloadError.ERROR_UNAUTHORIZED) { postAuthenticationNotification(downloader.getDownloadRequest()); } else if (status.getReason() == DownloadError.ERROR_HTTP_DATA_ERROR && Integer.valueOf(status.getReasonDetailed()) == HttpStatus.SC_REQUESTED_RANGE_NOT_SATISFIABLE) { Log.d(TAG, "Requested invalid range, restarting download from the beginning"); FileUtils.deleteQuietly( new File(downloader.getDownloadRequest().getDestination())); DownloadRequester.getInstance() .download(DownloadService.this, downloader.getDownloadRequest()); } else { Log.e(TAG, "Download failed"); saveDownloadStatus(status); handleFailedDownload(status, downloader.getDownloadRequest()); } } sendDownloadHandledIntent(); queryDownloadsAsync(); } } catch (InterruptedException e) { if (BuildConfig.DEBUG) Log.d(TAG, "DownloadCompletionThread was interrupted"); } catch (ExecutionException e) { e.printStackTrace(); numberOfDownloads.decrementAndGet(); } } if (BuildConfig.DEBUG) Log.d(TAG, "End of downloadCompletionThread"); }
// private void multiThreadedMarshal(PrintWriter writer, List<Result> results, private String multiThreadedMarshal( List<Result> results, String recordSchema, final Map<String, Serializable> arguments) throws CatalogTransformerException { CompletionService<BinaryContent> completionService = new ExecutorCompletionService<>(queryExecutor); try { final MetacardTransformer transformer = metacardTransformerManager.getTransformerBySchema(recordSchema); if (transformer == null) { throw new CatalogTransformerException( "Cannot find transformer for schema: " + recordSchema); } Map<Future<BinaryContent>, Result> futures = new HashMap<>(results.size()); for (Result result : results) { final Metacard mc = result.getMetacard(); // the "current" thread will run submitted task when queueSize exceeded; effectively // blocking enqueue of more tasks. futures.put( completionService.submit( () -> { BinaryContent content = transformer.transform(mc, arguments); return content; }), result); } // TODO - really? I can't use a list of some sort? InputStream[] contents = new InputStream[results.size()]; while (!futures.isEmpty()) { Future<BinaryContent> completedFuture = completionService.take(); int index = results.indexOf(futures.get(completedFuture)); contents[index] = completedFuture.get().getInputStream(); futures.remove(completedFuture); } CharArrayWriter accum = new CharArrayWriter(ACCUM_INITIAL_SIZE); for (InputStream is : contents) { IOUtils.copy(is, accum); } return accum.toString(); } catch (IOException | InterruptedException | ExecutionException xe) { throw new CatalogTransformerException(xe); } } // end multiThreadedMarshal()
/**
 * Recovers the cluster-wide cache topology after a view change (or merge):
 * collects status responses from all members, regroups them per cache, and
 * merges partitions for each cache on worker threads.
 */
private void recoverClusterStatus(
    int newViewId, final boolean isMergeView, final List<Address> clusterMembers)
    throws Exception {
  log.debugf("Recovering cluster status for view %d", newViewId);
  ReplicableCommand command =
      new CacheTopologyControlCommand(
          null, CacheTopologyControlCommand.Type.GET_STATUS, transport.getAddress(), newViewId);
  Map<Address, Object> statusResponses =
      executeOnClusterSync(command, getGlobalTimeout(), false, false, new CacheTopologyFilterReuser());
  log.debugf("Got %d status responses. members are %s", statusResponses.size(), clusterMembers);
  // Regroup the per-node responses into per-cache maps of (sender -> status).
  Map<String, Map<Address, CacheStatusResponse>> responsesByCache = new HashMap<>();
  boolean recoveredRebalancingStatus = true;
  for (Map.Entry<Address, Object> responseEntry : statusResponses.entrySet()) {
    Address sender = responseEntry.getKey();
    ManagerStatusResponse nodeStatus = (ManagerStatusResponse) responseEntry.getValue();
    // Rebalancing stays enabled only if every node reports it enabled.
    recoveredRebalancingStatus &= nodeStatus.isRebalancingEnabled();
    for (Map.Entry<String, CacheStatusResponse> statusEntry : nodeStatus.getCaches().entrySet()) {
      String cacheName = statusEntry.getKey();
      Map<Address, CacheStatusResponse> cacheResponses = responsesByCache.get(cacheName);
      if (cacheResponses == null) {
        cacheResponses = new HashMap<>();
        responsesByCache.put(cacheName, cacheResponses);
      }
      cacheResponses.put(sender, statusEntry.getValue());
    }
  }
  globalRebalancingEnabled = recoveredRebalancingStatus;
  // Compute the new consistent hashes on separate threads
  int maxThreads = Runtime.getRuntime().availableProcessors() / 2 + 1;
  CompletionService<Void> cs =
      new SemaphoreCompletionService<>(asyncTransportExecutor, maxThreads);
  for (final Map.Entry<String, Map<Address, CacheStatusResponse>> e :
      responsesByCache.entrySet()) {
    final ClusterCacheStatus cacheStatus = initCacheStatusIfAbsent(e.getKey());
    cs.submit(
        new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            cacheStatus.doMergePartitions(e.getValue(), clusterMembers, isMergeView);
            return null;
          }
        });
  }
  // Wait for one completion per submitted merge task.
  // NOTE(review): take() results are not get()-checked, so exceptions thrown
  // inside doMergePartitions are not propagated here — confirm they are
  // handled elsewhere.
  for (int i = 0; i < responsesByCache.size(); i++) {
    cs.take();
  }
}
private void blockAndDoRetries() { clients.blockUntilFinished(); // wait for any async commits to complete while (pending != null && pending.size() > 0) { Future<Object> future = null; try { future = completionService.take(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); log.error("blockAndDoRetries interrupted", e); } if (future == null) break; pending.remove(future); } doRetriesIfNeeded(); }
/**
 * Submits every thinker to a cached thread pool and prints each answer as it
 * completes. Task failures print their cause; the pool is shut down at the end.
 *
 * @throws InterruptedException if interrupted while waiting for a result
 */
public void run() throws InterruptedException {
  ExecutorService pool = Executors.newCachedThreadPool();
  CompletionService<String> answers = new ExecutorCompletionService<String>(pool);
  for (Thinker thinker : bewade) {
    answers.submit(thinker);
  }
  System.out.println("waiting for results");
  // One take() per submitted thinker, in completion order.
  for (int received = 0; received < bewade.length; received++) {
    try {
      System.out.println(answers.take().get());
    } catch (ExecutionException e) {
      e.getCause().printStackTrace();
    }
  }
  pool.shutdown();
}
// Sanity check: ten callables submitted to a four-thread pool; every result is
// retrieved in completion order and printed, then the pool is shut down.
@Test
public void testExecutor1() throws InterruptedException, ExecutionException {
  ExecutorService workers = Executors.newFixedThreadPool(4);
  CompletionService<String> completed = new ExecutorCompletionService<String>(workers);
  for (int task = 0; task < 10; task++) {
    final int id = task;
    completed.submit(
        new Callable<String>() {
          @Override
          public String call() throws Exception {
            return String.valueOf(id);
          }
        });
  }
  // Exactly one take() per submitted task.
  for (int received = 0; received < 10; received++) {
    System.out.println(completed.take().get());
  }
  workers.shutdown();
}
public static void main(String[] args) throws InterruptedException, ExecutionException { ExecutorService exec = Executors.newFixedThreadPool(AleepingBeauty_NUM); CompletionService<Integer> completionService = null; completionService = new ExecutorCompletionService<Integer>(exec); long t1 = System.currentTimeMillis(); for (int i = 0; i < AleepingBeauty_NUM; i++) { // 随机一个 睡美人i 0~20岁 睡眠时间0~1000 completionService.submit( new AleepingBeauty( "睡美人" + i, new Random().nextInt(20), new Random().nextInt(Max_Sleep_time))); } long t2 = System.currentTimeMillis(); System.out.println("任务添加完成"); for (int i = 0; i < 10; i++) { try { // 谁最先执行完成,直接返回 Future<Integer> f = completionService.take(); f.get(100, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { e.printStackTrace(); } catch (ExecutionException e) { e.printStackTrace(); } catch (TimeoutException e) { e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } } long t3 = System.currentTimeMillis(); System.out.println("任务添加时间:" + (t2 - t1)); System.out.println("任务添加完成:" + (t3 - t2)); // 启动一次顺序关闭,执行以前提交的任务,但不接受新任务。如果已经关闭,则调用没有其他作用。 exec.shutdown(); }
/**
 * Collects one region-statistics result per submitted task, in completion
 * order, adding each into the account-level aggregate. Failures of individual
 * tasks are logged and skipped.
 *
 * <p>Fix: the interrupt flag is now restored when an
 * {@link InterruptedException} is caught, instead of being swallowed.
 *
 * @param parallelTasks completion service the region tasks were submitted to
 * @param taskSize number of tasks that were submitted
 * @param accountMetricStatistics aggregate to add each region's statistics to
 */
private void collectMetrics(
    CompletionService<RegionMetricStatistics> parallelTasks,
    int taskSize,
    AccountMetricStatistics accountMetricStatistics) {
  for (int index = 0; index < taskSize; index++) {
    try {
      RegionMetricStatistics regionStats =
          parallelTasks.take().get(DEFAULT_THREAD_TIMEOUT, TimeUnit.SECONDS);
      accountMetricStatistics.add(regionStats);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // restore the interrupt flag
      LOGGER.error("Task interrupted. ", e);
    } catch (ExecutionException e) {
      LOGGER.error("Task execution failed. ", e);
    } catch (TimeoutException e) {
      LOGGER.error("Task timed out. ", e);
    }
  }
}
// Variant of the EP concurrency test where the session fires continuously on a
// background thread while nine barrier-synchronized manipulators mutate entry
// points; the barrier action (SegmentChecker) validates segment state.
@Test
public void testMultipleConcurrentEPs4() {
  final KieSession ksession = getKieSessionWith3Segments();
  List<String> results = new ArrayList<String>();
  ksession.setGlobal("results", results);
  EPManipulator4[] epManipulators = new EPManipulator4[9];
  // The barrier runs SegmentChecker once all nine manipulators arrive.
  CyclicBarrier barrier = new CyclicBarrier(9, new SegmentChecker(epManipulators));
  for (int i = 0; i < 9; i++) {
    epManipulators[i] = new EPManipulator4(ksession, i + 1, barrier);
  }
  // Fire continuously in the background for the whole test; halted at the end.
  new Thread() {
    public void run() {
      ksession.fireUntilHalt();
    }
  }.start();
  // 11 iterations so deleteIndex % 10 revisits 0 once more at the end.
  for (int deleteIndex = 0; deleteIndex < 11; deleteIndex++) {
    boolean success = true;
    CompletionService<Boolean> ecs = new ExecutorCompletionService<Boolean>(executor);
    for (int i = 0; i < 9; i++) {
      ecs.submit(epManipulators[i].setDeleteIndex(deleteIndex % 10));
    }
    // All nine manipulators must succeed each round.
    for (int i = 0; i < 9; i++) {
      try {
        success = ecs.take().get() && success;
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
    assertTrue(success);
  }
  ksession.halt();
}
@Override public void run() { if (AppConfig.DEBUG) Log.d(TAG, "downloadCompletionThread was started"); while (!isInterrupted()) { try { Downloader downloader = downloadExecutor.take().get(); if (AppConfig.DEBUG) Log.d(TAG, "Received 'Download Complete' - message."); removeDownload(downloader); DownloadStatus status = downloader.getResult(); boolean successful = status.isSuccessful(); final int type = status.getFeedfileType(); if (successful) { if (type == Feed.FEEDFILETYPE_FEED) { handleCompletedFeedDownload(downloader.getDownloadRequest()); } else if (type == FeedImage.FEEDFILETYPE_FEEDIMAGE) { handleCompletedImageDownload(status, downloader.getDownloadRequest()); } else if (type == FeedMedia.FEEDFILETYPE_FEEDMEDIA) { handleCompletedFeedMediaDownload(status, downloader.getDownloadRequest()); } } else { numberOfDownloads.decrementAndGet(); if (!successful && !status.isCancelled()) { Log.e(TAG, "Download failed"); saveDownloadStatus(status); } sendDownloadHandledIntent(); queryDownloadsAsync(); } } catch (InterruptedException e) { if (AppConfig.DEBUG) Log.d(TAG, "DownloadCompletionThread was interrupted"); } catch (ExecutionException e) { e.printStackTrace(); numberOfDownloads.decrementAndGet(); } } if (AppConfig.DEBUG) Log.d(TAG, "End of downloadCompletionThread"); }
private HttpUrl pingTunnel(ServiceJson serviceJson) { if (serviceJson == null || Util.isEmpty(serviceJson.relay_ip) || serviceJson.relay_port == 0) { return null; } // set timeout to 10 seconds OkHttpClient client = defaultClient .newBuilder() .connectTimeout(5, TimeUnit.SECONDS) .readTimeout(5, TimeUnit.SECONDS) .build(); String relayIp = serviceJson.relay_ip; int relayPort = serviceJson.relay_port; // tunnel address ExecutorService executor = Executors.newFixedThreadPool(10); CompletionService<String> service = new ExecutorCompletionService<>(executor); service.submit(createPingTask(client, relayIp, relayPort)); try { Future<String> future = service.take(); if (future != null) { String host = future.get(); if (!Util.isEmpty(host)) { return requestUrl.newBuilder().host(host).port(relayPort).build(); } } } catch (InterruptedException | ExecutionException ignored) { } // shutdown executors executor.shutdownNow(); return null; }
/**
 * Loads message stores concurrently and registers each loaded store under its
 * topic and partition in {@code this.stores}. Execution failures are
 * laundered and rethrown; the task list is cleared on completion.
 *
 * @param executor the pool to run the load tasks on
 * @param tasks one callable per store to load
 * @throws InterruptedException if interrupted while waiting for a store
 */
private void loadStoresInParallel(ExecutorService executor, List<Callable<MessageStore>> tasks)
    throws InterruptedException {
  CompletionService<MessageStore> completion =
      new ExecutorCompletionService<MessageStore>(executor);
  for (Callable<MessageStore> task : tasks) {
    completion.submit(task);
  }
  // Collect exactly one store per submitted task, in completion order.
  for (int remaining = tasks.size(); remaining > 0; remaining--) {
    try {
      MessageStore store = completion.take().get();
      // Lazily create the per-topic partition map on first sight of a topic.
      ConcurrentHashMap<Integer /* partition */, MessageStore> byPartition =
          this.stores.get(store.getTopic());
      if (byPartition == null) {
        byPartition = new ConcurrentHashMap<Integer, MessageStore>();
        this.stores.put(store.getTopic(), byPartition);
      }
      byPartition.put(store.getPartition(), store);
    } catch (ExecutionException e) {
      throw ThreadUtils.launderThrowable(e);
    }
  }
  tasks.clear();
}
// End-to-end broadcast test: 1000 publisher threads each post a random payload
// to one of ten random queues, then one subscriber thread per distinct queue
// pulls everything back; the pushed and pulled multimaps must match.
@Test
public void testPublishAndSubscribeBroadCastServiceWithSeparateThread() throws Exception {
  final Random random = new Random(System.currentTimeMillis());
  // Publisher: posts one random payload to a randomly chosen queue/binding and
  // records it in pushedQueueMap for the final comparison.
  final class LucasBroadCastServiceThread extends Thread {
    private LucasMessageBroadcastService lucasMessageBroadcastService;
    final String randomString = UUID.randomUUID().toString();
    String queueName;
    String bindKey;
    String data;

    public LucasBroadCastServiceThread(
        final LucasMessageBroadcastService lucasMessageBroadcastService,
        final String threadName,
        final int priority)
        throws Exception {
      super(threadName);
      super.setPriority(priority);
      this.lucasMessageBroadcastService = lucasMessageBroadcastService;
      // Pick one of ten queues at random; collisions between publishers are
      // expected and are why the maps are multimaps.
      final int randomNum = random.nextInt(10) + 1;
      this.queueName = TEST_QUEUE_PREFIX + randomNum;
      this.bindKey = TEST_QUEUE_PREFIX + ".Binding" + randomNum;
      this.data = randomString;
    }

    @Override
    public void run() {
      try {
        LOG.debug("Posting queueName: " + queueName + " and data: " + data);
        pushedQueueMap.put(queueName, data);
        this.lucasMessageBroadcastService.sendMessage(queueName, bindKey, data);
      } catch (Exception e) {
        LOG.error("Exception Generated ", e);
        throw new LucasRuntimeException(e);
      }
    }
  }
  // Subscriber: drains all messages from one queue within a time window and
  // records them in pulledQueueMap.
  final class LucasBroadCastClientThread extends Thread {
    private LucasMessageBroadcastClientService lucasMessageBroadcastClientService;
    private String lucasBroadCastQueue;
    private String lucasBroadCastBinding;

    public LucasBroadCastClientThread(
        LucasMessageBroadcastClientService lucasMessageBroadcastClientService,
        String threadName,
        int threadPriority,
        String lucasBroadCastQueue,
        String lucasBroadCastBinding) {
      super(threadName);
      super.setPriority(threadPriority);
      this.lucasMessageBroadcastClientService = lucasMessageBroadcastClientService;
      this.lucasBroadCastBinding = lucasBroadCastBinding;
      this.lucasBroadCastQueue = lucasBroadCastQueue;
    }

    @Override
    public void run() {
      try {
        String[] stringArray =
            this.lucasMessageBroadcastClientService.receiveAllMessagesWithinTimePeriod(
                this.lucasBroadCastQueue, this.lucasBroadCastBinding, 1L, 5000L);
        if (stringArray != null) {
          for (String string : stringArray) {
            pulledQueueMap.put(this.lucasBroadCastQueue, string);
          }
        }
      } catch (Exception e) {
        LOG.error("Exception Generated ", e);
        throw new LucasRuntimeException(e);
      }
    }
  }
  //
  // Publisher Threads
  // NOTE(review): the raw ExecutorCompletionService types below generate
  // unchecked warnings; CompletionService<Void> would be cleaner.
  final int maxThreads = 1000;
  ExecutorService pushExecutor = Executors.newFixedThreadPool(maxThreads);
  CompletionService<?> pushCompletion = new ExecutorCompletionService(pushExecutor);
  for (int i = 0; i < maxThreads; i++) {
    LucasBroadCastServiceThread thread =
        new LucasBroadCastServiceThread(
            this.lucasBroadCastService, "LucasBroadCastServiceThread" + i, 5);
    pushCompletion.submit(thread, null);
  }
  // Wait for every publisher to finish before starting subscribers.
  for (int i = 0; i < maxThreads; ++i) {
    pushCompletion.take();
  }
  pushExecutor.shutdown();
  // Subscriber threads
  int j = 0;
  // Get the queueNames stored while publishing...
  Set<String> keySet = pushedQueueMap.keySet();
  if (keySet != null) {
    ExecutorService pullExecutor = Executors.newFixedThreadPool(keySet.size());
    CompletionService<?> pullCompletion = new ExecutorCompletionService(pullExecutor);
    for (String queueName : keySet) {
      // ...and receive on each queueName
      // NOTE(review): the binding suffix uses the loop counter j, not the
      // numeric suffix of queueName — confirm the broadcast service ignores
      // the binding on receive, otherwise these may not correspond.
      LucasBroadCastClientThread thread =
          new LucasBroadCastClientThread(
              lucasMessageBroadcastClientService,
              "LucasBroadCastClientThread" + j,
              5,
              queueName,
              TEST_QUEUE_PREFIX + ".Binding" + j);
      pullCompletion.submit(thread, null);
      j++;
    }
    for (int i = 0; i < keySet.size(); ++i) {
      pullCompletion.take();
    }
    pullExecutor.shutdown();
  }
  LOG.debug("Pushed values:");
  CollectionsUtilService.dumpMultiMapNumbers(pushedQueueMap);
  LOG.debug("Pulled values:");
  CollectionsUtilService.dumpMultiMapNumbers(pulledQueueMap);
  // Assert
  junit.framework.Assert.assertTrue(
      "The pushed and pulled values do not match!",
      CollectionsUtilService.compareMultiMaps(pushedQueueMap, pulledQueueMap));
}
// Generates one rule per entry point (EP0..EP9), feeds each entry point from a
// separate thread (or serially when PARALLEL is false), and checks that every
// rule fires exactly once.
@Test
public void testMultipleConcurrentEPs() {
  final boolean PARALLEL = true;
  final int EP_NR = 10;
  // Build the DRL: one results list and one rule per entry point.
  StringBuilder sb = new StringBuilder();
  sb.append("import org.drools.compiler.StockTick;\n");
  for (int i = 0; i < EP_NR; i++) {
    sb.append("global java.util.List results").append(i).append(";\n");
  }
  sb.append("declare StockTick\n" + "    @role( event )\n" + "end\n");
  for (int i = 0; i < EP_NR; i++) {
    sb.append(
        "rule \"R"
            + i
            + "\"\n"
            + "when\n"
            + "    $name : String( this.startsWith(\"A\") )\n"
            + "    $st : StockTick( company == $name, price > 10 ) from entry-point EP"
            + i
            + "\n"
            + "then\n"
            + "    results"
            + i
            + ".add( $st );\n"
            + "end\n");
  }
  KnowledgeBase kbase = loadKnowledgeBaseFromString(sb.toString());
  KieSession ksession = kbase.newStatefulKnowledgeSession();
  boolean success = true;
  if (PARALLEL) {
    // One manipulator per entry point, running concurrently.
    CompletionService<Boolean> ecs = new ExecutorCompletionService<Boolean>(executor);
    for (int i = 0; i < EP_NR; i++) {
      ecs.submit(new EPManipulator(ksession, i));
    }
    for (int i = 0; i < EP_NR; i++) {
      try {
        success = ecs.take().get() && success;
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  } else {
    // Serial fallback for debugging the same manipulations.
    for (int i = 0; i < EP_NR; i++) {
      try {
        success = new EPManipulator(ksession, i).call() && success;
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  }
  assertTrue(success);
  // Each rule fires once, leaving one StockTick per results list.
  assertEquals(EP_NR, ksession.fireAllRules());
  for (int i = 0; i < EP_NR; i++) {
    assertEquals(1, ((List) ksession.getGlobal("results" + i)).size());
  }
  ksession.dispose();
}