// Concurrent insertion & then iterator test. public static void testNonBlockingIdentityHashMapIterator() throws InterruptedException { final int ITEM_COUNT1 = 1000; final int THREAD_COUNT = 5; final int PER_CNT = ITEM_COUNT1 / THREAD_COUNT; final int ITEM_COUNT = PER_CNT * THREAD_COUNT; // fix roundoff for odd thread counts NonBlockingIdentityHashMap<Long, TestKey> nbhml = new NonBlockingIdentityHashMap<Long, TestKey>(); // use a barrier to open the gate for all threads at once to avoid rolling // start and no actual concurrency final CyclicBarrier barrier = new CyclicBarrier(THREAD_COUNT); final ExecutorService ex = Executors.newFixedThreadPool(THREAD_COUNT); final CompletionService<Object> co = new ExecutorCompletionService<Object>(ex); for (int i = 0; i < THREAD_COUNT; i++) { co.submit(new NBHMLFeeder(nbhml, PER_CNT, barrier, i * PER_CNT)); } for (int retCount = 0; retCount < THREAD_COUNT; retCount++) { co.take(); } ex.shutdown(); assertEquals("values().size()", ITEM_COUNT, nbhml.values().size()); assertEquals("entrySet().size()", ITEM_COUNT, nbhml.entrySet().size()); int itemCount = 0; for (TestKey K : nbhml.values()) itemCount++; assertEquals("values().iterator() count", ITEM_COUNT, itemCount); }
public NonBlockingIdentityHashMap<Long, TestKey> getMapMultithreaded() throws InterruptedException, ExecutionException { final int threadCount = _items.keySet().size(); final NonBlockingIdentityHashMap<Long, TestKey> map = new NonBlockingIdentityHashMap<Long, TestKey>(); // use a barrier to open the gate for all threads at once to avoid rolling start and no actual // concurrency final CyclicBarrier barrier = new CyclicBarrier(threadCount); final ExecutorService ex = Executors.newFixedThreadPool(threadCount); final CompletionService<Integer> co = new ExecutorCompletionService<Integer>(ex); for (Integer type : _items.keySet()) { // A linked-list of things to insert List<TestKey> items = _items.get(type); TestKeyFeederThread feeder = new TestKeyFeederThread(type, items, map, barrier); co.submit(feeder); } // wait for all threads to return int itemCount = 0; for (int retCount = 0; retCount < threadCount; retCount++) { final Future<Integer> result = co.take(); itemCount += result.get(); } ex.shutdown(); return map; }
@Override public void fillCompletionVariants(CompletionParameters parameters, CompletionResultSet result) { PsiFile file = parameters.getOriginalFile(); final Consumer<Task> consumer = file.getUserData(KEY); if (CompletionService.getCompletionService().getAdvertisementText() == null) { final String shortcut = getActionShortcut(IdeActions.ACTION_QUICK_JAVADOC); if (shortcut != null) { CompletionService.getCompletionService() .setAdvertisementText( "Pressing " + shortcut + " would show task description and comments"); } } if (consumer != null) { result.stopHere(); String text = parameters.getOriginalFile().getText(); int i = text.lastIndexOf(' ', parameters.getOffset() - 1) + 1; final String prefix = text.substring(i, parameters.getOffset()); if (parameters.getInvocationCount() == 0 && !file.getUserData(AUTO_POPUP_KEY)) { // is autopopup return; } result = result.withPrefixMatcher(new PlainPrefixMatcher(prefix)); final TaskSearchSupport searchSupport = new TaskSearchSupport(file.getProject()); List<Task> items = searchSupport.getItems(prefix, true); addCompletionElements(result, consumer, items, -10000); Future<List<Task>> future = ApplicationManager.getApplication() .executeOnPooledThread( new Callable<List<Task>>() { @Override public List<Task> call() { return searchSupport.getItems(prefix, false); } }); while (true) { try { List<Task> tasks = future.get(100, TimeUnit.MILLISECONDS); if (tasks != null) { addCompletionElements(result, consumer, tasks, 0); return; } } catch (ProcessCanceledException e) { throw e; } catch (Exception ignore) { } ProgressManager.checkCanceled(); } } }
/**
 * Waits for completed requests. Once the first request has been taken, the method will wait
 * WAIT_TIMEOUT ms longer to collect more completed requests.
 *
 * @return Collected feeds or null if the method has been interrupted during the first waiting
 *     period.
 */
private List<Feed> collectCompletedRequests() {
  List<Feed> results = new LinkedList<Feed>();
  DownloadRequester requester = DownloadRequester.getInstance();
  int tasks = 0;
  try {
    // Block until at least one completed request is available; each taken
    // request is handed to the parser pool and counted so its result can be
    // collected below.
    DownloadRequest request = completedRequests.take();
    parserService.submit(new FeedParserTask(request));
    tasks++;
  } catch (InterruptedException e) {
    // Interrupted before anything was collected: caller gets null per contract.
    return null;
  }
  tasks += pollCompletedDownloads();
  isCollectingRequests = true;
  if (requester.isDownloadingFeeds()) {
    // wait for completion of more downloads
    long startTime = System.currentTimeMillis();
    long currentTime = startTime;
    // The loop guard keeps (startTime + WAIT_TIMEOUT - currentTime) > 0, so
    // sleep() never receives a negative argument.
    while (requester.isDownloadingFeeds() && (currentTime - startTime) < WAIT_TIMEOUT) {
      try {
        if (BuildConfig.DEBUG)
          Log.d(TAG, "Waiting for " + (startTime + WAIT_TIMEOUT - currentTime) + " ms");
        sleep(startTime + WAIT_TIMEOUT - currentTime);
      } catch (InterruptedException e) {
        // An interrupt here is used as a wake-up signal: drain whatever has
        // completed so far, then re-evaluate the loop condition.
        if (BuildConfig.DEBUG) Log.d(TAG, "interrupted while waiting for more downloads");
        tasks += pollCompletedDownloads();
      } finally {
        currentTime = System.currentTimeMillis();
      }
    }
    tasks += pollCompletedDownloads();
  }
  isCollectingRequests = false;
  // Collect one parser result per submitted task; failed or interrupted
  // results are logged and skipped rather than aborting the batch.
  for (int i = 0; i < tasks; i++) {
    try {
      Feed f = parserService.take().get();
      if (f != null) {
        results.add(f);
      }
    } catch (InterruptedException e) {
      e.printStackTrace();
    } catch (ExecutionException e) {
      e.printStackTrace();
    }
  }
  return results;
}
/**
 * Drains every currently completed download request and submits a parser task
 * for each.
 *
 * <p>Fix: the previous loop compared its index against
 * {@code completedRequests.size()} while simultaneously shrinking the queue
 * with {@code poll()}, so roughly half of the queued requests were never
 * submitted. Draining with {@code poll()} until it returns {@code null}
 * processes each queued element exactly once.
 *
 * @return the number of parser tasks submitted
 */
private int pollCompletedDownloads() {
  int tasks = 0;
  DownloadRequest request;
  while ((request = completedRequests.poll()) != null) {
    parserService.submit(new FeedParserTask(request));
    tasks++;
  }
  return tasks;
}
/**
 * Smoke test for ExecutorCompletionService: submits ten trivial callables and
 * prints their results in completion order (not submission order).
 */
@Test
public void testExecutor1() throws InterruptedException, ExecutionException {
  ExecutorService pool = Executors.newFixedThreadPool(4);
  CompletionService<String> tasks = new ExecutorCompletionService<String>(pool);
  // Each task simply returns its own index as a string.
  for (int i = 0; i < 10; i++) {
    final int index = i;
    tasks.submit(
        new Callable<String>() {
          @Override
          public String call() throws Exception {
            return String.valueOf(index);
          }
        });
  }
  // take() yields futures as they finish.
  for (int i = 0; i < 10; i++) {
    System.out.println(tasks.take().get());
  }
  pool.shutdown();
}
/**
 * Reacts to the end of item calculation: hides the lookup when nothing was
 * found, otherwise either shows the lookup, auto-inserts the single chosen
 * item, or closes the lookup, according to the auto-completion decision.
 */
protected void completionFinished(
    final int offset1,
    final int offset2,
    final CompletionProgressIndicator indicator,
    final LookupElement[] items,
    boolean hasModifiers) {
  if (items.length == 0) {
    // No items at all: hide the lookup and let the indicator report emptiness.
    LookupManager.getInstance(indicator.getProject()).hideActiveLookup();
    indicator.handleEmptyLookup(true);
    checkNotSync(indicator, items);
    return;
  }
  LOG.assertTrue(!indicator.isRunning(), "running");
  LOG.assertTrue(!indicator.isCanceled(), "canceled");
  indicator.getLookup().refreshUi(true, false);
  final AutoCompletionDecision decision = shouldAutoComplete(indicator, items);
  if (decision == AutoCompletionDecision.SHOW_LOOKUP) {
    CompletionServiceImpl.setCompletionPhase(new CompletionPhase.ItemsCalculated(indicator));
    indicator.getLookup().setCalculating(false);
    indicator.showLookup();
  } else if (decision instanceof AutoCompletionDecision.InsertItem) {
    // Capture the document state first so the insertion can be undone back to
    // the bare prefix if needed.
    final Runnable restorePrefix = rememberDocumentState(indicator.getEditor());
    final LookupElement item = ((AutoCompletionDecision.InsertItem) decision).getElement();
    CommandProcessor.getInstance()
        .executeCommand(
            indicator.getProject(),
            new Runnable() {
              @Override
              public void run() {
                indicator.setMergeCommand();
                indicator.getLookup().finishLookup(Lookup.AUTO_INSERT_SELECT_CHAR, item);
              }
            },
            "Autocompletion",
            null);
    // the insert handler may have started a live template with completion
    if (CompletionService.getCompletionService().getCurrentCompletion() == null
        && !ApplicationManager.getApplication().isUnitTestMode()) {
      CompletionServiceImpl.setCompletionPhase(
          hasModifiers
              ? new CompletionPhase.InsertedSingleItem(indicator, restorePrefix)
              : CompletionPhase.NoCompletion);
    }
    checkNotSync(indicator, items);
  } else if (decision == AutoCompletionDecision.CLOSE_LOOKUP) {
    LookupManager.getInstance(indicator.getProject()).hideActiveLookup();
    checkNotSync(indicator, items);
  }
}
/**
 * Download-completion loop: takes each finished Downloader from the executor,
 * dispatches successful results to the per-type handlers, and routes failures
 * to retry, authentication-notification, or error handling.
 */
@Override
public void run() {
  if (BuildConfig.DEBUG) Log.d(TAG, "downloadCompletionThread was started");
  while (!isInterrupted()) {
    try {
      Downloader downloader = downloadExecutor.take().get();
      if (BuildConfig.DEBUG) Log.d(TAG, "Received 'Download Complete' - message.");
      removeDownload(downloader);
      DownloadStatus status = downloader.getResult();
      boolean successful = status.isSuccessful();
      final int type = status.getFeedfileType();
      if (successful) {
        if (type == Feed.FEEDFILETYPE_FEED) {
          handleCompletedFeedDownload(downloader.getDownloadRequest());
        } else if (type == FeedImage.FEEDFILETYPE_FEEDIMAGE) {
          handleCompletedImageDownload(status, downloader.getDownloadRequest());
        } else if (type == FeedMedia.FEEDFILETYPE_FEEDMEDIA) {
          handleCompletedFeedMediaDownload(status, downloader.getDownloadRequest());
        }
      } else {
        numberOfDownloads.decrementAndGet();
        if (!status.isCancelled()) {
          if (status.getReason() == DownloadError.ERROR_UNAUTHORIZED) {
            postAuthenticationNotification(downloader.getDownloadRequest());
          } else if (status.getReason() == DownloadError.ERROR_HTTP_DATA_ERROR
              && Integer.valueOf(status.getReasonDetailed())
                  == HttpStatus.SC_REQUESTED_RANGE_NOT_SATISFIABLE) {
            // NOTE(review): assumes getReasonDetailed() holds a numeric HTTP
            // code for data errors; a non-numeric detail would throw
            // NumberFormatException here — TODO confirm against Downloader.
            Log.d(TAG, "Requested invalid range, restarting download from the beginning");
            FileUtils.deleteQuietly(new File(downloader.getDownloadRequest().getDestination()));
            DownloadRequester.getInstance()
                .download(DownloadService.this, downloader.getDownloadRequest());
          } else {
            Log.e(TAG, "Download failed");
            saveDownloadStatus(status);
            handleFailedDownload(status, downloader.getDownloadRequest());
          }
        }
        sendDownloadHandledIntent();
        queryDownloadsAsync();
      }
    } catch (InterruptedException e) {
      if (BuildConfig.DEBUG) Log.d(TAG, "DownloadCompletionThread was interrupted");
      // Fix: take() clears the interrupt flag when it throws, so without
      // restoring it isInterrupted() stayed false and the loop could never
      // terminate via interrupt. Re-interrupt so the while condition sees the
      // request and the thread exits cleanly.
      Thread.currentThread().interrupt();
    } catch (ExecutionException e) {
      e.printStackTrace();
      numberOfDownloads.decrementAndGet();
    }
  }
  if (BuildConfig.DEBUG) Log.d(TAG, "End of downloadCompletionThread");
}
/**
 * Runs the actual completion computation and returns the resulting lookup
 * elements, checking for cancellation between each major phase.
 */
private LookupElement[] calculateItems(
    CompletionInitializationContext initContext, WeighingDelegate weigher) {
  duringCompletion(initContext);
  ProgressManager.checkCanceled();
  final LookupElement[] items =
      CompletionService.getCompletionService().performCompletion(myParameters, weigher);
  ProgressManager.checkCanceled();
  weigher.waitFor();
  ProgressManager.checkCanceled();
  return items;
}
private HttpUrl pingTunnel(ServiceJson serviceJson) { if (serviceJson == null || Util.isEmpty(serviceJson.relay_ip) || serviceJson.relay_port == 0) { return null; } // set timeout to 10 seconds OkHttpClient client = defaultClient .newBuilder() .connectTimeout(5, TimeUnit.SECONDS) .readTimeout(5, TimeUnit.SECONDS) .build(); String relayIp = serviceJson.relay_ip; int relayPort = serviceJson.relay_port; // tunnel address ExecutorService executor = Executors.newFixedThreadPool(10); CompletionService<String> service = new ExecutorCompletionService<>(executor); service.submit(createPingTask(client, relayIp, relayPort)); try { Future<String> future = service.take(); if (future != null) { String host = future.get(); if (!Util.isEmpty(host)) { return requestUrl.newBuilder().host(host).port(relayPort).build(); } } } catch (InterruptedException | ExecutionException ignored) { } // shutdown executors executor.shutdownNow(); return null; }
/**
 * On extended (second-invocation) completion, re-runs the contributor chain
 * with the parameters delegated to class-name completion and forwards every
 * produced variant into {@code result}.
 */
@Override
public void fillCompletionVariants(
    CompletionParameters parameters, final CompletionResultSet result) {
  if (!parameters.isExtendedCompletion()) {
    return;
  }
  final Consumer<CompletionResult> passThrough =
      new Consumer<CompletionResult>() {
        public void consume(final CompletionResult completionResult) {
          result.passResult(completionResult);
        }
      };
  CompletionService.getCompletionService()
      .getVariantsFromContributors(parameters.delegateToClassName(), null, passThrough);
}
/**
 * Handles an enqueue intent: extracts the download request, builds a
 * downloader for it and, when one could be created, registers and submits it.
 * Always refreshes the download query afterwards.
 *
 * @throws IllegalArgumentException when the intent carries no request extra
 */
private void onDownloadQueued(Intent intent) {
  if (AppConfig.DEBUG) Log.d(TAG, "Received enqueue request");
  final DownloadRequest request = intent.getParcelableExtra(EXTRA_REQUEST);
  if (request == null) {
    throw new IllegalArgumentException("ACTION_ENQUEUE_DOWNLOAD intent needs request extra");
  }
  final Downloader downloader = getDownloader(request);
  if (downloader == null) {
    queryDownloads();
    return;
  }
  numberOfDownloads.incrementAndGet();
  downloads.add(downloader);
  downloadExecutor.submit(downloader);
  sendBroadcast(new Intent(ACTION_DOWNLOADS_CONTENT_CHANGED));
  queryDownloads();
}
/**
 * Download-completion loop: takes each finished Downloader from the executor,
 * dispatches successful results to the per-type handlers and records failures.
 */
@Override
public void run() {
  if (AppConfig.DEBUG) Log.d(TAG, "downloadCompletionThread was started");
  while (!isInterrupted()) {
    try {
      Downloader downloader = downloadExecutor.take().get();
      if (AppConfig.DEBUG) Log.d(TAG, "Received 'Download Complete' - message.");
      removeDownload(downloader);
      DownloadStatus status = downloader.getResult();
      boolean successful = status.isSuccessful();
      final int type = status.getFeedfileType();
      if (successful) {
        if (type == Feed.FEEDFILETYPE_FEED) {
          handleCompletedFeedDownload(downloader.getDownloadRequest());
        } else if (type == FeedImage.FEEDFILETYPE_FEEDIMAGE) {
          handleCompletedImageDownload(status, downloader.getDownloadRequest());
        } else if (type == FeedMedia.FEEDFILETYPE_FEEDMEDIA) {
          handleCompletedFeedMediaDownload(status, downloader.getDownloadRequest());
        }
      } else {
        numberOfDownloads.decrementAndGet();
        // 'successful' is already known to be false in this branch; the old
        // '!successful &&' test was redundant.
        if (!status.isCancelled()) {
          Log.e(TAG, "Download failed");
          saveDownloadStatus(status);
        }
        sendDownloadHandledIntent();
        queryDownloadsAsync();
      }
    } catch (InterruptedException e) {
      if (AppConfig.DEBUG) Log.d(TAG, "DownloadCompletionThread was interrupted");
      // Fix: take() clears the interrupt flag when it throws, so without
      // restoring it isInterrupted() stayed false and the loop could never
      // terminate via interrupt. Re-interrupt so the while condition sees the
      // request and the thread exits cleanly.
      Thread.currentThread().interrupt();
    } catch (ExecutionException e) {
      e.printStackTrace();
      numberOfDownloads.decrementAndGet();
    }
  }
  if (AppConfig.DEBUG) Log.d(TAG, "End of downloadCompletionThread");
}
/**
 * Finishes the active lookup using {@code charTyped} as the completion
 * character, batching all resulting scroll activity into a single viewport
 * update.
 *
 * <p>NOTE(review): {@code smartUndo} is hard-coded to {@code false} (its real
 * condition is commented out below), so the justTypeChar/undoEvents path is
 * currently dead code; {@code process} and {@code sm} are only referenced by
 * that commented-out expression.
 */
private static void finishLookup(final char charTyped, @NotNull final LookupImpl lookup) {
  final Editor editor = lookup.getEditor();
  FeatureUsageTracker.getInstance()
      .triggerFeatureUsed(CodeCompletionFeatures.EDITING_COMPLETION_FINISH_BY_DOT_ETC);
  CompletionProcess process = CompletionService.getCompletionService().getCurrentCompletion();
  SelectionModel sm = editor.getSelectionModel();
  final boolean smartUndo = false;
  // !sm.hasSelection() && !sm.hasBlockSelection() && process != null &&
  // process.isAutopopupCompletion();
  final ScrollingModelEx scrollingModel = (ScrollingModelEx) editor.getScrollingModel();
  // Defer scrolling until flushViewportChanges() in the finally block.
  scrollingModel.accumulateViewportChanges();
  try {
    final LinkedList<EditorChangeAction> events =
        smartUndo ? justTypeChar(charTyped, lookup, editor) : null;
    if (lookup.isLookupDisposed()) { // if justTypeChar corrupted the start offset
      return;
    }
    CommandProcessor.getInstance()
        .executeCommand(
            editor.getProject(),
            new Runnable() {
              @Override
              public void run() {
                if (smartUndo && !undoEvents(lookup, events)) {
                  return;
                }
                lookup.finishLookup(charTyped);
              }
            },
            null,
            "Undo inserting the completion char and select the item");
  } finally {
    scrollingModel.flushViewportChanges();
  }
}
/**
 * Scans every selected file for the configured license header in parallel and
 * reports the outcome of each file (unknown type, header present, header
 * missing) to the given callback.
 *
 * @param callback receives one notification per processed document
 * @throws MojoExecutionException when classpath resolution fails or a worker
 *     threw one
 * @throws MojoFailureException when a worker threw one
 */
@SuppressWarnings({"unchecked"})
public final void execute(final Callback callback)
    throws MojoExecutionException, MojoFailureException {
  if (!skip) {
    if (header == null) {
      warn("No header file specified to check for license");
      return;
    }
    if (!strictCheck) {
      warn(
          "Property 'strictCheck' is not enabled. Please consider adding <strictCheck>true</strictCheck> in your pom.xml file.");
      warn("See http://mycila.github.io/license-maven-plugin for more information.");
    }
    finder = new ResourceFinder(basedir);
    try {
      finder.setCompileClassPath(project.getCompileClasspathElements());
    } catch (DependencyResolutionRequiredException e) {
      throw new MojoExecutionException(e.getMessage(), e);
    }
    finder.setPluginClassPath(getClass().getClassLoader());
    // The reference header every document is checked against.
    final Header h = new Header(finder.findResource(this.header), encoding, headerSections);
    debug("Header %s:\n%s", h.getLocation(), h);
    if (this.validHeaders == null) {
      this.validHeaders = new String[0];
    }
    // Alternative headers that are also accepted as valid.
    final List<Header> validHeaders = new ArrayList<Header>(this.validHeaders.length);
    for (String validHeader : this.validHeaders) {
      validHeaders.add(new Header(finder.findResource(validHeader), encoding, headerSections));
    }
    // Discover PropertiesProvider implementations on the plugin classpath.
    final List<PropertiesProvider> propertiesProviders = new LinkedList<PropertiesProvider>();
    for (PropertiesProvider provider :
        ServiceLoader.load(
            PropertiesProvider.class, Thread.currentThread().getContextClassLoader())) {
      propertiesProviders.add(provider);
    }
    // Per-document property resolution: merged mojo properties first, then
    // each provider may add or remove entries (a null value removes the key).
    final DocumentPropertiesLoader propertiesLoader =
        new DocumentPropertiesLoader() {
          @Override
          public Properties load(Document document) {
            Properties props = new Properties();
            for (Map.Entry<String, String> entry : mergeProperties(document).entrySet()) {
              if (entry.getValue() != null) {
                props.setProperty(entry.getKey(), entry.getValue());
              } else {
                props.remove(entry.getKey());
              }
            }
            for (PropertiesProvider provider : propertiesProviders) {
              try {
                final Map<String, String> providerProperties =
                    provider.getAdditionalProperties(AbstractLicenseMojo.this, props, document);
                if (getLog().isDebugEnabled()) {
                  getLog()
                      .debug(
                          "provider: "
                              + provider.getClass()
                              + " brought new properties\n"
                              + providerProperties);
                }
                for (Map.Entry<String, String> entry : providerProperties.entrySet()) {
                  if (entry.getValue() != null) {
                    props.setProperty(entry.getKey(), entry.getValue());
                  } else {
                    props.remove(entry.getKey());
                  }
                }
              } catch (Exception e) {
                // A broken provider must not abort the whole scan.
                getLog().warn("failure occured while calling " + provider.getClass(), e);
              }
            }
            return props;
          }
        };
    final DocumentFactory documentFactory =
        new DocumentFactory(
            basedir,
            buildMapping(),
            buildHeaderDefinitions(),
            encoding,
            keywords,
            propertiesLoader);
    int nThreads = (int) (Runtime.getRuntime().availableProcessors() * concurrencyFactor);
    ExecutorService executorService = Executors.newFixedThreadPool(nThreads);
    CompletionService completionService = new ExecutorCompletionService(executorService);
    int count = 0;
    debug("Number of execution threads: %s", nThreads);
    try {
      // One task per selected file; each task classifies the document and
      // notifies the callback accordingly.
      for (final String file : listSelectedFiles()) {
        completionService.submit(
            new Runnable() {
              @Override
              public void run() {
                Document document = documentFactory.createDocuments(file);
                debug(
                    "Selected file: %s [header style: %s]",
                    document.getFilePath(), document.getHeaderDefinition());
                if (document.isNotSupported()) {
                  callback.onUnknownFile(document, h);
                } else if (document.is(h)) {
                  // The file IS the header file itself; nothing to check.
                  debug("Skipping header file: %s", document.getFilePath());
                } else if (document.hasHeader(h, strictCheck)) {
                  callback.onExistingHeader(document, h);
                } else {
                  // Accept any of the configured alternative headers too.
                  boolean headerFound = false;
                  for (Header validHeader : validHeaders) {
                    if (headerFound = document.hasHeader(validHeader, strictCheck)) {
                      callback.onExistingHeader(document, h);
                      break;
                    }
                  }
                  if (!headerFound) {
                    callback.onHeaderNotFound(document, h);
                  }
                }
              }
            },
            null);
        count++;
      }
      // Reap every task; unwrap worker exceptions into the mojo's checked
      // exception types where possible.
      while (count-- > 0) {
        try {
          completionService.take().get();
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        } catch (ExecutionException e) {
          Throwable cause = e.getCause();
          if (cause instanceof Error) {
            throw (Error) cause;
          }
          if (cause instanceof MojoExecutionException) {
            throw (MojoExecutionException) cause;
          }
          if (cause instanceof MojoFailureException) {
            throw (MojoFailureException) cause;
          }
          if (cause instanceof RuntimeException) {
            throw (RuntimeException) cause;
          }
          throw new RuntimeException(cause.getMessage(), cause);
        }
      }
    } finally {
      executorService.shutdownNow();
    }
  }
}
public HttpUrl pingDSM(ServerInfoJson infoJson) { // set timeout to 5 seconds final OkHttpClient client = defaultClient .newBuilder() .connectTimeout(5, TimeUnit.SECONDS) .readTimeout(5, TimeUnit.SECONDS) .build(); ServerJson serverJson = infoJson.server; if (serverJson == null) { throw new IllegalArgumentException("serverJson == null"); } ServiceJson serviceJson = infoJson.service; if (serviceJson == null) { throw new IllegalArgumentException("serviceJson == null"); } int port = serviceJson.port; int externalPort = serviceJson.ext_port; // internal address(192.168.x.x/10.x.x.x) ExecutorService executor = Executors.newFixedThreadPool(10); CompletionService<String> internalService = new ExecutorCompletionService<>(executor); List<InterfaceJson> ifaces = serverJson._interface; AtomicInteger internalCount = new AtomicInteger(0); if (ifaces != null) { for (final InterfaceJson iface : ifaces) { internalService.submit(createPingTask(client, iface.ip, port)); internalCount.incrementAndGet(); if (iface.ipv6 != null) { for (Ipv6Json ipv6 : iface.ipv6) { String ipv6Address = "[" + ipv6.address + "]"; internalService.submit(createPingTask(client, ipv6Address, port)); internalCount.incrementAndGet(); } } } } // host address(ddns/fqdn) ExecutorCompletionService<String> hostService = new ExecutorCompletionService<>(executor); AtomicInteger hostCount = new AtomicInteger(0); // ddns if (!Util.isEmpty(serverJson.ddns) && !serverJson.ddns.equals("NULL")) { hostService.submit(createPingTask(client, serverJson.ddns, port)); hostCount.incrementAndGet(); } // fqdn if (!Util.isEmpty(serverJson.fqdn) && !serverJson.fqdn.equals("NULL")) { hostService.submit(createPingTask(client, serverJson.fqdn, port)); hostCount.incrementAndGet(); } // external address(public ip address) ExecutorCompletionService<String> externalService = new ExecutorCompletionService<>(executor); AtomicInteger externalCount = new AtomicInteger(0); if (serverJson.external != null) { String ip = serverJson.external.ip; if 
(!Util.isEmpty(ip)) { externalService.submit( createPingTask(client, ip, (externalPort != 0) ? externalPort : port)); externalCount.incrementAndGet(); } String ipv6 = serverJson.external.ipv6; if (!Util.isEmpty(ipv6) && !ipv6.equals("::")) { externalService.submit( createPingTask(client, "[" + ipv6 + "]", (externalPort != 0) ? externalPort : port)); externalCount.incrementAndGet(); } } while (internalCount.getAndDecrement() > 0) { try { Future<String> future = internalService.take(); if (future != null) { String host = future.get(); if (!Util.isEmpty(host)) { return requestUrl.newBuilder().host(host).port(port).build(); } } } catch (InterruptedException | ExecutionException ignored) { } } while (hostCount.getAndDecrement() > 0) { try { Future<String> future = hostService.take(); if (future != null) { String host = future.get(); if (!Util.isEmpty(host)) { return requestUrl.newBuilder().host(host).port(port).build(); } } } catch (InterruptedException | ExecutionException ignored) { } } while (externalCount.getAndDecrement() > 0) { try { Future<String> future = externalService.take(); if (future != null) { String host = future.get(); if (!Util.isEmpty(host)) { return requestUrl.newBuilder().host(host).port(port).build(); } } } catch (InterruptedException | ExecutionException ignored) { // ignored.printStackTrace(); } } // shutdown executors executor.shutdownNow(); return null; }
/**
 * {@inheritDoc}
 *
 * <p>Executes the data flow level by level: builders whose inputs intersect
 * the active (changed) data set are submitted to the completion service, their
 * outputs are merged back into the working data set, and — when looping is
 * enabled — newly generated data seeds the next iteration until the target
 * data appears or nothing new is produced.
 */
@Override
protected DataExecutionResponse run(
    DataBuilderContext dataBuilderContext,
    DataFlowInstance dataFlowInstance,
    DataDelta dataDelta,
    DataFlow dataFlow,
    DataBuilderFactory builderFactory)
    throws DataBuilderFrameworkException, DataValidationException {
  CompletionService<DataContainer> completionExecutor =
      new ExecutorCompletionService<DataContainer>(executorService);
  ExecutionGraph executionGraph = dataFlow.getExecutionGraph();
  DataSet dataSet = dataFlowInstance.getDataSet().accessor().copy(); // Create own copy to work with
  DataSetAccessor dataSetAccessor = DataSet.accessor(dataSet);
  dataSetAccessor.merge(dataDelta);
  Map<String, Data> responseData = Maps.newTreeMap();
  // Names of data that changed in this iteration; drives builder selection.
  Set<String> activeDataSet = Sets.newHashSet();
  for (Data data : dataDelta.getDelta()) {
    activeDataSet.add(data.getData());
  }
  List<List<DataBuilderMeta>> dependencyHierarchy = executionGraph.getDependencyHierarchy();
  Set<String> newlyGeneratedData = Sets.newHashSet();
  // Shared with BuilderRunner workers, hence the synchronized wrapper.
  Set<DataBuilderMeta> processedBuilders =
      Collections.synchronizedSet(Sets.<DataBuilderMeta>newHashSet());
  while (true) {
    for (List<DataBuilderMeta> levelBuilders : dependencyHierarchy) {
      List<Future<DataContainer>> dataFutures = Lists.newArrayList();
      for (DataBuilderMeta builderMeta : levelBuilders) {
        if (processedBuilders.contains(builderMeta)) {
          continue;
        }
        // If there is an intersection, means some of its inputs have changed.
        // Reevaluate; otherwise skip this builder.
        if (Sets.intersection(builderMeta.getConsumes(), activeDataSet).isEmpty()) {
          continue;
        }
        DataBuilder builder = builderFactory.create(builderMeta.getName());
        if (!dataSetAccessor.checkForData(builder.getDataBuilderMeta().getConsumes())) {
          break; // No need to run others, list is topo sorted
        }
        BuilderRunner builderRunner =
            new BuilderRunner(
                dataBuilderExecutionListener,
                dataFlowInstance,
                builderMeta,
                dataDelta,
                responseData,
                builder,
                dataBuilderContext,
                processedBuilders,
                dataSet);
        Future<DataContainer> future = completionExecutor.submit(builderRunner);
        dataFutures.add(future);
      }
      // Now wait for something to complete: reap exactly as many results as
      // were submitted for this level.
      int listSize = dataFutures.size();
      for (int i = 0; i < listSize; i++) {
        try {
          DataContainer responseContainer = completionExecutor.take().get();
          Data response = responseContainer.getGeneratedData();
          if (responseContainer.isHasError()) {
            // Validation errors take precedence over generic builder errors.
            if (null != responseContainer.getValidationException()) {
              throw responseContainer.getValidationException();
            }
            throw responseContainer.getException();
          }
          if (null != response) {
            dataSetAccessor.merge(response);
            responseData.put(response.getData(), response);
            activeDataSet.add(response.getData());
            // Transient data is excluded from loop-continuation bookkeeping.
            if (null != dataFlow.getTransients()
                && !dataFlow.getTransients().contains(response.getData())) {
              newlyGeneratedData.add(response.getData());
            }
          }
        } catch (InterruptedException e) {
          throw new DataBuilderFrameworkException(
              DataBuilderFrameworkException.ErrorCode.BUILDER_EXECUTION_ERROR,
              "Error while waiting for error ",
              e);
        } catch (ExecutionException e) {
          throw new DataBuilderFrameworkException(
              DataBuilderFrameworkException.ErrorCode.BUILDER_EXECUTION_ERROR,
              "Error while waiting for error ",
              e.getCause());
        }
      }
    }
    if (newlyGeneratedData.contains(dataFlow.getTargetData())) {
      // logger.debug("Finished running this instance of the flow. Exiting.");
      break;
    }
    if (newlyGeneratedData.isEmpty()) {
      // logger.debug("Nothing happened in this loop, exiting..");
      break;
    }
    // StringBuilder stringBuilder = new StringBuilder();
    // for(String data : newlyGeneratedData) {
    //   stringBuilder.append(data + ", ");
    // }
    // logger.info("Newly generated: " + stringBuilder);
    // Seed the next iteration with what this iteration produced.
    activeDataSet.clear();
    activeDataSet.addAll(newlyGeneratedData);
    newlyGeneratedData.clear();
    if (!dataFlow.isLoopingEnabled()) {
      break;
    }
  }
  // Strip transients before persisting the final data set on the instance.
  DataSet finalDataSet = dataSetAccessor.copy(dataFlow.getTransients());
  dataFlowInstance.setDataSet(finalDataSet);
  return new DataExecutionResponse(responseData);
}
public void loadTables(String tableNames, String procNames) throws SQLException, IOException, InterruptedException, ExecutionException { String[] tableNameArray = tableNames != null && !"".equals(tableNames) ? tableNames.split(",") : null; String[] procNameArray = procNames != null && !"".equals(procNames) ? procNames.split(",") : null; ExecutorService executor = Executors.newFixedThreadPool(tableNameArray.length * 3); CompletionService completion = new ExecutorCompletionService(executor); for (int j = 0; j < tableNameArray.length && tableNameArray != null; j++) { String tableName = tableNameArray[j]; String procName = procNameArray != null ? procNameArray[j] : ""; // if procName not provided, use the default VoltDB TABLENAME.insert procedure if (procName.length() == 0) { if (tableName.contains("..")) { procName = tableName.split("\\.\\.")[1].toUpperCase() + ".insert"; } else { procName = tableName.toUpperCase() + ".insert"; } } // query the table String jdbcSelect = "SELECT * FROM " + tableName + ";"; // create query to find count String countquery = jdbcSelect.replace("*", "COUNT(*)"); int pages = 1; String base = ""; if (config.srisvoltdb) { if (config.isPaginated) { try { // find count if (countquery.contains("<") || countquery.contains(">")) { int bracketOpen = countquery.indexOf("<"); int bracketClose = countquery.indexOf(">"); String orderCol = countquery.substring(bracketOpen + 1, bracketClose); countquery = countquery.replace("<" + orderCol + ">", ""); } VoltTable vcount = client.callProcedure("@AdHoc", countquery).getResults()[0]; int count = Integer.parseInt(vcount.toString()); // determine number of pages from total data and page size pages = (int) Math.ceil((double) count / config.pageSize); System.out.println(pages); } catch (Exception e) { System.out.println("Count formation failure!"); } } } else { // find count Connection conn = DriverManager.getConnection(config.jdbcurl, config.jdbcuser, config.jdbcpassword); base = 
conn.getMetaData().getDatabaseProductName().toLowerCase(); Statement jdbcStmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); if (countquery.contains("<") || countquery.contains(">")) { int bracketOpen = countquery.indexOf("<"); int bracketClose = countquery.indexOf(">"); String orderCol = countquery.substring(bracketOpen + 1, bracketClose); countquery = countquery.replace("<" + orderCol + ">", ""); } ResultSet rcount = jdbcStmt.executeQuery( countquery); // determine number of pages from total data and page size if (base.contains("postgres") && config.isPaginated) { int count = Integer.parseInt(rcount.toString()); pages = (int) Math.ceil((double) count / config.pageSize); } } // establish new SourceReaders and DestinationWriters for pages SourceReader[] sr = new SourceReader[pages]; DestinationWriter[] cr = new DestinationWriter[pages]; for (int i = 0; i < pages; i++) { sr[i] = new SourceReader(); cr[i] = new DestinationWriter(); } Controller processor = new Controller<ArrayList<Object[]>>( client, sr, cr, jdbcSelect, procName, config, pages, base); completion.submit(processor); } // wait for all tasks to complete. for (int i = 0; i < tableNameArray.length; ++i) { logger.info( "****************" + completion.take().get() + " completed *****************"); // will block until the next sub task has // completed. } executor.shutdown(); }
/*
 * Solves for times by processing samples in the active list in parallel.
 * Each pass partitions the active (A) list into fixed-size blocks that worker
 * tasks claim via a shared atomic counter; samples produced by the workers are
 * collected into per-task B lists and merged into a new A list until no
 * active samples remain.
 */
private void solveParallelX(
    final ActiveList al,
    final float[][][] t,
    final int m,
    final float[][][] times,
    final int[][][] marks) {
  int nthread = Runtime.getRuntime().availableProcessors();
  ExecutorService es = Executors.newFixedThreadPool(nthread);
  CompletionService<Void> cs = new ExecutorCompletionService<Void>(es);
  // Per-task B lists and scratch arrays, reused across passes.
  ActiveList[] bl = new ActiveList[nthread];
  float[][] d = new float[nthread][];
  for (int ithread = 0; ithread < nthread; ++ithread) {
    bl[ithread] = new ActiveList();
    d[ithread] = new float[6];
  }
  final AtomicInteger ai = new AtomicInteger();
  int ntotal = 0;
  // int niter = 0;
  while (!al.isEmpty()) {
    ai.set(0); // initialize the shared block index to zero
    final int n = al.size(); // number of samples in active (A) list
    ntotal += n;
    final int mb = 32; // size of blocks of samples
    final int nb = 1 + (n - 1) / mb; // number of blocks of samples
    int ntask = min(nb, nthread); // number of tasks (threads to be used)
    for (int itask = 0; itask < ntask; ++itask) { // for each task, ...
      final ActiveList bltask = bl[itask]; // task-specific B list
      final float[] dtask = d[itask]; // task-specific work array
      cs.submit(
          new Callable<Void>() { // submit new task
            public Void call() {
              // Claim blocks until the shared counter runs past nb.
              for (int ib = ai.getAndIncrement(); ib < nb; ib = ai.getAndIncrement()) {
                int i = ib * mb; // beginning of block
                int j = min(i + mb, n); // beginning of next block (or end)
                for (int k = i; k < j; ++k) { // for each sample in block, ...
                  Sample s = al.get(k); // get k'th sample from A list
                  solveOne(t, m, times, marks, s, bltask, dtask); // process sample
                }
              }
              bltask.setAllAbsent(); // needed when merging B lists below
              return null;
            }
          });
    }
    try {
      // Wait for all tasks of this pass to finish.
      for (int itask = 0; itask < ntask; ++itask) cs.take();
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
    // Merge samples from all B lists to a new A list. As samples
    // are appended, their absent flags are set to false, so that
    // each sample is appended no more than once to the new A list.
    al.clear();
    for (int itask = 0; itask < ntask; ++itask) {
      al.appendIfAbsent(bl[itask]);
      bl[itask].clear();
    }
    // ++niter;
  }
  es.shutdown();
  // trace("solveParallel: ntotal="+ntotal);
  // trace("  nratio="+(float)ntotal/(float)(_n1*_n2*_n3));
}
/**
 * Loads data for every query defined in the given properties file, optionally
 * filtered by module and table name, by running one Controller task per query
 * through a completion service and waiting for all of them to finish.
 *
 * @param queryFile path of the properties file holding the named queries
 * @param modules comma-separated module filter, or Config.ALL for everything
 * @param tables comma-separated table filter, or Config.ALL for everything
 */
public void load(String queryFile, String modules, String tables)
    throws SQLException, IOException, InterruptedException, ExecutionException {
  Properties properties = new Properties();
  properties.load(new FileInputStream(queryFile));
  Collection<String> keys = properties.stringPropertyNames();
  // Filtering by validating if property starts with any of the module names
  if (!Config.ALL.equalsIgnoreCase(modules)) {
    keys =
        Util.filter(
            keys, "^(" + modules.replaceAll(Config.COMMA_SEPARATOR, Config.MODULE_SUFFIX) + ")");
  }
  // Filtering by table names
  if (!Config.ALL.equalsIgnoreCase(tables)) {
    keys =
        Util.filter(
            keys, "(" + tables.replaceAll(Config.COMMA_SEPARATOR, Config.TABLE_SUFFIX) + ")$");
  }
  logger.info("The final modules and tables that are being considered" + keys.toString());
  ExecutorService executor = Executors.newFixedThreadPool(keys.size() * 3);
  CompletionService completion = new ExecutorCompletionService(executor);
  for (String key : keys) {
    String query = properties.getProperty(key);
    // Strip the module prefix; the remaining name doubles as the target table.
    key =
        (key.contains(Config.DOT_SEPARATOR)
            ? key.substring(key.indexOf(Config.DOT_SEPARATOR) + 1)
            : key);
    // Substitute every [:param] placeholder with its value from the same file.
    while (query.contains("[:")) {
      String param = query.substring(query.indexOf("[:") + 2, query.indexOf("]"));
      query = query.replaceFirst("\\[\\:" + param + "\\]", properties.getProperty(param));
    }
    int pages = 1;
    String base = "";
    if (config.srisvoltdb) {
      if (config.isPaginated) {
        try {
          // find count; strip any <orderColumn> hint first
          String countquery = query;
          if (countquery.contains("<") || countquery.contains(">")) {
            int bracketOpen = countquery.indexOf("<");
            int bracketClose = countquery.indexOf(">");
            String orderCol = countquery.substring(bracketOpen + 1, bracketClose);
            countquery = countquery.replace("<" + orderCol + ">", "");
          }
          VoltTable vcount = client.callProcedure("@AdHoc", countquery).getResults()[0];
          // NOTE(review): getRowCount() is the number of rows in the COUNT(*)
          // result (normally 1), not the count value itself — verify against
          // the VoltDB client API.
          int count = vcount.getRowCount();
          pages = (int) Math.ceil((double) count / config.pageSize);
        } catch (Exception e) {
          System.out.println("Count formation failure!");
        }
      }
      // set up data in order
    } else {
      // find count
      // NOTE(review): this Connection/Statement is never closed; consider
      // try-with-resources if the surrounding lifecycle allows it.
      String countquery = query.replace("*", "COUNT(*)");
      Connection conn =
          DriverManager.getConnection(config.jdbcurl, config.jdbcuser, config.jdbcpassword);
      base = conn.getMetaData().getDatabaseProductName().toLowerCase();
      System.out.println("BASE: " + base);
      Statement jdbcStmt =
          conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
      if (countquery.contains("<") || countquery.contains(">")) {
        int bracketOpen = countquery.indexOf("<");
        int bracketClose = countquery.indexOf(">");
        String orderCol = countquery.substring(bracketOpen + 1, bracketClose);
        countquery = countquery.replace("<" + orderCol + ">", "");
      }
      ResultSet rcount = jdbcStmt.executeQuery(countquery);
      rcount.next();
      // NOTE(review): reading the count via getArray(1).toString() is
      // unusual; ResultSet.getInt(1) would be the conventional way — confirm
      // the driver behavior this relies on.
      int count = Integer.parseInt(rcount.getArray(1).toString());
      // THIS IF NEEDS A WAY TO DETERMINE IF POSTGRES
      if (base.contains("postgres") && config.isPaginated) {
        pages = (int) Math.ceil((double) count / config.pageSize);
      }
      // set up data in order
    }
    // establish new SourceReaders and DestinationWriters for pages
    SourceReader[] sr = new SourceReader[pages];
    DestinationWriter[] cr = new DestinationWriter[pages];
    for (int i = 0; i < pages; i++) {
      sr[i] = new SourceReader();
      cr[i] = new DestinationWriter();
    }
    Controller processor =
        new Controller<ArrayList<Object[]>>(
            client, sr, cr, query, key.toUpperCase() + ".insert", config, pages, base);
    completion.submit(processor);
  }
  // wait for all tasks to complete.
  for (int i = 0; i < keys.size(); ++i) {
    // will block until the next sub task has completed.
    logger.info("****************" + completion.take().get() + " completed *****************");
  }
  executor.shutdown();
}