Example #1
    public NonBlockingIdentityHashMap<Long, TestKey> getMapMultithreaded()
        throws InterruptedException, ExecutionException {
      final int threadCount = _items.keySet().size();
      final NonBlockingIdentityHashMap<Long, TestKey> map =
          new NonBlockingIdentityHashMap<Long, TestKey>();

      // use a barrier to open the gate for all threads at once; a rolling start
      // would mean no actual concurrency
      final CyclicBarrier barrier = new CyclicBarrier(threadCount);
      final ExecutorService ex = Executors.newFixedThreadPool(threadCount);
      final CompletionService<Integer> co = new ExecutorCompletionService<Integer>(ex);
      for (Integer type : _items.keySet()) {
        // A linked-list of things to insert
        List<TestKey> items = _items.get(type);
        TestKeyFeederThread feeder = new TestKeyFeederThread(type, items, map, barrier);
        co.submit(feeder);
      }

      // wait for all threads to return
      int itemCount = 0;
      for (int retCount = 0; retCount < threadCount; retCount++) {
        final Future<Integer> result = co.take();
        itemCount += result.get();
      }
      ex.shutdown();
      return map;
    }
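TestKeyFeederThread itself is not part of this snippet. A minimal sketch of what such a feeder might look like, assuming it is a Callable<Integer> that parks on the shared barrier before inserting its batch; the Long key scheme below is purely illustrative and not the project's actual code (requires java.util.List, java.util.concurrent.Callable, java.util.concurrent.CyclicBarrier):

    // Hypothetical sketch only: waits on the shared CyclicBarrier so that all
    // feeders start inserting at the same instant, then returns how many items
    // it inserted (the value summed up via result.get() above).
    class TestKeyFeederThread implements Callable<Integer> {
      private final int type;
      private final List<TestKey> items;
      private final NonBlockingIdentityHashMap<Long, TestKey> map;
      private final CyclicBarrier barrier;

      TestKeyFeederThread(int type, List<TestKey> items,
                          NonBlockingIdentityHashMap<Long, TestKey> map,
                          CyclicBarrier barrier) {
        this.type = type;
        this.items = items;
        this.map = map;
        this.barrier = barrier;
      }

      @Override
      public Integer call() throws Exception {
        barrier.await(); // open the gate for all feeders at once
        long key = ((long) type) << 32; // illustrative key scheme, not the real one
        for (TestKey item : items) {
          map.put(key++, item);
        }
        return items.size();
      }
    }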
Example #2
  // Concurrent insertion & then iterator test.
  public static void testNonBlockingIdentityHashMapIterator() throws InterruptedException {
    final int ITEM_COUNT1 = 1000;
    final int THREAD_COUNT = 5;
    final int PER_CNT = ITEM_COUNT1 / THREAD_COUNT;
    final int ITEM_COUNT = PER_CNT * THREAD_COUNT; // fix roundoff for odd thread counts

    NonBlockingIdentityHashMap<Long, TestKey> nbhml =
        new NonBlockingIdentityHashMap<Long, TestKey>();
    // use a barrier to open the gate for all threads at once; a rolling start
    // would mean no actual concurrency
    final CyclicBarrier barrier = new CyclicBarrier(THREAD_COUNT);
    final ExecutorService ex = Executors.newFixedThreadPool(THREAD_COUNT);
    final CompletionService<Object> co = new ExecutorCompletionService<Object>(ex);
    for (int i = 0; i < THREAD_COUNT; i++) {
      co.submit(new NBHMLFeeder(nbhml, PER_CNT, barrier, i * PER_CNT));
    }
    for (int retCount = 0; retCount < THREAD_COUNT; retCount++) {
      co.take();
    }
    ex.shutdown();

    assertEquals("values().size()", ITEM_COUNT, nbhml.values().size());
    assertEquals("entrySet().size()", ITEM_COUNT, nbhml.entrySet().size());
    int itemCount = 0;
    for (TestKey K : nbhml.values()) itemCount++;
    assertEquals("values().iterator() count", ITEM_COUNT, itemCount);
  }
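NBHMLFeeder is likewise not shown. Judging from the call site it is a Callable<Object> that awaits the barrier and then inserts PER_CNT entries starting at a given key offset; a hedged sketch (the TestKey constructor taking a long is an assumption):

  // Hypothetical sketch of the feeder used above: each feeder inserts `count`
  // entries with keys in [offset, offset + count).
  class NBHMLFeeder implements Callable<Object> {
    private final NonBlockingIdentityHashMap<Long, TestKey> map;
    private final int count;
    private final CyclicBarrier barrier;
    private final int offset;

    NBHMLFeeder(NonBlockingIdentityHashMap<Long, TestKey> map, int count,
                CyclicBarrier barrier, int offset) {
      this.map = map;
      this.count = count;
      this.barrier = barrier;
      this.offset = offset;
    }

    @Override
    public Object call() throws Exception {
      barrier.await(); // all feeders start together
      for (long i = offset; i < offset + count; i++) {
        map.put(i, new TestKey(i)); // assumes TestKey has a long-argument constructor
      }
      return null;
    }
  }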
Example #3
    /**
     * Waits for completed requests. Once the first request has been taken, the method will wait
     * WAIT_TIMEOUT ms longer to collect more completed requests.
     *
     * @return Collected feeds or null if the method has been interrupted during the first waiting
     *     period.
     */
    private List<Feed> collectCompletedRequests() {
      List<Feed> results = new LinkedList<Feed>();
      DownloadRequester requester = DownloadRequester.getInstance();
      int tasks = 0;

      try {
        DownloadRequest request = completedRequests.take();
        parserService.submit(new FeedParserTask(request));
        tasks++;
      } catch (InterruptedException e) {
        return null;
      }

      tasks += pollCompletedDownloads();

      isCollectingRequests = true;

      if (requester.isDownloadingFeeds()) {
        // wait for completion of more downloads
        long startTime = System.currentTimeMillis();
        long currentTime = startTime;
        while (requester.isDownloadingFeeds() && (currentTime - startTime) < WAIT_TIMEOUT) {
          try {
            if (BuildConfig.DEBUG)
              Log.d(TAG, "Waiting for " + (startTime + WAIT_TIMEOUT - currentTime) + " ms");
            sleep(startTime + WAIT_TIMEOUT - currentTime);
          } catch (InterruptedException e) {
            if (BuildConfig.DEBUG) Log.d(TAG, "interrupted while waiting for more downloads");
            tasks += pollCompletedDownloads();
          } finally {
            currentTime = System.currentTimeMillis();
          }
        }

        tasks += pollCompletedDownloads();
      }

      isCollectingRequests = false;

      for (int i = 0; i < tasks; i++) {
        try {
          Feed f = parserService.take().get();
          if (f != null) {
            results.add(f);
          }
        } catch (InterruptedException e) {
          e.printStackTrace();

        } catch (ExecutionException e) {
          e.printStackTrace();
        }
      }

      return results;
    }
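This snippet relies on fields declared elsewhere in the class. A plausible shape for them, assuming the parser tasks are Callable<Feed>, so that parserService.take().get() yields a Feed; the names and initialization below are assumptions, not the project's exact code:

    // Assumed field declarations for the snippet above (illustrative only):
    // completed downloads arrive on a blocking queue, and parse results are
    // collected through an ExecutorCompletionService<Feed>.
    private final BlockingQueue<DownloadRequest> completedRequests =
        new LinkedBlockingQueue<DownloadRequest>();
    private final ExecutorCompletionService<Feed> parserService =
        new ExecutorCompletionService<Feed>(Executors.newSingleThreadExecutor());
    private volatile boolean isCollectingRequests;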
Example #4
  private int pollCompletedDownloads() {
    int tasks = 0;
    // drain the whole queue; a size()-bounded for-loop would stop early because
    // size() shrinks with every poll()
    DownloadRequest request;
    while ((request = completedRequests.poll()) != null) {
      parserService.submit(new FeedParserTask(request));
      tasks++;
    }
    return tasks;
  }
Example #5
        @Override
        public void run() {
          if (BuildConfig.DEBUG) Log.d(TAG, "downloadCompletionThread was started");
          while (!isInterrupted()) {
            try {
              Downloader downloader = downloadExecutor.take().get();
              if (BuildConfig.DEBUG) Log.d(TAG, "Received 'Download Complete' - message.");
              removeDownload(downloader);
              DownloadStatus status = downloader.getResult();
              boolean successful = status.isSuccessful();

              final int type = status.getFeedfileType();
              if (successful) {
                if (type == Feed.FEEDFILETYPE_FEED) {
                  handleCompletedFeedDownload(downloader.getDownloadRequest());
                } else if (type == FeedImage.FEEDFILETYPE_FEEDIMAGE) {
                  handleCompletedImageDownload(status, downloader.getDownloadRequest());
                } else if (type == FeedMedia.FEEDFILETYPE_FEEDMEDIA) {
                  handleCompletedFeedMediaDownload(status, downloader.getDownloadRequest());
                }
              } else {
                numberOfDownloads.decrementAndGet();
                if (!status.isCancelled()) {
                  if (status.getReason() == DownloadError.ERROR_UNAUTHORIZED) {
                    postAuthenticationNotification(downloader.getDownloadRequest());
                  } else if (status.getReason() == DownloadError.ERROR_HTTP_DATA_ERROR
                      && Integer.valueOf(status.getReasonDetailed())
                          == HttpStatus.SC_REQUESTED_RANGE_NOT_SATISFIABLE) {

                    Log.d(TAG, "Requested invalid range, restarting download from the beginning");
                    FileUtils.deleteQuietly(
                        new File(downloader.getDownloadRequest().getDestination()));
                    DownloadRequester.getInstance()
                        .download(DownloadService.this, downloader.getDownloadRequest());
                  } else {
                    Log.e(TAG, "Download failed");
                    saveDownloadStatus(status);
                    handleFailedDownload(status, downloader.getDownloadRequest());
                  }
                }
                sendDownloadHandledIntent();
                queryDownloadsAsync();
              }
            } catch (InterruptedException e) {
              if (BuildConfig.DEBUG) Log.d(TAG, "DownloadCompletionThread was interrupted");
            } catch (ExecutionException e) {
              e.printStackTrace();
              numberOfDownloads.decrementAndGet();
            }
          }
          if (BuildConfig.DEBUG) Log.d(TAG, "End of downloadCompletionThread");
        }
Example #6
  private void onDownloadQueued(Intent intent) {
    if (BuildConfig.DEBUG) Log.d(TAG, "Received enqueue request");
    DownloadRequest request = intent.getParcelableExtra(EXTRA_REQUEST);
    if (request == null) {
      throw new IllegalArgumentException("ACTION_ENQUEUE_DOWNLOAD intent needs request extra");
    }

    Downloader downloader = getDownloader(request);
    if (downloader != null) {
      numberOfDownloads.incrementAndGet();
      downloads.add(downloader);
      downloadExecutor.submit(downloader);
      sendBroadcast(new Intent(ACTION_DOWNLOADS_CONTENT_CHANGED));
    }

    queryDownloads();
  }
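The two DownloadService snippets above (Examples #5 and #6) drain downloadExecutor with take().get() and receive a Downloader back, so it is presumably an ExecutorCompletionService<Downloader> wrapped around the actual download thread pool. A sketch of what the declarations might look like; these are assumptions, and the pool size constant is hypothetical:

  // Assumed declarations (illustrative only): Downloader implements
  // Callable<Downloader>, so downloadExecutor.take().get() in the completion
  // thread returns the finished downloader itself.
  private final ExecutorService downloadService =
      Executors.newFixedThreadPool(NUM_PARALLEL_DOWNLOADS);
  private final ExecutorCompletionService<Downloader> downloadExecutor =
      new ExecutorCompletionService<Downloader>(downloadService);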
  @SuppressWarnings({"unchecked"})
  public final void execute(final Callback callback)
      throws MojoExecutionException, MojoFailureException {
    if (!skip) {
      if (header == null) {
        warn("No header file specified to check for license");
        return;
      }
      if (!strictCheck) {
        warn(
            "Property 'strictCheck' is not enabled. Please consider adding <strictCheck>true</strictCheck> in your pom.xml file.");
        warn("See http://mycila.github.io/license-maven-plugin for more information.");
      }

      finder = new ResourceFinder(basedir);
      try {
        finder.setCompileClassPath(project.getCompileClasspathElements());
      } catch (DependencyResolutionRequiredException e) {
        throw new MojoExecutionException(e.getMessage(), e);
      }
      finder.setPluginClassPath(getClass().getClassLoader());

      final Header h = new Header(finder.findResource(this.header), encoding, headerSections);
      debug("Header %s:\n%s", h.getLocation(), h);

      if (this.validHeaders == null) {
        this.validHeaders = new String[0];
      }
      final List<Header> validHeaders = new ArrayList<Header>(this.validHeaders.length);
      for (String validHeader : this.validHeaders) {
        validHeaders.add(new Header(finder.findResource(validHeader), encoding, headerSections));
      }

      final List<PropertiesProvider> propertiesProviders = new LinkedList<PropertiesProvider>();
      for (PropertiesProvider provider :
          ServiceLoader.load(
              PropertiesProvider.class, Thread.currentThread().getContextClassLoader())) {
        propertiesProviders.add(provider);
      }
      final DocumentPropertiesLoader propertiesLoader =
          new DocumentPropertiesLoader() {
            @Override
            public Properties load(Document document) {
              Properties props = new Properties();

              for (Map.Entry<String, String> entry : mergeProperties(document).entrySet()) {
                if (entry.getValue() != null) {
                  props.setProperty(entry.getKey(), entry.getValue());
                } else {
                  props.remove(entry.getKey());
                }
              }
              for (PropertiesProvider provider : propertiesProviders) {
                try {
                  final Map<String, String> providerProperties =
                      provider.getAdditionalProperties(AbstractLicenseMojo.this, props, document);
                  if (getLog().isDebugEnabled()) {
                    getLog()
                        .debug(
                            "provider: "
                                + provider.getClass()
                                + " brought new properties\n"
                                + providerProperties);
                  }
                  for (Map.Entry<String, String> entry : providerProperties.entrySet()) {
                    if (entry.getValue() != null) {
                      props.setProperty(entry.getKey(), entry.getValue());
                    } else {
                      props.remove(entry.getKey());
                    }
                  }
                } catch (Exception e) {
                  getLog().warn("failure occured while calling " + provider.getClass(), e);
                }
              }
              return props;
            }
          };

      final DocumentFactory documentFactory =
          new DocumentFactory(
              basedir,
              buildMapping(),
              buildHeaderDefinitions(),
              encoding,
              keywords,
              propertiesLoader);

      int nThreads = (int) (Runtime.getRuntime().availableProcessors() * concurrencyFactor);
      ExecutorService executorService = Executors.newFixedThreadPool(nThreads);
      CompletionService completionService = new ExecutorCompletionService(executorService);
      int count = 0;
      debug("Number of execution threads: %s", nThreads);

      try {
        for (final String file : listSelectedFiles()) {
          completionService.submit(
              new Runnable() {
                @Override
                public void run() {
                  Document document = documentFactory.createDocuments(file);
                  debug(
                      "Selected file: %s [header style: %s]",
                      document.getFilePath(), document.getHeaderDefinition());
                  if (document.isNotSupported()) {
                    callback.onUnknownFile(document, h);
                  } else if (document.is(h)) {
                    debug("Skipping header file: %s", document.getFilePath());
                  } else if (document.hasHeader(h, strictCheck)) {
                    callback.onExistingHeader(document, h);
                  } else {
                    boolean headerFound = false;
                    for (Header validHeader : validHeaders) {
                      if (headerFound = document.hasHeader(validHeader, strictCheck)) {
                        callback.onExistingHeader(document, h);
                        break;
                      }
                    }
                    if (!headerFound) {
                      callback.onHeaderNotFound(document, h);
                    }
                  }
                }
              },
              null);
          count++;
        }

        while (count-- > 0) {
          try {
            completionService.take().get();
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
          } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            if (cause instanceof Error) {
              throw (Error) cause;
            }
            if (cause instanceof MojoExecutionException) {
              throw (MojoExecutionException) cause;
            }
            if (cause instanceof MojoFailureException) {
              throw (MojoFailureException) cause;
            }
            if (cause instanceof RuntimeException) {
              throw (RuntimeException) cause;
            }
            throw new RuntimeException(cause.getMessage(), cause);
          }
        }

      } finally {
        executorService.shutdownNow();
      }
    }
  }
Example #8
  /** {@inheritDoc} */
  @Override
  protected DataExecutionResponse run(
      DataBuilderContext dataBuilderContext,
      DataFlowInstance dataFlowInstance,
      DataDelta dataDelta,
      DataFlow dataFlow,
      DataBuilderFactory builderFactory)
      throws DataBuilderFrameworkException, DataValidationException {
    CompletionService<DataContainer> completionExecutor =
        new ExecutorCompletionService<DataContainer>(executorService);
    ExecutionGraph executionGraph = dataFlow.getExecutionGraph();
    DataSet dataSet =
        dataFlowInstance.getDataSet().accessor().copy(); // Create own copy to work with
    DataSetAccessor dataSetAccessor = DataSet.accessor(dataSet);
    dataSetAccessor.merge(dataDelta);
    Map<String, Data> responseData = Maps.newTreeMap();
    Set<String> activeDataSet = Sets.newHashSet();

    for (Data data : dataDelta.getDelta()) {
      activeDataSet.add(data.getData());
    }
    List<List<DataBuilderMeta>> dependencyHierarchy = executionGraph.getDependencyHierarchy();
    Set<String> newlyGeneratedData = Sets.newHashSet();
    Set<DataBuilderMeta> processedBuilders =
        Collections.synchronizedSet(Sets.<DataBuilderMeta>newHashSet());
    while (true) {
      for (List<DataBuilderMeta> levelBuilders : dependencyHierarchy) {
        List<Future<DataContainer>> dataFutures = Lists.newArrayList();
        for (DataBuilderMeta builderMeta : levelBuilders) {
          if (processedBuilders.contains(builderMeta)) {
            continue;
          }
          // If the builder's inputs do not intersect the active data set, none of its
          // inputs have changed, so there is nothing to re-evaluate
          if (Sets.intersection(builderMeta.getConsumes(), activeDataSet).isEmpty()) {
            continue;
          }
          DataBuilder builder = builderFactory.create(builderMeta.getName());
          if (!dataSetAccessor.checkForData(builder.getDataBuilderMeta().getConsumes())) {
            break; // No need to run others, list is topo sorted
          }
          BuilderRunner builderRunner =
              new BuilderRunner(
                  dataBuilderExecutionListener,
                  dataFlowInstance,
                  builderMeta,
                  dataDelta,
                  responseData,
                  builder,
                  dataBuilderContext,
                  processedBuilders,
                  dataSet);
          Future<DataContainer> future = completionExecutor.submit(builderRunner);
          dataFutures.add(future);
        }

        // Now wait for something to complete.
        int listSize = dataFutures.size();
        for (int i = 0; i < listSize; i++) {
          try {
            DataContainer responseContainer = completionExecutor.take().get();
            Data response = responseContainer.getGeneratedData();
            if (responseContainer.isHasError()) {
              if (null != responseContainer.getValidationException()) {
                throw responseContainer.getValidationException();
              }

              throw responseContainer.getException();
            }
            if (null != response) {
              dataSetAccessor.merge(response);
              responseData.put(response.getData(), response);
              activeDataSet.add(response.getData());
              if (null != dataFlow.getTransients()
                  && !dataFlow.getTransients().contains(response.getData())) {
                newlyGeneratedData.add(response.getData());
              }
            }
          } catch (InterruptedException e) {
            throw new DataBuilderFrameworkException(
                DataBuilderFrameworkException.ErrorCode.BUILDER_EXECUTION_ERROR,
                "Error while waiting for error ",
                e);
          } catch (ExecutionException e) {
            throw new DataBuilderFrameworkException(
                DataBuilderFrameworkException.ErrorCode.BUILDER_EXECUTION_ERROR,
                "Error while waiting for error ",
                e.getCause());
          }
        }
      }
      if (newlyGeneratedData.contains(dataFlow.getTargetData())) {
        // logger.debug("Finished running this instance of the flow. Exiting.");
        break;
      }
      if (newlyGeneratedData.isEmpty()) {
        // logger.debug("Nothing happened in this loop, exiting..");
        break;
      }
      //            StringBuilder stringBuilder = new StringBuilder();
      //            for(String data : newlyGeneratedData) {
      //                stringBuilder.append(data + ", ");
      //            }
      // logger.info("Newly generated: " + stringBuilder);
      activeDataSet.clear();
      activeDataSet.addAll(newlyGeneratedData);
      newlyGeneratedData.clear();
      if (!dataFlow.isLoopingEnabled()) {
        break;
      }
    }
    DataSet finalDataSet = dataSetAccessor.copy(dataFlow.getTransients());
    dataFlowInstance.setDataSet(finalDataSet);
    return new DataExecutionResponse(responseData);
  }
Example #9
  public void loadTables(String tableNames, String procNames)
      throws SQLException, IOException, InterruptedException, ExecutionException {
    String[] tableNameArray =
        tableNames != null && !"".equals(tableNames) ? tableNames.split(",") : null;
    String[] procNameArray =
        procNames != null && !"".equals(procNames) ? procNames.split(",") : null;

    if (tableNameArray == null) {
      // nothing to load; the null check must happen before the array is dereferenced
      return;
    }

    ExecutorService executor = Executors.newFixedThreadPool(tableNameArray.length * 3);
    CompletionService completion = new ExecutorCompletionService(executor);

    for (int j = 0; j < tableNameArray.length; j++) {
      String tableName = tableNameArray[j];
      String procName = procNameArray != null ? procNameArray[j] : "";

      // if procName not provided, use the default VoltDB TABLENAME.insert procedure
      if (procName.length() == 0) {
        if (tableName.contains("..")) {
          procName = tableName.split("\\.\\.")[1].toUpperCase() + ".insert";
        } else {
          procName = tableName.toUpperCase() + ".insert";
        }
      }

      // query the table
      String jdbcSelect = "SELECT * FROM " + tableName + ";";

      // create query to find count
      String countquery = jdbcSelect.replace("*", "COUNT(*)");
      int pages = 1;
      String base = "";
      if (config.srisvoltdb) {
        if (config.isPaginated) {
          try {
            // find count
            if (countquery.contains("<") || countquery.contains(">")) {
              int bracketOpen = countquery.indexOf("<");
              int bracketClose = countquery.indexOf(">");
              String orderCol = countquery.substring(bracketOpen + 1, bracketClose);
              countquery = countquery.replace("<" + orderCol + ">", "");
            }
            VoltTable vcount = client.callProcedure("@AdHoc", countquery).getResults()[0];
            vcount.advanceRow();
            int count = (int) vcount.getLong(0); // read the COUNT(*) value, not the table's toString()
            // determine number of pages from total data and page size
            pages = (int) Math.ceil((double) count / config.pageSize);
            System.out.println(pages);
          } catch (Exception e) {
            System.out.println("Count formation failure!");
          }
        }
      } else {
        // find count
        Connection conn =
            DriverManager.getConnection(config.jdbcurl, config.jdbcuser, config.jdbcpassword);
        base = conn.getMetaData().getDatabaseProductName().toLowerCase();
        Statement jdbcStmt =
            conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        if (countquery.contains("<") || countquery.contains(">")) {
          int bracketOpen = countquery.indexOf("<");
          int bracketClose = countquery.indexOf(">");
          String orderCol = countquery.substring(bracketOpen + 1, bracketClose);
          countquery = countquery.replace("<" + orderCol + ">", "");
        }
        ResultSet rcount =
            jdbcStmt.executeQuery(
                countquery); // determine number of pages from total data and page size
        if (base.contains("postgres") && config.isPaginated) {
          rcount.next();
          int count = rcount.getInt(1); // read the COUNT(*) column, not ResultSet.toString()
          pages = (int) Math.ceil((double) count / config.pageSize);
        }
      }

      // establish new SourceReaders and DestinationWriters for pages
      SourceReader[] sr = new SourceReader[pages];
      DestinationWriter[] cr = new DestinationWriter[pages];
      for (int i = 0; i < pages; i++) {
        sr[i] = new SourceReader();
        cr[i] = new DestinationWriter();
      }
      Controller processor =
          new Controller<ArrayList<Object[]>>(
              client, sr, cr, jdbcSelect, procName, config, pages, base);
      completion.submit(processor);
    }

    // wait for all tasks to complete.
    for (int i = 0; i < tableNameArray.length; ++i) {
      logger.info(
          "****************"
              + completion.take().get()
              + " completed *****************"); // will block until the next sub task has
      // completed.
    }
    executor.shutdown();
  }
Example #10
  public void load(String queryFile, String modules, String tables)
      throws SQLException, IOException, InterruptedException, ExecutionException {
    Properties properties = new Properties();
    properties.load(new FileInputStream(queryFile));

    Collection<String> keys = properties.stringPropertyNames();

    // Filtering by validating if property starts with any of the module names
    if (!Config.ALL.equalsIgnoreCase(modules)) {
      keys =
          Util.filter(
              keys, "^(" + modules.replaceAll(Config.COMMA_SEPARATOR, Config.MODULE_SUFFIX) + ")");
    }

    // Filtering by table names
    if (!Config.ALL.equalsIgnoreCase(tables)) {
      keys =
          Util.filter(
              keys, "(" + tables.replaceAll(Config.COMMA_SEPARATOR, Config.TABLE_SUFFIX) + ")$");
    }

    logger.info("The final modules and tables that are being considered" + keys.toString());

    ExecutorService executor = Executors.newFixedThreadPool(keys.size() * 3);
    CompletionService completion = new ExecutorCompletionService(executor);

    for (String key : keys) {
      String query = properties.getProperty(key);
      key =
          (key.contains(Config.DOT_SEPARATOR)
              ? key.substring(key.indexOf(Config.DOT_SEPARATOR) + 1)
              : key);

      while (query.contains("[:")) {
        String param = query.substring(query.indexOf("[:") + 2, query.indexOf("]"));

        query = query.replaceFirst("\\[\\:" + param + "\\]", properties.getProperty(param));
      }
      int pages = 1;
      String base = "";
      if (config.srisvoltdb) {
        if (config.isPaginated) {
          try {
            // find count
            String countquery = query;
            if (countquery.contains("<") || countquery.contains(">")) {
              int bracketOpen = countquery.indexOf("<");
              int bracketClose = countquery.indexOf(">");
              String orderCol = countquery.substring(bracketOpen + 1, bracketClose);
              countquery = countquery.replace("<" + orderCol + ">", "");
            }
            VoltTable vcount = client.callProcedure("@AdHoc", countquery).getResults()[0];
            int count = vcount.getRowCount();
            pages = (int) Math.ceil((double) count / config.pageSize);
          } catch (Exception e) {
            System.out.println("Count formation failure!");
          }
        }
        // set up data in order
      } else {
        // find count
        String countquery = query.replace("*", "COUNT(*)");
        Connection conn =
            DriverManager.getConnection(config.jdbcurl, config.jdbcuser, config.jdbcpassword);
        base = conn.getMetaData().getDatabaseProductName().toLowerCase();
        System.out.println("BASE: " + base);
        Statement jdbcStmt =
            conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        if (countquery.contains("<") || countquery.contains(">")) {
          int bracketOpen = countquery.indexOf("<");
          int bracketClose = countquery.indexOf(">");
          String orderCol = countquery.substring(bracketOpen + 1, bracketClose);
          countquery = countquery.replace("<" + orderCol + ">", "");
        }
        ResultSet rcount = jdbcStmt.executeQuery(countquery);
        rcount.next();
        int count = rcount.getInt(1); // COUNT(*) is a plain integer column, not an SQL array

        // pagination is currently only applied when the source database is PostgreSQL
        if (base.contains("postgres") && config.isPaginated) {
          pages = (int) Math.ceil((double) count / config.pageSize);
        }
        // set up data in order
      }
      // establish new SourceReaders and DestinationWriters for pages
      SourceReader[] sr = new SourceReader[pages];
      DestinationWriter[] cr = new DestinationWriter[pages];
      for (int i = 0; i < pages; i++) {
        sr[i] = new SourceReader();
        cr[i] = new DestinationWriter();
      }
      Controller processor =
          new Controller<ArrayList<Object[]>>(
              client, sr, cr, query, key.toUpperCase() + ".insert", config, pages, base);
      completion.submit(processor);
    }

    // wait for all tasks to complete.
    for (int i = 0; i < keys.size(); ++i) {
      logger.info(
          "****************"
              + completion.take().get()
              + " completed *****************"); // will block until the next sub task has
      // completed.
    }

    executor.shutdown();
  }