/*
 * Pardot, an ExactTarget company
 * User: Michael Frank
 * Date: 6/21/13
 */
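/**
 * Consumes a bounded iterator of CQL statements, submitting each statement for
 * asynchronous execution on a shared executor and tracking completion with a
 * countdown latch. Callers invoke {@link #start()} to dispatch the statements and
 * {@link #join()} to block until every statement completes or the timeout elapses.
 */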
public class StatementIteratorConsumer {

  private static Logger logger = LoggerFactory.getLogger(StatementIteratorConsumer.class);
  private static ExecutorService executorService = Executors.newFixedThreadPool(260);

  private final BoundedCQLStatementIterator statementIterator;
  private CQLExecutor cqlExecutor;
  private final CountDownLatch shutdownLatch;
  private final long timeout;
  private final Set<Future> futures = Collections.synchronizedSet(new HashSet<Future>());
  private List<Throwable> executionExceptions = new Vector<Throwable>();

  public StatementIteratorConsumer(
      BoundedCQLStatementIterator statementIterator, CQLExecutor cqlExecutor, long timeout) {
    this.statementIterator = statementIterator;
    this.cqlExecutor = cqlExecutor;
    this.timeout = timeout;
    this.shutdownLatch = new CountDownLatch((int) statementIterator.size());
    logger.trace("Created consumer with countdown {}", shutdownLatch.getCount());
  }

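  /** Dispatches every remaining statement in the iterator to the shared executor. */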
  public void start() {
    while (statementIterator.hasNext()) {
      final CQLStatement next = statementIterator.next();
      Runnable r =
          new Runnable() {
            @Override
            public void run() {
              handle(next);
            }
          };
      executorService.execute(r);
    }
  }

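  /**
   * Blocks until all dispatched statements have completed or the timeout elapses.
   * On timeout, outstanding futures are cancelled and a RhombusException is thrown;
   * any execution failures recorded by the callbacks are logged in either case.
   *
   * @throws RhombusException if the statements do not complete within the timeout
   */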
  public void join() throws RhombusException {
    logger.trace("Awaiting shutdownLatch with timeout {}ms", timeout);
    try {
      boolean complete = shutdownLatch.await(timeout, TimeUnit.MILLISECONDS);
      if (!complete) {
        Metrics.defaultRegistry()
            .newMeter(
                StatementIteratorConsumer.class, "asyncTimeout", "asyncTimeout", TimeUnit.SECONDS)
            .mark();
        cancelFutures();
        for (Throwable t : this.executionExceptions) {
          logger.warn("Timeout executing statements. Found future failure: ", t);
        }
        throw new RhombusException("Timeout executing statements asynchronously");
      }
      for (Throwable t : this.executionExceptions) {
        logger.warn("Completed executing statements, but found future failure: ", t);
      }
    } catch (InterruptedException e) {
      logger.warn("Interrupted while executing statements asynchronously", e);
      // Restore the interrupt status so callers can observe the interruption.
      Thread.currentThread().interrupt();
      cancelFutures();
    }
  }

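  /** Attempts to cancel every outstanding future, logging (but not rethrowing) failures. */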
  private void cancelFutures() {
    for (Future future : futures) {
      try {
        future.cancel(true);
      } catch (Exception e) {
        logger.warn("Exception when cancelling future", e);
      }
    }
  }

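  /**
   * Executes a single statement asynchronously, recording timing and per-host metrics
   * and counting down the shutdown latch when the statement succeeds or fails.
   */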
  protected void handle(CQLStatement statement) {
    final Timer asyncExecTimer =
        Metrics.defaultRegistry().newTimer(StatementIteratorConsumer.class, "asyncExec");
    final TimerContext asyncExecTimerContext = asyncExecTimer.time();
    final long startTime = System.nanoTime();
    ResultSetFuture future = this.cqlExecutor.executeAsync(statement);
    futures.add(future);
    Futures.addCallback(
        future,
        new FutureCallback<ResultSet>() {
          @Override
          public void onSuccess(final ResultSet result) {
            Host queriedHost = result.getExecutionInfo().getQueriedHost();
            Metrics.defaultRegistry()
                .newMeter(
                    StatementIteratorConsumer.class,
                    "queriedhost." + queriedHost.getDatacenter(),
                    queriedHost.getDatacenter(),
                    TimeUnit.SECONDS)
                .mark();
            asyncExecTimerContext.stop();
            logger.debug("Async exec time {}us", (System.nanoTime() - startTime) / 1000);
            shutdownLatch.countDown();
          }

          @Override
          public void onFailure(final Throwable t) {
            asyncExecTimerContext.stop();
            logger.debug("Async failure time {}us", (System.nanoTime() - startTime) / 1000);
            executionExceptions.add(t);
            shutdownLatch.countDown();
          }
        },
        executorService);
  }
}
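
// NOTE: The run(...) method below belongs to a separate data-flow executor class; it
// references fields (executorService, dataBuilderExecutionListener) defined on that
// class, not on StatementIteratorConsumer.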
  /** {@inheritDoc} */
  @Override
  protected DataExecutionResponse run(
      DataBuilderContext dataBuilderContext,
      DataFlowInstance dataFlowInstance,
      DataDelta dataDelta,
      DataFlow dataFlow,
      DataBuilderFactory builderFactory)
      throws DataBuilderFrameworkException, DataValidationException {
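    // A completion service yields builder results in the order they finish rather than
    // the order they were submitted, so faster builders are not blocked behind slower ones.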
    CompletionService<DataContainer> completionExecutor =
        new ExecutorCompletionService<DataContainer>(executorService);
    ExecutionGraph executionGraph = dataFlow.getExecutionGraph();
    DataSet dataSet =
        dataFlowInstance.getDataSet().accessor().copy(); // Create own copy to work with
    DataSetAccessor dataSetAccessor = DataSet.accessor(dataSet);
    dataSetAccessor.merge(dataDelta);
    Map<String, Data> responseData = Maps.newTreeMap();
    Set<String> activeDataSet = Sets.newHashSet();

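    // Seed the active set with the names of the data items present in the incoming delta.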
    for (Data data : dataDelta.getDelta()) {
      activeDataSet.add(data.getData());
    }
    List<List<DataBuilderMeta>> dependencyHierarchy = executionGraph.getDependencyHierarchy();
    Set<String> newlyGeneratedData = Sets.newHashSet();
    Set<DataBuilderMeta> processedBuilders =
        Collections.synchronizedSet(Sets.<DataBuilderMeta>newHashSet());
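    // Walk the dependency hierarchy repeatedly: each pass runs only the builders whose
    // inputs changed, and the loop exits once the target data is produced, no new data
    // is generated, or looping is disabled for this flow.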
    while (true) {
      for (List<DataBuilderMeta> levelBuilders : dependencyHierarchy) {
        List<Future<DataContainer>> dataFutures = Lists.newArrayList();
        for (DataBuilderMeta builderMeta : levelBuilders) {
          if (processedBuilders.contains(builderMeta)) {
            continue;
          }
          // Skip this builder if none of its inputs changed in this pass.
          if (Sets.intersection(builderMeta.getConsumes(), activeDataSet).isEmpty()) {
            continue;
          }
          DataBuilder builder = builderFactory.create(builderMeta.getName());
          if (!dataSetAccessor.checkForData(builder.getDataBuilderMeta().getConsumes())) {
            break; // No need to run others, list is topo sorted
          }
          BuilderRunner builderRunner =
              new BuilderRunner(
                  dataBuilderExecutionListener,
                  dataFlowInstance,
                  builderMeta,
                  dataDelta,
                  responseData,
                  builder,
                  dataBuilderContext,
                  processedBuilders,
                  dataSet);
          Future<DataContainer> future = completionExecutor.submit(builderRunner);
          dataFutures.add(future);
        }

        // Now wait for something to complete.
        int listSize = dataFutures.size();
        for (int i = 0; i < listSize; i++) {
          try {
            DataContainer responseContainer = completionExecutor.take().get();
            Data response = responseContainer.getGeneratedData();
            if (responseContainer.isHasError()) {
              if (null != responseContainer.getValidationException()) {
                throw responseContainer.getValidationException();
              }

              throw responseContainer.getException();
            }
            if (null != response) {
              dataSetAccessor.merge(response);
              responseData.put(response.getData(), response);
              activeDataSet.add(response.getData());
              if (null != dataFlow.getTransients()
                  && !dataFlow.getTransients().contains(response.getData())) {
                newlyGeneratedData.add(response.getData());
              }
            }
          } catch (InterruptedException e) {
            throw new DataBuilderFrameworkException(
                DataBuilderFrameworkException.ErrorCode.BUILDER_EXECUTION_ERROR,
                "Interrupted while waiting for builder completion",
                e);
          } catch (ExecutionException e) {
            throw new DataBuilderFrameworkException(
                DataBuilderFrameworkException.ErrorCode.BUILDER_EXECUTION_ERROR,
                "Error while waiting for builder completion",
                e.getCause());
          }
        }
      }
      if (newlyGeneratedData.contains(dataFlow.getTargetData())) {
        // logger.debug("Finished running this instance of the flow. Exiting.");
        break;
      }
      if (newlyGeneratedData.isEmpty()) {
        // logger.debug("Nothing happened in this loop, exiting..");
        break;
      }
      // logger.info("Newly generated: " + newlyGeneratedData);
      activeDataSet.clear();
      activeDataSet.addAll(newlyGeneratedData);
      newlyGeneratedData.clear();
      if (!dataFlow.isLoopingEnabled()) {
        break;
      }
    }
    DataSet finalDataSet = dataSetAccessor.copy(dataFlow.getTransients());
    dataFlowInstance.setDataSet(finalDataSet);
    return new DataExecutionResponse(responseData);
  }