Example No. 1
 private void notifyListeners(DataEvent event) {
   for (int i = 0, n = _listeners.size(); i < n; i++) {
     ((DataListener) _listeners.elementAt(i)).dataChanged(event);
   }
   // Also notify the parent container, if any
   if (this instanceof DataElement) {
     DataContainer container = ((DataElement) this).getContainer();
     if (container != null) {
       container.notifyListeners(event);
     }
   }
 }
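
The method above notifies the element's own listeners first and then bubbles the event up to the enclosing container, so a single change is observed at every level of the containment hierarchy. Below is a minimal, self-contained sketch of that propagation pattern; the Node, ChangeListener, and ChangeEvent types are illustrative stand-ins, not the classes used above.

import java.util.ArrayList;
import java.util.List;

final class ChangeEvent {
  final String description;

  ChangeEvent(String description) {
    this.description = description;
  }
}

interface ChangeListener {
  void changed(ChangeEvent event);
}

// Illustrative node: notifies its own listeners, then delegates to its parent.
class Node {
  private final List<ChangeListener> listeners = new ArrayList<>();
  private Node parent;

  void setParent(Node parent) { this.parent = parent; }

  void addListener(ChangeListener listener) { listeners.add(listener); }

  void notifyListeners(ChangeEvent event) {
    for (ChangeListener listener : listeners) {
      listener.changed(event);
    }
    if (parent != null) {
      parent.notifyListeners(event); // bubble the event up the hierarchy
    }
  }
}

public class PropagationDemo {
  public static void main(String[] args) {
    Node container = new Node();
    Node element = new Node();
    element.setParent(container);

    container.addListener(e -> System.out.println("container saw: " + e.description));
    element.addListener(e -> System.out.println("element saw: " + e.description));

    // Prints the element's notification first, then the container's.
    element.notifyListeners(new ChangeEvent("value updated"));
  }
}
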
  /**
   * Analyzes the data file identified by the given resource name.
   *
   * @param aResourceName the name of the resource (= data file) to analyze, cannot be
   *     <code>null</code>.
   * @param aSclIndex the index of the channel on which the SCL line is captured.
   * @param aSdaIndex the index of the channel on which the SDA line is captured.
   * @return the analysis results, never <code>null</code>.
   * @throws Exception in case the analysis failed.
   */
  private I2CDataSet analyseDataFile(
      final String aResourceName, final int aSclIndex, final int aSdaIndex) throws Exception {
    URL resource = ResourceUtils.getResource(getClass(), aResourceName);
    DataContainer container = DataTestUtils.getCapturedData(resource);
    ToolContext toolContext = DataTestUtils.createToolContext(0, container.getValues().length);

    I2CAnalyserWorker worker = new I2CAnalyserWorker(container, toolContext);
    worker.setLineAIndex(aSclIndex);
    worker.setLineBIndex(aSdaIndex);
    worker.setDetectSDA_SCL(false);
    worker.setReportACK(false);
    worker.setReportNACK(false);
    worker.setReportStart(false);
    worker.setReportStop(false);

    // Simulate running in a separate thread by calling the main worker routine directly...
    I2CDataSet result = worker.doInBackground();
    assertNotNull(result);

    return result;
  }
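
A typical caller is a JUnit 4 test in the same class as the helper above, pointing it at a captured trace and fixed channel indices and asserting on the returned data set. The resource name and channel assignment below are hypothetical, shown only to illustrate the call; @Test and assertNotNull come from org.junit.Test and org.junit.Assert.

  // Lives in the same test class as the helper above.
  @Test
  public void testAnalyseCapturedTrace() throws Exception {
    // "i2c_trace.ols" is a made-up resource name; SCL assumed on channel 0, SDA on channel 1.
    I2CDataSet dataSet = analyseDataFile("i2c_trace.ols", 0, 1);
    assertNotNull(dataSet);
  }
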
  /** {@inheritDoc} */
  @Override
  protected DataExecutionResponse run(
      DataBuilderContext dataBuilderContext,
      DataFlowInstance dataFlowInstance,
      DataDelta dataDelta,
      DataFlow dataFlow,
      DataBuilderFactory builderFactory)
      throws DataBuilderFrameworkException, DataValidationException {
    CompletionService<DataContainer> completionExecutor =
        new ExecutorCompletionService<DataContainer>(executorService);
    ExecutionGraph executionGraph = dataFlow.getExecutionGraph();
    DataSet dataSet =
        dataFlowInstance.getDataSet().accessor().copy(); // Create own copy to work with
    DataSetAccessor dataSetAccessor = DataSet.accessor(dataSet);
    dataSetAccessor.merge(dataDelta);
    Map<String, Data> responseData = Maps.newTreeMap();
    Set<String> activeDataSet = Sets.newHashSet();

    for (Data data : dataDelta.getDelta()) {
      activeDataSet.add(data.getData());
    }
    List<List<DataBuilderMeta>> dependencyHierarchy = executionGraph.getDependencyHierarchy();
    Set<String> newlyGeneratedData = Sets.newHashSet();
    Set<DataBuilderMeta> processedBuilders =
        Collections.synchronizedSet(Sets.<DataBuilderMeta>newHashSet());
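    // Sweep the dependency hierarchy repeatedly until the target data has been produced,
    // a pass generates nothing new, or looping is disabled on the flow.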
    while (true) {
      for (List<DataBuilderMeta> levelBuilders : dependencyHierarchy) {
        List<Future<DataContainer>> dataFutures = Lists.newArrayList();
        for (DataBuilderMeta builderMeta : levelBuilders) {
          if (processedBuilders.contains(builderMeta)) {
            continue;
          }
          // If the intersection with the active data set is non-empty, some of this
          // builder's inputs have changed and it must be re-evaluated; otherwise skip it.
          if (Sets.intersection(builderMeta.getConsumes(), activeDataSet).isEmpty()) {
            continue;
          }
          DataBuilder builder = builderFactory.create(builderMeta.getName());
          if (!dataSetAccessor.checkForData(builder.getDataBuilderMeta().getConsumes())) {
            break; // No need to run others, list is topo sorted
          }
          BuilderRunner builderRunner =
              new BuilderRunner(
                  dataBuilderExecutionListener,
                  dataFlowInstance,
                  builderMeta,
                  dataDelta,
                  responseData,
                  builder,
                  dataBuilderContext,
                  processedBuilders,
                  dataSet);
          Future<DataContainer> future = completionExecutor.submit(builderRunner);
          dataFutures.add(future);
        }

        // Now wait for something to complete.
        int listSize = dataFutures.size();
        for (int i = 0; i < listSize; i++) {
          try {
            DataContainer responseContainer = completionExecutor.take().get();
            Data response = responseContainer.getGeneratedData();
            if (responseContainer.isHasError()) {
              if (null != responseContainer.getValidationException()) {
                throw responseContainer.getValidationException();
              }

              throw responseContainer.getException();
            }
            if (null != response) {
              dataSetAccessor.merge(response);
              responseData.put(response.getData(), response);
              activeDataSet.add(response.getData());
              if (null != dataFlow.getTransients()
                  && !dataFlow.getTransients().contains(response.getData())) {
                newlyGeneratedData.add(response.getData());
              }
            }
          } catch (InterruptedException e) {
            throw new DataBuilderFrameworkException(
                DataBuilderFrameworkException.ErrorCode.BUILDER_EXECUTION_ERROR,
                "Error while waiting for error ",
                e);
          } catch (ExecutionException e) {
            throw new DataBuilderFrameworkException(
                DataBuilderFrameworkException.ErrorCode.BUILDER_EXECUTION_ERROR,
                "Error while waiting for error ",
                e.getCause());
          }
        }
      }
      if (newlyGeneratedData.contains(dataFlow.getTargetData())) {
        // logger.debug("Finished running this instance of the flow. Exiting.");
        break;
      }
      if (newlyGeneratedData.isEmpty()) {
        // logger.debug("Nothing happened in this loop, exiting..");
        break;
      }
      activeDataSet.clear();
      activeDataSet.addAll(newlyGeneratedData);
      newlyGeneratedData.clear();
      if (!dataFlow.isLoopingEnabled()) {
        break;
      }
    }
    DataSet finalDataSet = dataSetAccessor.copy(dataFlow.getTransients());
    dataFlowInstance.setDataSet(finalDataSet);
    return new DataExecutionResponse(responseData);
  }
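
The method above leans on ExecutorCompletionService so that builder results come back in completion order rather than submission order, which is what lets the inner loop react to whichever builder finishes first. The standalone sketch below shows only that pattern; the task payloads are purely illustrative.

import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class CompletionOrderDemo {
  public static void main(String[] args) throws Exception {
    ExecutorService pool = Executors.newFixedThreadPool(3);
    CompletionService<String> completion = new ExecutorCompletionService<>(pool);

    // Submit tasks with different running times.
    int submitted = 0;
    for (int delayMs : new int[] {300, 100, 200}) {
      completion.submit(() -> {
        Thread.sleep(delayMs);
        return "task slept " + delayMs + " ms";
      });
      submitted++;
    }

    // take() hands back futures as tasks finish, not in submission order.
    for (int i = 0; i < submitted; i++) {
      Future<String> done = completion.take();
      System.out.println(done.get());
    }
    pool.shutdown();
  }
}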