@Override
  public Map<String, Object> execute(
      Map<String, Object> inputs,
      Map<String, String> parameters,
      IProgressMonitor monitor,
      String nodeName,
      Workflow workflow)
      throws WorkflowException {

    // Get the set of SDFGraphs to optimize
    @SuppressWarnings("unchecked")
    Set<SDFGraph> graphs = (Set<SDFGraph>) inputs.get(KEY_SDF_GRAPHS_SET);

    // The first pass cleans the graph of useless join-fork vertex pairs,
    // which can hinder scheduling.

    // JoinForkCleaner evaluates some rate and delay expressions and can thus
    // throw InvalidExpressionExceptions, even though such exceptions should
    // already have been raised earlier in the workflow.
    for (SDFGraph graph : graphs) {
      try {
        while (JoinForkCleaner.cleanJoinForkPairsFrom(graph)) {
          // Intentionally empty: iterate until no join-fork pair remains.
        }
      } catch (InvalidExpressionException e) {
        System.err.println("SDFGraph " + graph.getName() + " contains invalid expressions.");
        e.printStackTrace();
      } catch (SDF4JException e) {
        System.err.println("Error when cleaning fork/join pairs in SDFGraph " + graph.getName());
        e.printStackTrace();
      }
    }

    Map<String, Object> outputs = new HashMap<>();
    outputs.put(KEY_SDF_GRAPHS_SET, graphs);
    return outputs;
  }
Example #2
  @Override
  public Map<String, Object> execute(
      Map<String, Object> inputs,
      Map<String, String> parameters,
      IProgressMonitor monitor,
      String nodeName,
      Workflow workflow)
      throws WorkflowException {

    Map<String, Object> outputs = new HashMap<>();
    Design architecture = (Design) inputs.get("architecture");
    SDFGraph algorithm = (SDFGraph) inputs.get("SDF");
    PreesmScenario scenario = (PreesmScenario) inputs.get("scenario");

    super.execute(inputs, parameters, monitor, nodeName, workflow);

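    // Read the PFast-specific and ABC (architecture benchmark computer)
    // parameters from the workflow task parameters.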
    PFastAlgoParameters pFastParams = new PFastAlgoParameters(parameters);
    AbcParameters abcParameters = new AbcParameters(parameters);

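    // Convert the SDF algorithm graph into a DAG that can be mapped onto the
    // architecture.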
    MapperDAG dag = SdfToDagConverter.convert(algorithm, architecture, scenario, false);

    // Calculates the DAG span length on the architecture's main operator (the
    // tasks that cannot be executed by the main operator are moved, without
    // transfer time, to other operators).
    calculateSpan(dag, architecture, scenario, abcParameters);

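    // Simulate the DAG on an infinite homogeneous architecture; the resulting
    // total order is used to build the initial lists and task schedule below.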
    IAbc simu =
        new InfiniteHomogeneousAbc(
            abcParameters,
            dag,
            architecture,
            abcParameters.getSimulatorType().getTaskSchedType(),
            scenario);

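    // Build the initial vertex lists required by the PFast mapping heuristic.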
    InitialLists initial = new InitialLists();

    if (!initial.constructInitialLists(dag, simu)) {
      return null;
    }

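    // Keep the topological task order obtained from the infinite homogeneous
    // simulation, then reset the DAG before the actual mapping.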
    TopologicalTaskSched taskSched = new TopologicalTaskSched(simu.getTotalOrder());
    simu.resetDAG();

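    // Instantiate the ABC matching the simulator type requested in the
    // parameters.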
    IAbc simu2 = AbstractAbc.getInstance(abcParameters, dag, architecture, scenario);

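    // Run the PFast heuristic to map and schedule the DAG on the architecture.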
    PFastAlgorithm pfastAlgorithm = new PFastAlgorithm();

    dag =
        pfastAlgorithm.map(
            dag,
            architecture,
            scenario,
            initial,
            abcParameters,
            pFastParams,
            false,
            0,
            pFastParams.isDisplaySolutions(),
            null,
            taskSched);

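    // Give the mapped DAG to the ABC so that the resulting schedule can be
    // evaluated and reordered.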
    simu2.setDAG(dag);

    // simu2.plotImplementation();

    // The transfers are reordered using the best order found during scheduling.
    simu2.reschedule(pfastAlgorithm.getBestTotalOrder());
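
    // Tag the DAG with the mapping and scheduling results so that subsequent
    // workflow tasks can exploit them.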
    TagDAG tagSDF = new TagDAG();

    try {
      tagSDF.tag(dag, architecture, scenario, simu2, abcParameters.getEdgeSchedType());
    } catch (InvalidExpressionException e) {
      e.printStackTrace();
      throw new WorkflowException(e.getMessage());
    }

    outputs.put("DAG", dag);
    outputs.put("ABC", simu2);

    super.clean(architecture, scenario);
    return outputs;
  }