/**
   * Generates {@link Parameters} and {@link Task}s. After generation completes, bash scripts are
   * available in the run directory, ready to submit to the cluster.
   *
   * @param context generation context that accumulates parameters, workflow and batching state
   * @param computeProperties user-supplied settings (parameter files, run directory, workflow, ...)
   * @throws Exception if parameter/workflow parsing fails or the output directory cannot be created
   */
  private void generate(Context context, ComputeProperties computeProperties) throws Exception {
    // Collect all parameter files: the user-specified ones, plus the defaults file if it exists
    List<File> parameterFiles = new ArrayList<>();
    for (String parameterFile : computeProperties.getParameterFiles()) {
      parameterFiles.add(new File(parameterFile));
    }
    if (defaultsExists(computeProperties)) {
      parameterFiles.add(new File(computeProperties.defaults));
    }

    // Parse all parameters
    CsvParameterParserImpl parser = new CsvParameterParserImpl(computeProperties.stringStore);

    // This run ID will be passed to the TupleUtils "to solve" method
    parser.setRunID(computeProperties.runId);

    if (computeProperties.hasParametersToOverwrite()) {
      parser.setParametersToOverwrite(computeProperties.getParametersToOverwrite());
    }

    // Fold parameters
    FoldParametersImpl foldParameters = new FoldParametersImpl(parameterFiles, computeProperties);

    // TODO set number of files, only used to check size
    context.setFoldParameters(foldParameters);
    context.setParameters(parser.parse(parameterFiles, computeProperties));

    // Batch the compute job if there is a batch option available
    if (computeProperties.batchOption != null) {
      context.createBatchAnalyser(computeProperties.batchVariable, computeProperties.batchSize);
    }

    // Canonicalize the run directory and make sure it exists. Failing fast here avoids confusing
    // downstream errors when generated scripts cannot be written.
    File outputDirectory = new File(computeProperties.runDir);
    computeProperties.runDir = outputDirectory.getCanonicalPath();
    if (!outputDirectory.mkdirs() && !outputDirectory.isDirectory()) {
      throw new java.io.IOException(
          "Could not create output directory: " + computeProperties.runDir);
    }

    // Parse workflow
    LOG.info("Parsing workflow");
    WorkflowImpl workflowImpl =
        new WorkflowCsvParserImpl().parse(computeProperties.workFlow, computeProperties);
    context.setWorkflow(workflowImpl);

    // Create environment.txt with user parameters that are used in at least one of the steps
    LOG.info("Creating user.env file");
    new EnvironmentGenerator(computeProperties.stringStore)
        .generate(context, computeProperties.runDir);

    // Create a ScriptGenerator object. It builds the header, footer, and submit templates on
    // construction, and is then used to create a script for every generated task.
    LOG.info("Generating header and footer templates");
    ScriptGenerator scriptGenerator = new ScriptGenerator(computeProperties);

    // Create a TaskGenerator object with the current context object
    TaskGenerator taskGenerator =
        new TaskGenerator(context, scriptGenerator, computeProperties.stringStore);

    // Analyze lists in workflow protocols: when list inputs come from more than one parameter
    // file, we must decide whether to combine the lists or leave them separate
    if (foldParameters.getParameters().size() >= 2) {
      determineCombineLists(workflowImpl);
    }

    // Generate tasks, store task names and previous steps in a list of TaskInfo objects
    LOG.info("Generating tasks");
    List<TaskInfo> taskInfos = taskGenerator.generate();

    // Generate submit script with the TaskInfo objects
    LOG.info("Generating submit scripts");
    scriptGenerator.generateSubmitScript(taskInfos);

    LOG.info("All scripts have been generated");
    LOG.info("You can find them in: " + outputDirectory);
  }