/**
 * Builds a minimal two-vertex execution graph for testing: a single-subtask input vertex wired
 * to a single-subtask output vertex through a channel of the requested type.
 *
 * @param channelType the channel type used for the edge between the two vertices
 * @param instanceManager the instance manager consulted while converting the job graph into an
 *     execution graph
 * @return the constructed execution graph, or {@code null} only if a failure was already
 *     reported via {@code fail(...)}
 */
  private ExecutionGraph createExecutionGraph(
      final ChannelType channelType, final InstanceManager instanceManager) {

    final JobGraph graph = new JobGraph("Job Graph");

    // Source side: one input vertex with a single subtask.
    final JobInputVertex source = new JobInputVertex("Input 1", graph);
    source.setInputClass(InputTask.class);
    source.setNumberOfSubtasks(1);

    // Sink side: one output vertex with a single subtask.
    final JobOutputVertex sink = new JobOutputVertex("Output 1", graph);
    sink.setOutputClass(OutputTask.class);
    sink.setNumberOfSubtasks(1);

    // Wire source -> sink with the requested channel type; a definition error fails the test.
    try {
      source.connectTo(sink, channelType);
    } catch (JobGraphDefinitionException e) {
      fail(StringUtils.stringifyException(e));
    }

    // Register an (empty) library set for the job, then convert it into an execution graph.
    try {
      LibraryCacheManager.register(graph.getJobID(), new String[0]);
      return new ExecutionGraph(graph, instanceManager);

    } catch (GraphConversionException e) {
      fail(StringUtils.stringifyException(e));
    } catch (IOException e) {
      fail(StringUtils.stringifyException(e));
    }

    // Unreachable in practice: fail(...) throws before we get here.
    return null;
  }
 /**
  * Creates the iteration synchronization sink vertex for the given job graph and configures it
  * with the iteration id and the number of iterations to run.
  *
  * @param jobGraph the job graph the sync vertex is added to
  * @param numIterations the number of iterations the sync task coordinates
  * @param dop the degree of parallelism for the sync vertex
  * @return the configured synchronization output vertex
  */
 private static JobOutputVertex createSync(JobGraph jobGraph, int numIterations, int dop) {
   final JobOutputVertex syncVertex = JobGraphUtils.createSync(jobGraph, dop);
   final TaskConfig config = new TaskConfig(syncVertex.getConfiguration());
   config.setNumberOfIterations(numIterations);
   // ITERATION_ID is a file-level constant shared with the other iteration vertices.
   config.setIterationId(ITERATION_ID);
   return syncVertex;
 }
  /**
   * Entry point that assembles and submits the Connected Components job.
   *
   * <p>Expected arguments: {@code <dop> <inputPath> <outputPath> <subtasksPerInstance>
   * <baseMemoryMB>}. Exits with status -1 if the argument count is wrong.
   *
   * @param args command-line arguments as described above
   * @throws JobGraphDefinitionException if wiring the job vertices fails
   * @throws IOException if job submission fails with an I/O error
   * @throws JobExecutionException if the submitted job fails
   */
  public static void main(String[] args)
      throws JobGraphDefinitionException, IOException, JobExecutionException {
    if (args.length != 5) {
      System.out.println("Not correct parameters");
      System.exit(-1);
    }

    // parseInt avoids the needless boxing of Integer.valueOf followed by auto-unboxing.
    final int dop = Integer.parseInt(args[0]);
    final String input = args[1];
    final String output = args[2];
    final int spi = Integer.parseInt(args[3]);
    final int baseMemory = Integer.parseInt(args[4]);

    JobGraph graph = new JobGraph("Connected Components");

    // Create tasks
    JobInputVertex sourceVertex = createInput(AdjacencyListInput.class, input, graph, dop, spi);

    JobTaskVertex convert = createTask(ConvertToTransitiveClosureTypes.class, graph, dop, spi);
    convert.setVertexToShareInstancesWith(sourceVertex);

    JobTaskVertex initialState = createTask(InitialStateComponents.class, graph, dop, spi);
    initialState.setVertexToShareInstancesWith(sourceVertex);

    JobTaskVertex initialUpdateAssigner = createTask(InitialUpdates.class, graph, dop, spi);
    initialUpdateAssigner.setVertexToShareInstancesWith(sourceVertex);

    JobTaskVertex tmpTask = createTask(UpdateTempTask.class, graph, dop);
    tmpTask.setVertexToShareInstancesWith(sourceVertex);
    // Memory split: 1/9 to the temp task, 8/9 to the match task below.
    setMemorySize(tmpTask, baseMemory * 1 / 9);

    // Inner iteration loop tasks -- BEGIN
    JobTaskVertex updatesMatch =
        createTask(UpdateableMatchingOptimizedCombined.class, graph, dop, spi);
    updatesMatch.setVertexToShareInstancesWith(sourceVertex);
    setMemorySize(updatesMatch, baseMemory * 8 / 9);
    // Inner iteration loop tasks -- END

    JobOutputVertex sinkVertex = createOutput(NullOutput.class, output, graph, dop, spi);
    sinkVertex.setVertexToShareInstancesWith(sourceVertex);

    // Connect tasks
    connectJobVertices(ShipStrategy.FORWARD, sourceVertex, convert, null, null);
    connectJobVertices(ShipStrategy.PARTITION_HASH, convert, initialState, null, null);

    connectJobVertices(ShipStrategy.FORWARD, initialState, initialUpdateAssigner, null, null);
    connectJobVertices(ShipStrategy.PARTITION_HASH, initialUpdateAssigner, tmpTask, null, null);

    // Close the iteration loop: tmpTask feeds the match task for a fixed 14 rounds.
    NepheleUtil.connectBoundedRoundsIterationLoop(
        tmpTask, sinkVertex, null, null, updatesMatch, ShipStrategy.PARTITION_HASH, 14, graph);

    connectJobVertices(ShipStrategy.FORWARD, initialState, updatesMatch, null, null);

    // Submit job
    submit(graph, getConfiguration());
  }
  /**
   * Creates the file output vertex that writes the result points to {@code resultPath} using a
   * {@code PointOutFormat}.
   *
   * @param jobGraph the job graph the output vertex is added to
   * @param resultPath the target path for the output file
   * @param numSubTasks the degree of parallelism (also used as subtasks per instance)
   * @param serializer the serializer factory for the vertex's single input
   * @return the configured output vertex
   */
  private static JobOutputVertex createOutput(
      JobGraph jobGraph, String resultPath, int numSubTasks, TypeSerializerFactory<?> serializer) {

    JobOutputVertex sink =
        JobGraphUtils.createFileOutput(jobGraph, "Output", numSubTasks, numSubTasks);

    // Wire input gate 0 and its serializer.
    TaskConfig config = new TaskConfig(sink.getConfiguration());
    config.addInputToGroup(0);
    config.setInputSerializer(serializer, 0);

    // Ship the configured output format to the task as a user-code object.
    PointOutFormat format = new PointOutFormat();
    format.setOutputFilePath(new Path(resultPath));
    config.setStubWrapper(new UserCodeObjectWrapper<PointOutFormat>(format));

    return sink;
  }
  /**
   * Creates the file output vertex that writes three long values per record to
   * {@code resultPath} as CSV (space-separated fields, newline-separated records).
   *
   * @param jobGraph the job graph the output vertex is added to
   * @param resultPath the target path for the output file
   * @param numSubTasks the degree of parallelism (also used as subtasks per instance)
   * @param serializer the serializer factory for the vertex's single input
   * @return the configured output vertex
   */
  private static JobOutputVertex createOutput(
      JobGraph jobGraph, String resultPath, int numSubTasks, TypeSerializerFactory<?> serializer) {

    JobOutputVertex sink =
        JobGraphUtils.createFileOutput(jobGraph, "Output", numSubTasks, numSubTasks);

    // Wire input gate 0 and its serializer.
    TaskConfig config = new TaskConfig(sink.getConfiguration());
    config.addInputToGroup(0);
    config.setInputSerializer(serializer, 0);

    // Three LongValue columns, space-delimited, one record per line.
    @SuppressWarnings("unchecked")
    CsvOutputFormat format =
        new CsvOutputFormat("\n", " ", LongValue.class, LongValue.class, LongValue.class);
    format.setOutputFilePath(new Path(resultPath));

    config.setStubWrapper(new UserCodeObjectWrapper<CsvOutputFormat>(format));

    return sink;
  }
  /**
   * Assembles the job graph for the iterative KMeans job: points and centers inputs, an
   * iteration head, a mapper with a cached broadcast input, a reducer, a fake tail output, a
   * synchronization sink, and the result output, all sharing instances with the output vertex.
   *
   * @param pointsPath input path for the data points
   * @param centersPath input path for the initial cluster centers
   * @param resultPath output path for the final result
   * @param numSubTasks degree of parallelism for every vertex
   * @param numIterations number of iterations the sync task coordinates
   * @return the fully wired job graph
   * @throws JobGraphDefinitionException if connecting any pair of vertices fails
   */
  private static JobGraph createJobGraph(
      String pointsPath, String centersPath, String resultPath, int numSubTasks, int numIterations)
      throws JobGraphDefinitionException {

    // -- init
    // -------------------------------------------------------------------------------------------------
    final TypeSerializerFactory<?> serializer = RecordSerializerFactory.get();
    @SuppressWarnings("unchecked")
    final TypeComparatorFactory<?> int0Comparator =
        new RecordComparatorFactory(new int[] {0}, new Class[] {IntValue.class});

    JobGraph jobGraph = new JobGraph("KMeans Iterative");

    // -- vertices
    // ---------------------------------------------------------------------------------------------
    JobInputVertex points = createPointsInput(jobGraph, pointsPath, numSubTasks, serializer);
    JobInputVertex centers = createCentersInput(jobGraph, centersPath, numSubTasks, serializer);

    JobTaskVertex head = createIterationHead(jobGraph, numSubTasks, serializer);
    JobTaskVertex mapper =
        createMapper(jobGraph, numSubTasks, serializer, serializer, serializer, int0Comparator);

    JobTaskVertex reducer =
        createReducer(jobGraph, numSubTasks, serializer, int0Comparator, serializer);

    JobOutputVertex fakeTailOutput =
        JobGraphUtils.createFakeOutput(jobGraph, "FakeTailOutput", numSubTasks, numSubTasks);

    JobOutputVertex sync = createSync(jobGraph, numIterations, numSubTasks);

    JobOutputVertex output = createOutput(jobGraph, resultPath, numSubTasks, serializer);

    // -- edges
    // ------------------------------------------------------------------------------------------------
    JobGraphUtils.connect(points, mapper, ChannelType.NETWORK, DistributionPattern.POINTWISE);

    JobGraphUtils.connect(centers, head, ChannelType.NETWORK, DistributionPattern.POINTWISE);

    JobGraphUtils.connect(head, mapper, ChannelType.NETWORK, DistributionPattern.BIPARTITE);
    // One TaskConfig wrapper is enough; the original constructed a fresh wrapper around the same
    // Configuration for every call.
    TaskConfig mapperConfig = new TaskConfig(mapper.getConfiguration());
    // Broadcast gate 0 carries the centers; it is cached and materialized in memory so the
    // mapper can re-read it every iteration.
    mapperConfig.setBroadcastGateIterativeWithNumberOfEventsUntilInterrupt(0, numSubTasks);
    mapperConfig.setInputCached(0, true);
    mapperConfig.setInputMaterializationMemory(0, MEMORY_PER_CONSUMER * JobGraphUtils.MEGABYTE);

    JobGraphUtils.connect(mapper, reducer, ChannelType.NETWORK, DistributionPattern.BIPARTITE);
    TaskConfig reducerConfig = new TaskConfig(reducer.getConfiguration());
    reducerConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, numSubTasks);

    JobGraphUtils.connect(
        reducer, fakeTailOutput, ChannelType.NETWORK, DistributionPattern.POINTWISE);

    JobGraphUtils.connect(head, output, ChannelType.NETWORK, DistributionPattern.POINTWISE);

    JobGraphUtils.connect(head, sync, ChannelType.NETWORK, DistributionPattern.BIPARTITE);

    // -- instance sharing
    // -------------------------------------------------------------------------------------
    points.setVertexToShareInstancesWith(output);
    centers.setVertexToShareInstancesWith(output);
    head.setVertexToShareInstancesWith(output);
    mapper.setVertexToShareInstancesWith(output);
    reducer.setVertexToShareInstancesWith(output);
    fakeTailOutput.setVertexToShareInstancesWith(output);
    sync.setVertexToShareInstancesWith(output);

    return jobGraph;
  }