public JobGraph createBlockingJob(int parallelism) {
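    // arm the Scala companion object's flag so the receiver blocks on its first invocation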
    Tasks.BlockingOnceReceiver$.MODULE$.blocking_$eq(true);

    JobVertex sender = new JobVertex("sender");
    JobVertex receiver = new JobVertex("receiver");

    sender.setInvokableClass(Tasks.Sender.class);
    receiver.setInvokableClass(Tasks.BlockingOnceReceiver.class);

    sender.setParallelism(parallelism);
    receiver.setParallelism(parallelism);

    receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE);

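    // sender and receiver share slots, so the whole job can run with `parallelism` task slots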
    SlotSharingGroup slotSharingGroup = new SlotSharingGroup();
    sender.setSlotSharingGroup(slotSharingGroup);
    receiver.setSlotSharingGroup(slotSharingGroup);

    return new JobGraph("Blocking test job", sender, receiver);
  }

  public static JobGraph getJobGraph(String[] args) throws Exception {

    int parallelism = 2;
    String pageWithRankInputPath =
        ""; // "file://" + PlayConstants.PLAY_DIR + "test-inputs/danglingpagerank/pageWithRank"
    String adjacencyListInputPath =
        ""; // "file://" + PlayConstants.PLAY_DIR + "test-inputs/danglingpagerank/adjacencylists"
    String outputPath =
        OperatingSystem.isWindows()
            ? "file:/c:/tmp/flink/iterations"
            : "file:///tmp/flink/iterations";
    //		String confPath = PlayConstants.PLAY_DIR + "local-conf";
    int minorConsumer = 25;
    int matchMemory = 50;
    int coGroupSortMemory = 50;
    int numIterations = 25;
    long numVertices = 5;
    long numDanglingVertices = 1;

    String failingWorkers = "1";
    int failingIteration = 2;
    double messageLoss = 0.75;

    if (args.length >= 15) {
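      // only indices 0-13 are read here; index 4 (the former config path) stays commented out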
      parallelism = Integer.parseInt(args[0]);
      pageWithRankInputPath = args[1];
      adjacencyListInputPath = args[2];
      outputPath = args[3];
      //			confPath = args[4];
      minorConsumer = Integer.parseInt(args[5]);
      matchMemory = Integer.parseInt(args[6]);
      coGroupSortMemory = Integer.parseInt(args[7]);
      numIterations = Integer.parseInt(args[8]);
      numVertices = Long.parseLong(args[9]);
      numDanglingVertices = Long.parseLong(args[10]);
      failingWorkers = args[11];
      failingIteration = Integer.parseInt(args[12]);
      messageLoss = Double.parseDouble(args[13]);
    }

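    // weights for the relative memory fractions below: the head's sort input, the back channel,
    // and the tail's input materialization each take minorConsumer; the join takes matchMemory,
    // the co-group sort takes coGroupSortMemory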
    int totalMemoryConsumption = 3 * minorConsumer + matchMemory + coGroupSortMemory;

    JobGraph jobGraph = new JobGraph("CompensatableDanglingPageRank");

    // --------------- the inputs ---------------------

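    // both inputs are hash-partitioned on the vertex id (record field 0)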
    // page rank input
    InputFormatVertex pageWithRankInput =
        JobGraphUtils.createInput(
            new ImprovedDanglingPageRankInputFormat(),
            pageWithRankInputPath,
            "DanglingPageWithRankInput",
            jobGraph,
            parallelism);
    TaskConfig pageWithRankInputConfig = new TaskConfig(pageWithRankInput.getConfiguration());
    pageWithRankInputConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
    pageWithRankInputConfig.setOutputComparator(fieldZeroComparator, 0);
    pageWithRankInputConfig.setOutputSerializer(recSerializer);
    pageWithRankInputConfig.setStubParameter("pageRank.numVertices", String.valueOf(numVertices));

    // edges as adjacency list
    InputFormatVertex adjacencyListInput =
        JobGraphUtils.createInput(
            new ImprovedAdjacencyListInputFormat(),
            adjacencyListInputPath,
            "AdjancencyListInput",
            jobGraph,
            parallelism);
    TaskConfig adjacencyListInputConfig = new TaskConfig(adjacencyListInput.getConfiguration());
    adjacencyListInputConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
    adjacencyListInputConfig.setOutputSerializer(recSerializer);
    adjacencyListInputConfig.setOutputComparator(fieldZeroComparator, 0);

    // --------------- the head ---------------------
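    // the head reads the initial ranks, drives the superstep loop, and owns the in-memory back channel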
    JobVertex head =
        JobGraphUtils.createTask(
            IterationHeadPactTask.class, "IterationHead", jobGraph, parallelism);
    TaskConfig headConfig = new TaskConfig(head.getConfiguration());
    headConfig.setIterationId(ITERATION_ID);

    // initial input / partial solution
    headConfig.addInputToGroup(0);
    headConfig.setIterationHeadPartialSolutionOrWorksetInputIndex(0);
    headConfig.setInputSerializer(recSerializer, 0);
    headConfig.setInputComparator(fieldZeroComparator, 0);
    headConfig.setInputLocalStrategy(0, LocalStrategy.SORT);
    headConfig.setRelativeMemoryInput(0, (double) minorConsumer / totalMemoryConsumption);
    headConfig.setFilehandlesInput(0, NUM_FILE_HANDLES_PER_SORT);
    headConfig.setSpillingThresholdInput(0, SORT_SPILL_THRESHOLD);

    // back channel / iterations
    headConfig.setRelativeBackChannelMemory((double) minorConsumer / totalMemoryConsumption);

    // output into iteration
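    // two FORWARD outputs: the partial solution goes to the join (intermediate) and to the
    // co-group (tail), see the wiring below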
    headConfig.setOutputSerializer(recSerializer);
    headConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
    headConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);

    // final output
    TaskConfig headFinalOutConfig = new TaskConfig(new Configuration());
    headFinalOutConfig.setOutputSerializer(recSerializer);
    headFinalOutConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
    headConfig.setIterationHeadFinalOutputConfig(headFinalOutConfig);

    // the sync
    headConfig.setIterationHeadIndexOfSyncOutput(3);
    headConfig.setNumberOfIterations(numIterations);

    // the driver
    headConfig.setDriver(CollectorMapDriver.class);
    headConfig.setDriverStrategy(DriverStrategy.COLLECTOR_MAP);
    headConfig.setStubWrapper(new UserCodeClassWrapper<CompensatingMap>(CompensatingMap.class));
    headConfig.setStubParameter("pageRank.numVertices", String.valueOf(numVertices));
    headConfig.setStubParameter("compensation.failingWorker", failingWorkers);
    headConfig.setStubParameter("compensation.failingIteration", String.valueOf(failingIteration));
    headConfig.setStubParameter("compensation.messageLoss", String.valueOf(messageLoss));
    headConfig.addIterationAggregator(
        CompensatableDotProductCoGroup.AGGREGATOR_NAME, new PageRankStatsAggregator());

    // --------------- the join ---------------------

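    // joins the current ranks (input 0) with the adjacency lists (input 1); the hash table is
    // built on the adjacency side and cached across supersteps by the cached-build-side driver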
    JobVertex intermediate =
        JobGraphUtils.createTask(
            IterationIntermediatePactTask.class, "IterationIntermediate", jobGraph, parallelism);
    TaskConfig intermediateConfig = new TaskConfig(intermediate.getConfiguration());
    intermediateConfig.setIterationId(ITERATION_ID);
    //		intermediateConfig.setDriver(RepeatableHashjoinMatchDriverWithCachedBuildside.class);
    intermediateConfig.setDriver(BuildSecondCachedMatchDriver.class);
    intermediateConfig.setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
    intermediateConfig.setRelativeMemoryDriver((double) matchMemory / totalMemoryConsumption);
    intermediateConfig.addInputToGroup(0);
    intermediateConfig.addInputToGroup(1);
    intermediateConfig.setInputSerializer(recSerializer, 0);
    intermediateConfig.setInputSerializer(recSerializer, 1);
    intermediateConfig.setDriverComparator(fieldZeroComparator, 0);
    intermediateConfig.setDriverComparator(fieldZeroComparator, 1);
    intermediateConfig.setDriverPairComparator(pairComparatorFactory);

    intermediateConfig.setOutputSerializer(recSerializer);
    intermediateConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
    intermediateConfig.setOutputComparator(fieldZeroComparator, 0);

    intermediateConfig.setStubWrapper(
        new UserCodeClassWrapper<CompensatableDotProductMatch>(CompensatableDotProductMatch.class));
    intermediateConfig.setStubParameter("pageRank.numVertices", String.valueOf(numVertices));
    intermediateConfig.setStubParameter("compensation.failingWorker", failingWorkers);
    intermediateConfig.setStubParameter(
        "compensation.failingIteration", String.valueOf(failingIteration));
    intermediateConfig.setStubParameter("compensation.messageLoss", String.valueOf(messageLoss));

    // ---------------- the tail (co group) --------------------

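    // the co-group recomputes the ranks; as the iteration tail, its output is written back into
    // the head's back channel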
    JobVertex tail =
        JobGraphUtils.createTask(
            IterationTailPactTask.class, "IterationTail", jobGraph, parallelism);
    TaskConfig tailConfig = new TaskConfig(tail.getConfiguration());
    tailConfig.setIterationId(ITERATION_ID);
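    // mark the tail's result as the feedback that reaches the head at the end of each superstep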
    tailConfig.setIsWorksetUpdate();
    // TODO we need to combine!

    // inputs and driver
    tailConfig.setDriver(CoGroupDriver.class);
    tailConfig.setDriverStrategy(DriverStrategy.CO_GROUP);
    tailConfig.addInputToGroup(0);
    tailConfig.addInputToGroup(1);
    tailConfig.setInputSerializer(recSerializer, 0);
    tailConfig.setInputSerializer(recSerializer, 1);
    tailConfig.setDriverComparator(fieldZeroComparator, 0);
    tailConfig.setDriverComparator(fieldZeroComparator, 1);
    tailConfig.setDriverPairComparator(pairComparatorFactory);
    tailConfig.setInputAsynchronouslyMaterialized(0, true);
    tailConfig.setRelativeInputMaterializationMemory(
        0, (double) minorConsumer / totalMemoryConsumption);
    tailConfig.setInputLocalStrategy(1, LocalStrategy.SORT);
    tailConfig.setInputComparator(fieldZeroComparator, 1);
    tailConfig.setRelativeMemoryInput(1, (double) coGroupSortMemory / totalMemoryConsumption);
    tailConfig.setFilehandlesInput(1, NUM_FILE_HANDLES_PER_SORT);
    tailConfig.setSpillingThresholdInput(1, SORT_SPILL_THRESHOLD);

    // output
    tailConfig.setOutputSerializer(recSerializer);

    // the stub
    tailConfig.setStubWrapper(
        new UserCodeClassWrapper<CompensatableDotProductCoGroup>(
            CompensatableDotProductCoGroup.class));
    tailConfig.setStubParameter("pageRank.numVertices", String.valueOf(numVertices));
    tailConfig.setStubParameter(
        "pageRank.numDanglingVertices", String.valueOf(numDanglingVertices));
    tailConfig.setStubParameter("compensation.failingWorker", failingWorkers);
    tailConfig.setStubParameter("compensation.failingIteration", String.valueOf(failingIteration));
    tailConfig.setStubParameter("compensation.messageLoss", String.valueOf(messageLoss));

    // --------------- the output ---------------------

    OutputFormatVertex output =
        JobGraphUtils.createFileOutput(jobGraph, "FinalOutput", parallelism);
    TaskConfig outputConfig = new TaskConfig(output.getConfiguration());
    outputConfig.addInputToGroup(0);
    outputConfig.setInputSerializer(recSerializer, 0);
    outputConfig.setStubWrapper(
        new UserCodeClassWrapper<PageWithRankOutFormat>(PageWithRankOutFormat.class));
    outputConfig.setStubParameter(FileOutputFormat.FILE_PARAMETER_KEY, outputPath);

    // --------------- the auxiliaries ---------------------

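    // the sync task coordinates the supersteps: it merges the aggregator values and evaluates
    // the convergence criterion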
    JobVertex sync = JobGraphUtils.createSync(jobGraph, parallelism);
    TaskConfig syncConfig = new TaskConfig(sync.getConfiguration());
    syncConfig.setNumberOfIterations(numIterations);
    syncConfig.addIterationAggregator(
        CompensatableDotProductCoGroup.AGGREGATOR_NAME, new PageRankStatsAggregator());
    syncConfig.setConvergenceCriterion(
        CompensatableDotProductCoGroup.AGGREGATOR_NAME, new DiffL1NormConvergenceCriterion());
    syncConfig.setIterationId(ITERATION_ID);

    // --------------- the wiring ---------------------

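    // ALL_TO_ALL edges carry the hash-partitioned streams; POINTWISE edges connect each subtask
    // to its counterpart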
    JobGraphUtils.connect(pageWithRankInput, head, DistributionPattern.ALL_TO_ALL);

    JobGraphUtils.connect(head, intermediate, DistributionPattern.POINTWISE);
    intermediateConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, 1);

    JobGraphUtils.connect(adjacencyListInput, intermediate, DistributionPattern.ALL_TO_ALL);

    JobGraphUtils.connect(head, tail, DistributionPattern.POINTWISE);
    JobGraphUtils.connect(intermediate, tail, DistributionPattern.ALL_TO_ALL);
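    // gate 0 (pointwise from the head) sees one end-of-superstep event per superstep;
    // gate 1 (all-to-all from the intermediate) sees one per producing subtask, hence `parallelism`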
    tailConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, 1);
    tailConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(1, parallelism);

    JobGraphUtils.connect(head, output, DistributionPattern.POINTWISE);

    JobGraphUtils.connect(head, sync, DistributionPattern.POINTWISE);

    SlotSharingGroup sharingGroup = new SlotSharingGroup();
    pageWithRankInput.setSlotSharingGroup(sharingGroup);
    adjacencyListInput.setSlotSharingGroup(sharingGroup);
    head.setSlotSharingGroup(sharingGroup);
    intermediate.setSlotSharingGroup(sharingGroup);
    tail.setSlotSharingGroup(sharingGroup);
    output.setSlotSharingGroup(sharingGroup);
    sync.setSlotSharingGroup(sharingGroup);

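    // each tail / intermediate subtask must run in the same slot as its head subtask
    // (local back channel, reusable cached build side)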
    tail.setStrictlyCoLocatedWith(head);
    intermediate.setStrictlyCoLocatedWith(head);

    return jobGraph;
  }

  /*
   * Test setup:
   * - v1 is isolated, no slot sharing
   * - v2 and v3 (not connected) share slots
   * - v4 and v5 (connected) share slots
   */
  @Test
  public void testAssignSlotSharingGroup() {
    try {
      JobVertex v1 = new JobVertex("v1");
      JobVertex v2 = new JobVertex("v2");
      JobVertex v3 = new JobVertex("v3");
      JobVertex v4 = new JobVertex("v4");
      JobVertex v5 = new JobVertex("v5");

      v1.setParallelism(4);
      v2.setParallelism(5);
      v3.setParallelism(7);
      v4.setParallelism(1);
      v5.setParallelism(11);

      v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
      v5.connectNewDataSetAsInput(v4, DistributionPattern.POINTWISE);

      SlotSharingGroup jg1 = new SlotSharingGroup();
      v2.setSlotSharingGroup(jg1);
      v3.setSlotSharingGroup(jg1);

      SlotSharingGroup jg2 = new SlotSharingGroup();
      v4.setSlotSharingGroup(jg2);
      v5.setSlotSharingGroup(jg2);

      List<JobVertex> vertices = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v4, v5));

      ExecutionGraph eg =
          new ExecutionGraph(
              TestingUtils.defaultExecutionContext(),
              new JobID(),
              "test job",
              new Configuration(),
              ExecutionConfigTest.getSerializedConfig(),
              AkkaUtils.getDefaultTimeout(),
              new NoRestartStrategy());
      eg.attachJobGraph(vertices);

      // check the slot sharing groups that the execution graph assigned to the vertices
      SlotSharingGroup group1 = null;
      SlotSharingGroup group2 = null;

      // verify that v1 tasks have no slot sharing group
      assertNull(eg.getJobVertex(v1.getID()).getSlotSharingGroup());

      // v2 and v3 are shared
      group1 = eg.getJobVertex(v2.getID()).getSlotSharingGroup();
      assertNotNull(group1);
      assertEquals(group1, eg.getJobVertex(v3.getID()).getSlotSharingGroup());

      assertEquals(2, group1.getJobVertexIds().size());
      assertTrue(group1.getJobVertexIds().contains(v2.getID()));
      assertTrue(group1.getJobVertexIds().contains(v3.getID()));

      // v4 and v5 are shared
      group2 = eg.getJobVertex(v4.getID()).getSlotSharingGroup();
      assertNotNull(group2);
      assertEquals(group2, eg.getJobVertex(v5.getID()).getSlotSharingGroup());

      assertEquals(2, group2.getJobVertexIds().size());
      assertTrue(group2.getJobVertexIds().contains(v4.getID()));
      assertTrue(group2.getJobVertexIds().contains(v5.getID()));
    } catch (Exception e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }