@Override
  public List<TaskSubmissionResult> submitTasks(final List<TaskDeploymentDescriptor> tasks)
      throws IOException {

    final List<TaskSubmissionResult> submissionResultList = new SerializableArrayList<>();
    final List<Task> tasksToStart = new ArrayList<>();

    // Make sure all tasks are fully registered before they are started
    for (final TaskDeploymentDescriptor tdd : tasks) {

      final JobID jobID = tdd.getJobID();
      final ExecutionVertexID vertexID = tdd.getVertexID();
      RuntimeEnvironment re;

      // Retrieve the registered cache files from the job configuration and create the
      // local tmp files. Each FutureTask copies one cached file asynchronously; the
      // futures are passed to the RuntimeEnvironment below so the task can wait on them.
      final Map<String, FutureTask<Path>> cpTasks = new HashMap<>();
      for (Entry<String, DistributedCacheEntry> e :
          DistributedCache.readFileInfoFromConfig(tdd.getJobConfiguration())) {
        FutureTask<Path> cp = this.fileCache.createTmpFile(e.getKey(), e.getValue(), jobID);
        cpTasks.put(e.getKey(), cp);
      }

      try {
        re =
            new RuntimeEnvironment(
                tdd,
                this.memoryManager,
                this.ioManager,
                new TaskInputSplitProvider(jobID, vertexID, this.globalInputSplitProvider),
                this.accumulatorProtocolProxy,
                cpTasks);
      } catch (Throwable t) {
        final TaskSubmissionResult result =
            new TaskSubmissionResult(vertexID, AbstractTaskResult.ReturnCode.DEPLOYMENT_ERROR);
        result.setDescription(StringUtils.stringifyException(t));
        LOG.error(result.getDescription(), t);
        submissionResultList.add(result);
        continue;
      }

      final Configuration jobConfiguration = tdd.getJobConfiguration();

      // Register the task
      Task task;
      try {
        task = createAndRegisterTask(vertexID, jobConfiguration, re);
      } catch (InsufficientResourcesException e) {
        final TaskSubmissionResult result =
            new TaskSubmissionResult(
                vertexID, AbstractTaskResult.ReturnCode.INSUFFICIENT_RESOURCES);
        result.setDescription(e.getMessage());
        LOG.error(result.getDescription(), e);
        submissionResultList.add(result);
        continue;
      }

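      // Note: createAndRegisterTask returns null when a task with this vertex ID is
      // already registered, i.e. the task is already running on this TaskManager.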
      if (task == null) {
        final TaskSubmissionResult result =
            new TaskSubmissionResult(vertexID, AbstractTaskResult.ReturnCode.TASK_NOT_FOUND);
        result.setDescription(
            "Task " + re.getTaskNameWithIndex() + " (" + vertexID + ") was already running");
        LOG.error(result.getDescription());
        submissionResultList.add(result);
        continue;
      }

      submissionResultList.add(
          new TaskSubmissionResult(vertexID, AbstractTaskResult.ReturnCode.SUCCESS));
      tasksToStart.add(task);
    }

    // Now start the tasks
    for (final Task task : tasksToStart) {
      task.startExecution();
    }

    return submissionResultList;
  }
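
  // Hypothetical usage sketch (not part of the original class): a caller could scan the
  // returned list and log every submission that did not succeed. The helper name and the
  // log format are illustrative; this assumes TaskSubmissionResult exposes getReturnCode(),
  // getVertexID() and getDescription() accessors matching the constructor used above.
  private static void logFailedSubmissions(final List<TaskSubmissionResult> results) {
    for (final TaskSubmissionResult result : results) {
      if (result.getReturnCode() != AbstractTaskResult.ReturnCode.SUCCESS) {
        LOG.warn(
            "Submission of task " + result.getVertexID() + " failed: " + result.getDescription());
      }
    }
  }
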
  @Test
  public void testBuildDeploymentDescriptor() {
    try {
      final JobID jobId = new JobID();

      final JobVertexID jid1 = new JobVertexID();
      final JobVertexID jid2 = new JobVertexID();
      final JobVertexID jid3 = new JobVertexID();
      final JobVertexID jid4 = new JobVertexID();

      JobVertex v1 = new JobVertex("v1", jid1);
      JobVertex v2 = new JobVertex("v2", jid2);
      JobVertex v3 = new JobVertex("v3", jid3);
      JobVertex v4 = new JobVertex("v4", jid4);

      v1.setParallelism(10);
      v2.setParallelism(10);
      v3.setParallelism(10);
      v4.setParallelism(10);

      v1.setInvokableClass(BatchTask.class);
      v2.setInvokableClass(BatchTask.class);
      v3.setInvokableClass(BatchTask.class);
      v4.setInvokableClass(BatchTask.class);

      v2.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL);
      v3.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL);
      v4.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL);
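      // Resulting topology: v1 -> v2 -> {v3, v4}: v2 consumes one data set (from v1)
      // and produces two (consumed by v3 and v4).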

      ExecutionGraph eg =
          new ExecutionGraph(
              TestingUtils.defaultExecutionContext(),
              jobId,
              "some job",
              new Configuration(),
              new SerializedValue<>(new ExecutionConfig()),
              AkkaUtils.getDefaultTimeout(),
              new NoRestartStrategy());

      List<JobVertex> ordered = Arrays.asList(v1, v2, v3, v4);

      eg.attachJobGraph(ordered);

      ExecutionJobVertex ejv = eg.getAllVertices().get(jid2);
      ExecutionVertex vertex = ejv.getTaskVertices()[3];

      ExecutionGraphTestUtils.SimpleActorGateway instanceGateway =
          new ExecutionGraphTestUtils.SimpleActorGateway(TestingUtils.directExecutionContext());

      final Instance instance = getInstance(instanceGateway);

      final SimpleSlot slot = instance.allocateSimpleSlot(jobId);

      assertEquals(ExecutionState.CREATED, vertex.getExecutionState());

      vertex.deployToSlot(slot);

      assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());

      TaskDeploymentDescriptor descr = instanceGateway.lastTDD;
      assertNotNull(descr);

      assertEquals(jobId, descr.getJobID());
      assertEquals(jid2, descr.getVertexID());
      assertEquals(3, descr.getIndexInSubtaskGroup());
      assertEquals(10, descr.getNumberOfSubtasks());
      assertEquals(BatchTask.class.getName(), descr.getInvokableClassName());
      assertEquals("v2", descr.getTaskName());

      List<ResultPartitionDeploymentDescriptor> producedPartitions = descr.getProducedPartitions();
      List<InputGateDeploymentDescriptor> consumedPartitions = descr.getInputGates();

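      // v2 feeds both v3 and v4, hence two produced partitions; it reads a single
      // data set from v1, hence one consumed input gate.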
      assertEquals(2, producedPartitions.size());
      assertEquals(1, consumedPartitions.size());

      assertEquals(10, producedPartitions.get(0).getNumberOfSubpartitions());
      assertEquals(10, producedPartitions.get(1).getNumberOfSubpartitions());
      assertEquals(10, consumedPartitions.get(0).getInputChannelDeploymentDescriptors().length);
    } catch (Exception e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }
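
  // Hypothetical bridge between the two fragments above (illustrative only): a descriptor
  // captured this way has the same shape as the descriptors submitTasks(...) receives.
  // The 'submitter' type is an assumption; substitute whatever service exposes the
  // submitTasks(...) signature shown earlier. getReturnCode() is assumed from the
  // AbstractTaskResult constructor usage above.
  private static void assertSuccessfulSubmission(
      final TaskManager submitter, final TaskDeploymentDescriptor descr) throws IOException {
    final List<TaskSubmissionResult> results = submitter.submitTasks(Arrays.asList(descr));
    assertEquals(1, results.size());
    assertEquals(AbstractTaskResult.ReturnCode.SUCCESS, results.get(0).getReturnCode());
  }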