  /** Deserializes the AddJobFlowSteps JSON response into an AddJobFlowStepsResult. */
  public AddJobFlowStepsResult unmarshall(JsonUnmarshallerContext context) throws Exception {
    AddJobFlowStepsResult addJobFlowStepsResult = new AddJobFlowStepsResult();

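    // Record the depth at which this object starts; its members sit one level deeper.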
    int originalDepth = context.getCurrentDepth();
    String currentParentElement = context.getCurrentParentElement();
    int targetDepth = originalDepth + 1;

    JsonToken token = context.getCurrentToken();
    if (token == null) token = context.nextToken();
    if (token == VALUE_NULL) return null;

    while (true) {
      if (token == null) break;

      if (token == FIELD_NAME || token == START_OBJECT) {
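        // AddJobFlowStepsResult has a single member, "StepIds".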
        if (context.testExpression("StepIds", targetDepth)) {
          context.nextToken();
          addJobFlowStepsResult.setStepIds(
              new ListUnmarshaller<String>(StringJsonUnmarshaller.getInstance())
                  .unmarshall(context));
        }
      } else if (token == END_ARRAY || token == END_OBJECT) {
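        // Stop once the parser has climbed back out of the object we started in.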
        if (context.getLastParsedParentElement() == null
            || context.getLastParsedParentElement().equals(currentParentElement)) {
          if (context.getCurrentDepth() <= originalDepth) break;
        }
      }

      token = context.nextToken();
    }

    return addJobFlowStepsResult;
  }
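
  /**
   * Creates an EMR cluster, submits a word-count step to it, and polls that step
   * until it reaches a terminal state. Assumes the surrounding class provides an
   * AmazonElasticMapReduce client ({@code emr}) and request credentials
   * ({@code credentials}); both are referenced below.
   */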
  public void createClusterWithStep(String inputPath, String outputPath, String logBucket) {

    String logUri = "s3://" + logBucket + "/";

    RunJobFlowRequest request =
        new RunJobFlowRequest()
            .withName("Create cluster with ReleaseLabel")
            .withReleaseLabel("emr-4.2.0")
            // Steps can also be attached at creation time with withSteps(...);
            // this example adds one to the running cluster later instead.
            .withServiceRole("EMR_DefaultRole")
            .withJobFlowRole("EMR_EC2_DefaultRole")
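            // The two roles above are EMR's standard defaults; if they don't
            // exist yet, `aws emr create-default-roles` creates them.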
            .withInstances(
                new JobFlowInstancesConfig()
                    .withInstanceCount(3)
                    .withKeepJobFlowAliveWhenNoSteps(true)
                    .withMasterInstanceType("m3.xlarge")
                    .withSlaveInstanceType("m3.xlarge"))
            .withVisibleToAllUsers(true)
            .withLogUri(logUri);

    RunJobFlowResult result = emr.runJobFlow(request);

    // --------------------------------------------------------------------------
    // Add a step to the cluster that was just created. Using the job flow id from
    // the RunJobFlow result is more reliable than taking the first entry of
    // listClusters(), whose ordering is not guaranteed to put the new cluster first.

    String clusterId = result.getJobFlowId();

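    // The Hadoop examples jar bundles several demo jobs; "wordcount" selects the
    // word-count program, and the -D flag grants the destination bucket's owner
    // full control of the output objects.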
    HadoopJarStepConfig hadoopConfigAdd =
        new HadoopJarStepConfig()
            .withJar("s3://hadoop-lyy/code/hadoop-mapreduce-examples-2.6.0.jar")
            .withMainClass("wordcount")
            .withArgs("-Dfs.s3.canned.acl=BucketOwnerFullControl", inputPath, outputPath);

    StepConfig customStepAdd =
        new StepConfig("Step2", hadoopConfigAdd).withActionOnFailure(ActionOnFailure.CONTINUE);
    AddJobFlowStepsResult resultAdd =
        emr.addJobFlowSteps(
            new AddJobFlowStepsRequest().withJobFlowId(clusterId).withSteps(customStepAdd));

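    // The returned step ids correspond one-to-one, in order, with the submitted steps.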
    System.out.println(resultAdd.getStepIds());

    // Poll the step until it leaves the PENDING/RUNNING states.

    DescribeStepRequest describe =
        new DescribeStepRequest().withStepId(resultAdd.getStepIds().get(0));

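    // DescribeStep needs both the cluster id and the step id; the per-request
    // credentials override the client's defaults for this call.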
    describe.setClusterId(clusterId);
    describe.setRequestCredentials(credentials);

    DescribeStepResult res = emr.describeStep(describe);
    StepStatus status = res.getStep().getStatus();
    String state = status.getState();

    while (state.equals(StepExecutionState.PENDING.name())
        || state.equals(StepExecutionState.RUNNING.name())) {
      try {
        Thread.sleep(5000);
      } catch (InterruptedException e) {
        // Restore the interrupt flag and stop waiting instead of swallowing it.
        Thread.currentThread().interrupt();
        break;
      }
      res = emr.describeStep(describe);
      status = res.getStep().getStatus();
      state = status.getState();
      System.out.print(".");
    }

    if (state.equals(StepExecutionState.COMPLETED.name())) {
      System.out.println("\nStep completed.");
    } else if (state.equals(StepExecutionState.FAILED.name())
        || state.equals(StepExecutionState.CANCELLED.name())) {
      System.out.println("\nStep failed or was cancelled.");
    }
  }
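
  // A minimal usage sketch, not part of the original source: the bucket names and
  // S3 paths below are placeholders; `emr` and `credentials` are assumed to be
  // fields of this class, as createClusterWithStep references them.
  public void runWordCountExample() {
    createClusterWithStep(
        "s3://my-input-bucket/input/",   // hypothetical input data
        "s3://my-output-bucket/output/", // hypothetical output prefix (must not already exist)
        "my-log-bucket");                // hypothetical log bucket
  }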