/**
 * Fetches per-partition offsets for a topic by shelling out to Kafka's GetOffsetShell tool.
 *
 * <p>Equivalent command line:
 * {@code ./kafka-run-class.sh kafka.tools.GetOffsetShell --broker-list <broker1,broker2,...>
 * --topic <topic> --time <time>}
 *
 * @param topicName the topic whose offsets are queried
 * @param time offset spec passed to the tool: -1 = latest, -2 = earliest,
 *     otherwise a timestamp in milliseconds
 * @return parsed partition/offset data from the tool's stdout
 * @throws Exception if running the shell command fails
 */
public JSONArray getOffsets(String topicName, Long time) throws Exception {
    // Broker endpoints come from the scheduler's current cluster state.
    List<String> brokerEndpoints = kafkaScheduler.getKafkaState().getBrokerEndpoints();
    String brokers = StringUtils.join(brokerEndpoints, ",");

    List<String> cmd = new ArrayList<String>();
    cmd.add(binPath + "kafka-run-class.sh");
    cmd.add("kafka.tools.GetOffsetShell");
    cmd.add("--topic");
    cmd.add(topicName);
    cmd.add("--time");
    cmd.add(String.valueOf(time));
    cmd.add("--broker-list");
    cmd.add(brokers);

    String stdout = (String) runCmd(cmd).get("message");
    // runCmd prefixes its message with "Output: ". Strip it only when actually present:
    // the original unconditional substring() threw StringIndexOutOfBoundsException on
    // short messages and silently mangled messages without the prefix.
    final String prefix = "Output: ";
    if (stdout != null && stdout.startsWith(prefix)) {
      stdout = stdout.substring(prefix.length());
    }
    return getPartitions(stdout);
  }
  /**
   * Runs a producer performance test with the historical defaults
   * (throughput cap 100,000 records/sec, 1024-byte records).
   *
   * @param topicName the topic to produce to
   * @param messages the number of records to send
   * @return the raw result of running the perf-test command
   * @throws Exception if running the shell command fails
   */
  public JSONObject producerTest(String topicName, int messages) throws Exception {
    // Backward-compatible wrapper: preserves the original hard-coded settings.
    return producerTest(topicName, messages, 100000, 1024);
  }

  /**
   * Runs kafka-producer-perf-test.sh against the current broker set.
   *
   * <p>Equivalent command line:
   * {@code ./kafka-producer-perf-test.sh --topic <topic> --num-records <n>
   * --throughput <t> --record-size <s> --producer-props bootstrap.servers=<brokers>}
   *
   * @param topicName the topic to produce to
   * @param messages the number of records to send
   * @param throughput maximum records per second (use -1 for no throttling)
   * @param recordSize size of each record in bytes
   * @return the raw result of running the perf-test command
   * @throws Exception if running the shell command fails
   */
  public JSONObject producerTest(String topicName, int messages, int throughput, int recordSize)
      throws Exception {
    // Broker endpoints come from the scheduler's current cluster state.
    List<String> brokerEndpoints = kafkaScheduler.getKafkaState().getBrokerEndpoints();
    String brokers = StringUtils.join(brokerEndpoints, ",");
    String bootstrapServers = "bootstrap.servers=" + brokers;

    List<String> cmd = new ArrayList<String>();
    cmd.add(binPath + "kafka-producer-perf-test.sh");
    cmd.add("--topic");
    cmd.add(topicName);
    cmd.add("--num-records");
    cmd.add(Integer.toString(messages));
    cmd.add("--throughput");
    cmd.add(Integer.toString(throughput));
    cmd.add("--record-size");
    cmd.add(Integer.toString(recordSize));
    cmd.add("--producer-props");
    cmd.add(bootstrapServers);

    return runCmd(cmd);
  }