  private backtype.storm.Config getStormConfig(com.typesafe.config.Config config) {
    backtype.storm.Config conf = new backtype.storm.Config();
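    // Batch size and Storm internal buffer sizes applied to every topology submitted by this runtime.
    // Int.box (presumably scala.Int) boxes the primitives into the java.lang.Integer values the config map stores.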
    conf.put(RichSpoutBatchExecutor.MAX_BATCH_SIZE_CONF, Int.box(64 * 1024));
    conf.put(backtype.storm.Config.TOPOLOGY_RECEIVER_BUFFER_SIZE, Int.box(8));
    conf.put(backtype.storm.Config.TOPOLOGY_TRANSFER_BUFFER_SIZE, Int.box(32));
    conf.put(backtype.storm.Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE, Int.box(16384));
    conf.put(backtype.storm.Config.TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE, Int.box(16384));
    conf.put(backtype.storm.Config.NIMBUS_THRIFT_MAX_BUFFER_SIZE, Int.box(20480000));
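    // Resolve the Nimbus host, preferring the runtime environment's config over the built-in default.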
    String nimbusHost = STORM_NIMBUS_HOST_DEFAULT;
    if (environment.config().hasPath(STORM_NIMBUS_HOST_CONF_PATH)) {
      nimbusHost = environment.config().getString(STORM_NIMBUS_HOST_CONF_PATH);
      LOG.info("Overriding {} = {}", STORM_NIMBUS_HOST_CONF_PATH, nimbusHost);
    } else {
      LOG.info("Using default {} = {}", STORM_NIMBUS_HOST_CONF_PATH, STORM_NIMBUS_HOST_DEFAULT);
    }
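    // Resolve the Nimbus Thrift port the same way.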
    Integer nimbusThriftPort = STORM_NIMBUS_THRIFT_DEFAULT;
    if (environment.config().hasPath(STORM_NIMBUS_THRIFT_CONF_PATH)) {
      nimbusThriftPort = environment.config().getInt(STORM_NIMBUS_THRIFT_CONF_PATH);
      LOG.info("Overriding {} = {}", STORM_NIMBUS_THRIFT_CONF_PATH, nimbusThriftPort);
    } else {
      LOG.info("Using default {} = {}", STORM_NIMBUS_THRIFT_CONF_PATH, STORM_NIMBUS_THRIFT_DEFAULT);
    }
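    // Apply the resolved Nimbus connection settings and use Storm's plain (non-authenticating) Thrift transport.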
    conf.put(backtype.storm.Config.NIMBUS_HOST, nimbusHost);
    conf.put(backtype.storm.Config.NIMBUS_THRIFT_PORT, nimbusThriftPort);
    conf.put(
        backtype.storm.Config.STORM_THRIFT_TRANSPORT_PLUGIN,
        "backtype.storm.security.auth.SimpleTransportPlugin");
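    // Worker count, when the application config provides one.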
    if (config.hasPath(WORKERS)) {
      conf.setNumWorkers(config.getInt(WORKERS));
    }

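    // Message (tuple) timeout, when the application config provides one.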
    if (config.hasPath(TOPOLOGY_MESSAGE_TIMEOUT_SECS)) {
      conf.put(TOPOLOGY_MESSAGE_TIMEOUT_SECS, config.getInt(TOPOLOGY_MESSAGE_TIMEOUT_SECS));
    }
    return conf;
  }

 @Override
 public void start(
     Application<StormEnvironment, StormTopology> executor, com.typesafe.config.Config config) {
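   // The application id doubles as the Storm topology name, so it must be present.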
   String topologyName = config.getString("appId");
   Preconditions.checkNotNull(
       topologyName, "[appId] is required but was null for " + executor.getClass().getCanonicalName());
   StormTopology topology = executor.execute(config, environment);
   LOG.info(
       "Starting {} ({}), mode: {}",
       topologyName,
       executor.getClass().getCanonicalName(),
       config.getString("mode"));
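   // Build the Storm config (Nimbus location, buffers, workers, timeouts) for this submission.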
   backtype.storm.Config conf = getStormConfig(config);
   if (ApplicationEntity.Mode.CLUSTER.name().equalsIgnoreCase(config.getString("mode"))) {
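     // Locate the topology jar: prefer an explicit jarPath, otherwise derive it from the executor's class.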
     String jarFile = config.hasPath("jarPath") ? config.getString("jarPath") : null;
     if (jarFile == null) {
       jarFile = DynamicJarPathFinder.findPath(executor.getClass());
     }
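     // StormSubmitter reads the jar location from the JVM-wide "storm.jar" system property, so serialize
     // submissions to keep concurrent submitters from overwriting each other's jar path.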
     synchronized (StormExecutionRuntime.class) {
       System.setProperty("storm.jar", jarFile);
       LOG.info("Submitting as cluster mode ...");
       try {
         StormSubmitter.submitTopologyWithProgressBar(topologyName, conf, topology);
       } catch (AlreadyAliveException | InvalidTopologyException e) {
         LOG.error(e.getMessage(), e);
         throw new RuntimeException(e.getMessage(), e);
       } finally {
         System.clearProperty("storm.jar");
       }
     }
   } else {
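     // Local mode: submit to the in-process local cluster instead of a remote Nimbus.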
     LOG.info("Submitting as local mode ...");
     getLocalCluster().submitTopology(topologyName, conf, topology);
     LOG.info("Submitted");
   }
   LOG.info("Started {} ({})", topologyName, executor.getClass().getCanonicalName());
 }