public int getDelaySeconds(Object[] args) {
    if (oldStatus != null && oldStatus.getDelaySecs() > 0) {
      return oldStatus.getDelaySecs();
    }

    Integer delaySecs = DelayStatusTransitionCallback.DEFAULT_DELAY_SECONDS;
    if (args == null || args.length == 0 || args[0] == null) {
      Map<?, ?> map = null;
      try {

        map = StormConfig.read_nimbus_topology_conf(data.getConf(), topologyid);
        delaySecs =
            JStormUtils.parseInt(
                map.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS), DEFAULT_DELAY_SECONDS);
      } catch (Exception e) {
        LOG.info("Failed to get topology configuration " + topologyid);
      }

    } else {
      delaySecs = JStormUtils.parseInt(args[0]);
    }

    if (delaySecs == null || delaySecs <= 0) {
      delaySecs = DelayStatusTransitionCallback.DEFAULT_DELAY_SECONDS;
    }

    return delaySecs;
  }
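A brief usage sketch, illustrative only and assuming `callback` is an instance of the enclosing class: a positive delay carried by the old status wins, then an explicit value in args[0], then the topology's TOPOLOGY_MESSAGE_TIMEOUT_SECS, and finally DEFAULT_DELAY_SECONDS.

  // usage sketch (hypothetical caller; `callback` is an assumed instance of this class)
  int explicit = callback.getDelaySeconds(new Object[] {30}); // 30, unless oldStatus already carries a delay
  int fallback = callback.getDelaySeconds(null);              // topology timeout, or DEFAULT_DELAY_SECONDS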
Example No. 2
  /**
   * wait for the worker to launch: keep polling while the elapsed time has not exceeded
   * SUPERVISOR_WORKER_START_TIMEOUT_SECS; otherwise log that the worker failed to start
   *
   * @param conf
   * @param workerId
   * @param startTime
   * @throws IOException
   * @throws InterruptedException
   * @pdOid f0a6ab43-8cd3-44e1-8fd3-015a2ec51c6a
   */
  public void waitForWorkerLaunch(Map conf, String workerId, int startTime)
      throws IOException, InterruptedException {

    LocalState ls = StormConfig.worker_state(conf, workerId);

    while (true) {

      WorkerHeartbeat whb = (WorkerHeartbeat) ls.get(Common.LS_WORKER_HEARTBEAT);
      if (whb == null
          && ((TimeUtils.current_time_secs() - startTime)
              < JStormUtils.parseInt(conf.get(Config.SUPERVISOR_WORKER_START_TIMEOUT_SECS)))) {
        LOG.info(workerId + " still hasn't started");
        Time.sleep(500);
      } else {
        // whb is valid or timeout
        break;
      }
    }

    WorkerHeartbeat whb = (WorkerHeartbeat) ls.get(Common.LS_WORKER_HEARTBEAT);
    if (whb == null) {
      LOG.error("Failed to start Worker " + workerId);
    } else {
      LOG.info("Successfully start worker " + workerId);
    }
  }
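The batch variant waitForWorkersLaunch used in Example No. 5 below is not shown on this page; a minimal sketch of such a wrapper, assuming it records a single start time and reuses the single-worker wait above (the real implementation may differ):

  public void waitForWorkersLaunch(Map conf, Collection<String> workerIds)
      throws IOException, InterruptedException {
    int startTime = TimeUtils.current_time_secs();
    for (String workerId : workerIds) {
      // the shared startTime bounds the overall wait across all workers
      waitForWorkerLaunch(conf, workerId, startTime);
    }
  }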
Example No. 3
  /**
   * get the worker heartbeat by workerId
   *
   * @param conf
   * @param workerId
   * @return WorkerHeartbeat
   * @throws IOException
   */
  public WorkerHeartbeat readWorkerHeartbeat(Map conf, String workerId) throws Exception {

    try {
      LocalState ls = StormConfig.worker_state(conf, workerId);

      return (WorkerHeartbeat) ls.get(Common.LS_WORKER_HEARTBEAT);
    } catch (IOException e) {
      LOG.error("Failed to get worker Heartbeat", e);
      return null;
    }
  }
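A short usage sketch, illustrative only, with `supervisor` assumed to be an instance of the enclosing class; the caller must handle the null result noted above.

  // usage sketch (hypothetical caller)
  WorkerHeartbeat whb = supervisor.readWorkerHeartbeat(conf, workerId);
  if (whb == null) {
    LOG.info("No heartbeat available yet for worker " + workerId);
  }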
Example No. 4
  /**
   * get all workers' heartbeats of this supervisor
   *
   * @param conf
   * @return Map<workerId, WorkerHeartbeat>
   * @throws IOException
   */
  public Map<String, WorkerHeartbeat> readWorkerHeartbeats(Map conf) throws Exception {

    Map<String, WorkerHeartbeat> workerHeartbeats = new HashMap<String, WorkerHeartbeat>();

    // get the path: STORM-LOCAL-DIR/workers
    String path = StormConfig.worker_root(conf);

    List<String> workerIds = PathUtils.read_dir_contents(path);

    if (workerIds == null) {
      LOG.info("No worker dir under " + path);
      return workerHeartbeats;
    }

    for (String workerId : workerIds) {

      WorkerHeartbeat whb = readWorkerHeartbeat(conf, workerId);

      // ATTENTION: whb can be null
      workerHeartbeats.put(workerId, whb);
    }
    return workerHeartbeats;
  }
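Because the returned map may contain null values (see the ATTENTION note above), callers typically have to treat missing heartbeats explicitly; a minimal sketch, with `supervisor` assumed to be an instance of the enclosing class:

  // usage sketch (hypothetical caller)
  Map<String, WorkerHeartbeat> heartbeats = supervisor.readWorkerHeartbeats(conf);
  for (Entry<String, WorkerHeartbeat> entry : heartbeats.entrySet()) {
    if (entry.getValue() == null) {
      // worker directory exists, but no heartbeat has been recorded yet
      LOG.info("Worker " + entry.getKey() + " has not reported a heartbeat");
    }
  }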
Example No. 5
  private void startNewWorkers(
      Set<Integer> keepPorts, Map<Integer, LocalAssignment> localAssignments) throws Exception {
    /**
     * Step 4: get the reassigned tasks, i.e. entries that are in localAssignments but not in
     * keepPorts; the result is a Map<port(Integer), LocalAssignment>
     */
    Map<Integer, LocalAssignment> newWorkers =
        JStormUtils.select_keys_pred(keepPorts, localAssignments);

    /** Step 5: generate new worker ids */
    Map<Integer, String> newWorkerIds = new HashMap<Integer, String>();

    for (Entry<Integer, LocalAssignment> entry : newWorkers.entrySet()) {
      Integer port = entry.getKey();
      LocalAssignment assignment = entry.getValue();

      String workerId = UUID.randomUUID().toString();

      newWorkerIds.put(port, workerId);

      // create new worker Id directory
      // LOCALDIR/workers/newworkid/pids
      try {
        StormConfig.worker_pids_root(conf, workerId);
      } catch (IOException e1) {
        LOG.error("Failed to create " + workerId + " localdir", e1);
        throw e1;
      }

      StringBuilder sb = new StringBuilder();
      sb.append("Launching worker with assiangment ");
      sb.append(assignment.toString());
      sb.append(" for the supervisor ");
      sb.append(supervisorId);
      sb.append(" on port ");
      sb.append(port);
      sb.append(" with id ");
      sb.append(workerId);
      LOG.info(sb);

      try {
        String clusterMode = StormConfig.cluster_mode(conf);

        if (clusterMode.equals("distributed")) {
          launchWorker(
              conf,
              sharedContext,
              assignment.getTopologyId(),
              supervisorId,
              port,
              workerId,
              assignment);
        } else if (clusterMode.equals("local")) {
          launchWorker(
              conf,
              sharedContext,
              assignment.getTopologyId(),
              supervisorId,
              port,
              workerId,
              workerThreadPids);
        }
      } catch (Exception e) {
        String errorMsg = "Failed to launchWorker workerId:" + workerId + ":" + port;
        LOG.error(errorMsg, e);
        throw e;
      }
    }

    /** FIXME: workerIds should be a Set, not a Collection, but keep the logic simple here */
    Collection<String> workerIds = newWorkerIds.values();
    try {
      waitForWorkersLaunch(conf, workerIds);
    } catch (IOException e) {
      LOG.error("waitForWorkersLaunch failed", e);
    } catch (InterruptedException e) {
      LOG.error("waitForWorkersLaunch failed", e);
    }
  }
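JStormUtils.select_keys_pred is not shown on this page; the Step 4 comment implies it drops every assignment whose port already appears in keepPorts. A plain-Java sketch of that assumed filtering, for illustration only:

  // sketch of the assumed select_keys_pred semantics: keep entries whose key is NOT in the filter set
  private static <K, V> Map<K, V> selectKeysNotIn(Set<K> filter, Map<K, V> all) {
    Map<K, V> result = new HashMap<K, V>();
    for (Entry<K, V> entry : all.entrySet()) {
      if (!filter.contains(entry.getKey())) {
        result.put(entry.getKey(), entry.getValue());
      }
    }
    return result;
  }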
Example No. 6
  /**
   * launch a worker in distributed mode
   *
   * @param conf
   * @param sharedcontext
   * @param topologyId
   * @param supervisorId
   * @param port
   * @param workerId
   * @param assignment
   * @throws IOException
   * @pdOid 6ea369dd-5ce2-4212-864b-1f8b2ed94abb
   */
  public void launchWorker(
      Map conf,
      IContext sharedcontext,
      String topologyId,
      String supervisorId,
      Integer port,
      String workerId,
      LocalAssignment assignment)
      throws IOException {

    // STORM-LOCAL-DIR/supervisor/stormdist/topologyId
    String stormroot = StormConfig.supervisor_stormdist_root(conf, topologyId);

    // STORM-LOCAL-DIR/supervisor/stormdist/topologyId/stormjar.jar
    String stormjar = StormConfig.stormjar_path(stormroot);

    // get supervisor conf
    Map stormConf = StormConfig.read_supervisor_topology_conf(conf, topologyId);

    Map totalConf = new HashMap();
    totalConf.putAll(conf);
    totalConf.putAll(stormConf);

    // get classpath
    // String[] param = new String[1];
    // param[0] = stormjar;
    // String classpath = JStormUtils.add_to_classpath(
    // JStormUtils.current_classpath(), param);

    // get child process parameter

    String stormhome = System.getProperty("jstorm.home");

    long memSize = assignment.getMem();
    int cpuNum = assignment.getCpu();
    String childopts = getChildOpts(totalConf);

    childopts += getGcDumpParam(totalConf);

    childopts = childopts.replace("%ID%", port.toString());
    childopts = childopts.replace("%TOPOLOGYID%", topologyId);
    if (stormhome != null) {
      childopts = childopts.replace("%JSTORM_HOME%", stormhome);
    } else {
      childopts = childopts.replace("%JSTORM_HOME%", "./");
    }
    Map<String, String> environment = new HashMap<String, String>();

    if (ConfigExtension.getWorkerRedirectOutput(totalConf)) {
      environment.put("REDIRECT", "true");
    } else {
      environment.put("REDIRECT", "false");
    }

    String logFileName = JStormUtils.genLogName(assignment.getTopologyName(), port);
    // String logFileName = topologyId + "-worker-" + port + ".log";

    environment.put("LD_LIBRARY_PATH", (String) totalConf.get(Config.JAVA_LIBRARY_PATH));

    StringBuilder commandSB = new StringBuilder();

    try {
      if (this.cgroupManager != null) {
        commandSB.append(cgroupManager.startNewWorker(cpuNum, workerId));
      }
    } catch (Exception e) {
      LOG.error("fail to prepare cgroup to workerId: " + workerId, e);
      return;
    }

    // commandSB.append("java -server -Xdebug
    // -Xrunjdwp:transport=dt_socket,address=8000,server=y,suspend=n ");
    commandSB.append("java -server ");
    commandSB.append(" -Xms" + memSize);
    commandSB.append(" -Xmx" + memSize + " ");
    commandSB.append(" -Xmn" + memSize / 3 + " ");
    commandSB.append(" -XX:PermSize=" + memSize / 16);
    commandSB.append(" -XX:MaxPermSize=" + memSize / 8);
    commandSB.append(" " + childopts);
    commandSB.append(" " + (assignment.getJvm() == null ? "" : assignment.getJvm()));

    commandSB.append(" -Djava.library.path=");
    commandSB.append((String) totalConf.get(Config.JAVA_LIBRARY_PATH));

    commandSB.append(" -Dlogfile.name=");
    commandSB.append(logFileName);

    // commandSB.append(" -Dlog4j.ignoreTCL=true");

    if (stormhome != null) {
      // commandSB.append(" -Dlogback.configurationFile=" + stormhome +
      // "/conf/cluster.xml");
      commandSB.append(
          " -Dlog4j.configuration=File:" + stormhome + "/conf/jstorm.log4j.properties");
      commandSB.append(" -Djstorm.home=");
      commandSB.append(stormhome);
    } else {
      // commandSB.append(" -Dlogback.configurationFile=cluster.xml");
      commandSB.append(" -Dlog4j.configuration=File:jstorm.log4j.properties");
    }

    String classpath = getClassPath(stormjar, stormhome, totalConf);
    String workerClassPath = (String) totalConf.get(Config.WORKER_CLASSPATH);
    List<String> otherLibs = (List<String>) stormConf.get(GenericOptionsParser.TOPOLOGY_LIB_NAME);
    StringBuilder sb = new StringBuilder();
    if (otherLibs != null) {
      for (String libName : otherLibs) {
        sb.append(StormConfig.stormlib_path(stormroot, libName)).append(":");
      }
    }
    workerClassPath = workerClassPath + ":" + sb.toString();

    Map<String, String> policyReplaceMap = new HashMap<String, String>();
    String realClassPath = classpath + ":" + workerClassPath;
    policyReplaceMap.put(SandBoxMaker.CLASS_PATH_KEY, realClassPath);
    commandSB.append(sandBoxMaker.sandboxPolicy(workerId, policyReplaceMap));

    // commandSB.append(" -Dlog4j.configuration=storm.log.properties");

    commandSB.append(" -cp ");
    // commandSB.append(workerClassPath + ":");
    commandSB.append(classpath);
    if (!ConfigExtension.isEnableTopologyClassLoader(totalConf)) {
      commandSB.append(":").append(workerClassPath);
    }

    commandSB.append(" org.act.tstream.daemon.worker.Worker ");
    commandSB.append(topologyId);

    commandSB.append(" ");
    commandSB.append(supervisorId);

    commandSB.append(" ");
    commandSB.append(port);

    commandSB.append(" ");
    commandSB.append(workerId);

    commandSB.append(" ");
    commandSB.append(workerClassPath + ":" + stormjar);

    LOG.info("Launching worker with command: " + commandSB);
    LOG.info("Environment:" + environment.toString());

    JStormUtils.launch_process(commandSB.toString(), environment, true);
  }
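JStormUtils.launch_process is likewise not shown here; a minimal sketch of an equivalent launch using the JDK's ProcessBuilder, assuming the command is whitespace-separated and the extra environment entries are merged into the child's environment (the real helper may handle output redirection and detaching differently):

  // sketch only: illustrates what launching the assembled command amounts to
  public static Process launchProcessSketch(String command, Map<String, String> environment)
      throws IOException {
    ProcessBuilder builder = new ProcessBuilder(command.trim().split("\\s+"));
    for (Entry<String, String> entry : environment.entrySet()) {
      if (entry.getValue() != null) { // skip unset values, e.g. a missing LD_LIBRARY_PATH
        builder.environment().put(entry.getKey(), entry.getValue());
      }
    }
    builder.redirectErrorStream(true); // merge stderr into stdout
    return builder.start();
  }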