Example #1
  /**
   * Wait for the worker to launch: poll until the worker heartbeat appears or the elapsed
   * time exceeds SUPERVISOR_WORKER_START_TIMEOUT_SECS, then log success or failure.
   *
   * @param conf supervisor configuration
   * @param workerId id of the worker being launched
   * @param startTime launch time, in seconds
   * @throws IOException
   * @throws InterruptedException
   * @pdOid f0a6ab43-8cd3-44e1-8fd3-015a2ec51c6a
   */
  public void waitForWorkerLaunch(Map conf, String workerId, int startTime)
      throws IOException, InterruptedException {

    LocalState ls = StormConfig.worker_state(conf, workerId);

    while (true) {

      WorkerHeartbeat whb = (WorkerHeartbeat) ls.get(Common.LS_WORKER_HEARTBEAT);
      if (whb == null
          && ((TimeUtils.current_time_secs() - startTime)
              < JStormUtils.parseInt(conf.get(Config.SUPERVISOR_WORKER_START_TIMEOUT_SECS)))) {
        LOG.info(workerId + " still hasn't started");
        Time.sleep(500);
      } else {
        // whb is valid or timeout
        break;
      }
    }

    WorkerHeartbeat whb = (WorkerHeartbeat) ls.get(Common.LS_WORKER_HEARTBEAT);
    if (whb == null) {
      LOG.error("Failed to start worker " + workerId);
    } else {
      LOG.info("Successfully started worker " + workerId);
    }
  }
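
For context, a minimal caller-side sketch (an assumption, not from the original source): the supervisor notes the launch time, starts the worker process, and then blocks in waitForWorkerLaunch until the heartbeat appears or the timeout expires. launchWorkerProcess is a hypothetical helper.

  // Hypothetical usage sketch; launchWorkerProcess is an assumed helper, not JStorm API.
  String workerId = UUID.randomUUID().toString();     // java.util.UUID
  int startTime = TimeUtils.current_time_secs();
  launchWorkerProcess(conf, workerId);                // assumed: forks the worker JVM
  waitForWorkerLaunch(conf, workerId, startTime);     // blocks until heartbeat or timeout
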
  public void execute(TridentTuple tuple, TridentCollector collector) {
    // Build an Avro record from the incoming tuple fields.
    GenericRecord docEntry = new GenericData.Record(schema);
    docEntry.put("docid", tuple.getStringByField("documentId"));
    docEntry.put("time", Time.currentTimeMillis());
    docEntry.put("line", tuple.getStringByField("document"));
    try {
      dataFileWriter.append(docEntry);
      dataFileWriter.flush();
    } catch (IOException e) {
      LOG.error("Error writing document record", e);
      throw new RuntimeException(e);
    }
  }
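
A minimal setup sketch for the Avro writer state the method above relies on, assuming a record schema with the three fields it populates (docid, time, line) and a local output file; the record name, field types, and file path are assumptions, built with the standard org.apache.avro generic API.

  // Hypothetical prepare()-time setup; record name, field types, and output path are assumptions.
  Schema schema = SchemaBuilder.record("DocumentRecord").fields()
      .requiredString("docid")
      .requiredLong("time")
      .requiredString("line")
      .endRecord();
  DataFileWriter<GenericRecord> dataFileWriter =
      new DataFileWriter<GenericRecord>(new GenericDatumWriter<GenericRecord>(schema));
  dataFileWriter.create(schema, new File("documents.avro")); // throws IOException; handle in prepare()
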
  private long now() {
    return Time.currentTimeMillis();
  }