Example #1
    private void markJobComplete(
        String indexerName, String jobId, boolean success, String jobState, Counters counters) {
      try {
        // The internal lock bypasses the index-in-delete-state check, which does not matter
        // here (and that check might even cause a failure in our case).
        String lock = indexerModel.lockIndexerInternal(indexerName, false);
        try {
          // Re-read the indexer definition under the lock to make sure we work with its current state
          IndexerDefinition indexer = indexerModel.getFreshIndexer(indexerName);

          ActiveBatchBuildInfo activeJobInfo = indexer.getActiveBatchBuildInfo();

          if (activeJobInfo == null) {
            // This might happen if we got some older update event on the indexer right after we
            // marked this job as finished.
            log.error(
                "Unexpected situation: indexer build job completed but indexer does not have an active"
                    + " build job. Index: "
                    + indexer.getName()
                    + ", job: "
                    + jobId
                    + ". Ignoring this event.");
            runningJobs.remove(indexerName);
            return;
          } else if (!activeJobInfo.getJobId().equals(jobId)) {
            // I don't think this should ever occur: a new job will never start before we marked
            // this one as finished, especially since we lock when creating/updating indexes.
            log.error(
                "Abnormal situation: indexer is associated with index build job "
                    + activeJobInfo.getJobId()
                    + " but expected job "
                    + jobId
                    + ". Will mark job as"
                    + " done anyway.");
          }

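          // Capture the outcome of the completed job in a BatchBuildInfo that will be stored
          // as the indexer's last batch build.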
          BatchBuildInfoBuilder jobInfoBuilder = new BatchBuildInfoBuilder();
          jobInfoBuilder.jobState(jobState);
          jobInfoBuilder.success(success);
          jobInfoBuilder.jobId(jobId);
          jobInfoBuilder.batchIndexConfiguration(activeJobInfo.getBatchIndexConfiguration());
          jobInfoBuilder.submitTime(activeJobInfo.getSubmitTime());
          jobInfoBuilder.trackingUrl(activeJobInfo.getTrackingUrl());

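          // Copy a few relevant MapReduce counters into the stored build info.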
          if (counters != null) {
            jobInfoBuilder.counter(
                getCounterKey(Task.Counter.MAP_INPUT_RECORDS),
                counters.getCounter(Task.Counter.MAP_INPUT_RECORDS));
            jobInfoBuilder.counter(
                getCounterKey(JobInProgress.Counter.TOTAL_LAUNCHED_MAPS),
                counters.getCounter(JobInProgress.Counter.TOTAL_LAUNCHED_MAPS));
            jobInfoBuilder.counter(
                getCounterKey(JobInProgress.Counter.NUM_FAILED_MAPS),
                counters.getCounter(JobInProgress.Counter.NUM_FAILED_MAPS));
            // TODO
            // jobInfo.addCounter(getCounterKey(IndexBatchBuildCounters.NUM_FAILED_RECORDS),
            //     counters.getCounter(IndexBatchBuildCounters.NUM_FAILED_RECORDS));
          }

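          // Derive the updated definition from the current one: the completed job becomes the
          // last batch build, the active build is cleared, and batch indexing goes back to
          // INACTIVE.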
          indexer =
              new IndexerDefinitionBuilder()
                  .startFrom(indexer)
                  .lastBatchBuildInfo(jobInfoBuilder.build())
                  .activeBatchBuildInfo(null)
                  .batchIndexingState(BatchIndexingState.INACTIVE)
                  .build();

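          // Drop the in-memory tracking entry and persist the updated definition while still
          // holding the lock.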
          runningJobs.remove(indexerName);
          indexerModel.updateIndexerInternal(indexer);

          log.info(
              "Marked indexer build job as finished for indexer "
                  + indexerName
                  + ", job ID =  "
                  + jobId);

        } finally {
          indexerModel.unlockIndexer(lock, true);
        }
      } catch (Throwable t) {
        log.error("Error trying to mark index build job as finished for indexer " + indexerName, t);
      }
    }
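
For context, the method above follows a lock, re-read, modify, persist, unlock cycle against indexerModel. The helper below is a minimal sketch of that cycle in isolation, assuming it sits in the same class and reuses the indexerModel and log fields together with the model calls already shown (lockIndexerInternal, getFreshIndexer, updateIndexerInternal, unlockIndexer); the method name and the startFrom call that copies the existing definition's fields are illustrative assumptions, not part of the original code.

    // Sketch only: resets an indexer's batch indexing state using the same
    // lock / re-read / modify / persist / unlock pattern as markJobComplete.
    private void resetBatchIndexingState(String indexerName) {
      try {
        // Take the internal model lock so no concurrent update races with this one.
        String lock = indexerModel.lockIndexerInternal(indexerName, false);
        try {
          // Re-read after locking: only the fresh definition is safe to modify.
          IndexerDefinition indexer = indexerModel.getFreshIndexer(indexerName);

          // Derive an updated definition and persist it while still holding the lock.
          IndexerDefinition updated =
              new IndexerDefinitionBuilder()
                  .startFrom(indexer)
                  .batchIndexingState(BatchIndexingState.INACTIVE)
                  .build();
          indexerModel.updateIndexerInternal(updated);
        } finally {
          // Always release the lock, even if the read or update failed.
          indexerModel.unlockIndexer(lock, true);
        }
      } catch (Throwable t) {
        log.error("Error resetting batch indexing state for indexer " + indexerName, t);
      }
    }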