Example #1
  @Override
  public Status appendBatch(List<AvroFlumeEvent> events) {
    logger.debug(
        "Avro source {}: Received Avro event batch of {} events.", getName(), events.size());
    sourceCounter.incrementAppendBatchReceivedCount();
    sourceCounter.addToEventReceivedCount(events.size());

    List<Event> batch = new ArrayList<Event>();

    // Wrap each incoming Avro event as a Flume Event, copying body and headers.
    for (AvroFlumeEvent avroEvent : events) {
      Event event =
          EventBuilder.withBody(avroEvent.getBody().array(), toStringMap(avroEvent.getHeaders()));
      batch.add(event);
    }

    try {
      getChannelProcessor().processEventBatch(batch);
    } catch (Throwable t) {
      logger.error(
          "Avro source " + getName() + ": Unable to process event batch. Exception follows.",
          t);
      if (t instanceof Error) {
        // Errors (e.g. OutOfMemoryError) must not be swallowed; rethrow them.
        throw (Error) t;
      }
      return Status.FAILED;
    }

    sourceCounter.incrementAppendBatchAcceptedCount();
    sourceCounter.addToEventAcceptedCount(events.size());

    return Status.OK;
  }
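The helper toStringMap is referenced above but not shown in this listing. A minimal sketch of what such a helper presumably looks like, assuming it only converts the Avro header map's CharSequence keys and values into the String map that EventBuilder.withBody expects:

  private static Map<String, String> toStringMap(Map<CharSequence, CharSequence> charSeqMap) {
    // Convert Avro's CharSequence-based headers to plain String headers.
    Map<String, String> stringMap = new HashMap<String, String>();
    for (Map.Entry<CharSequence, CharSequence> entry : charSeqMap.entrySet()) {
      stringMap.put(entry.getKey().toString(), entry.getValue().toString());
    }
    return stringMap;
  }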
    @Override
    public void run() {
      logger.info("SpoolDirectoryRunnable starting.");
      int backoffInterval = 250;
      try {
        while (!Thread.interrupted()) {
          List<Event> events = reader.readEvents(batchSize);
          if (events.isEmpty()) {
            // Nothing left to read; end this run.
            break;
          }
          sourceCounter.addToEventReceivedCount(events.size());
          sourceCounter.incrementAppendBatchReceivedCount();

          try {
            getChannelProcessor().processEventBatch(events);
            reader.commit();
          } catch (ChannelException ex) {
            logger.warn(
                "The channel is full and cannot accept data now. The source "
                    + "will try again after " + backoffInterval + " milliseconds.");
            hitChannelException = true;
            if (backoff) {
              // Exponential backoff: double the wait each round, capped at maxBackoff.
              TimeUnit.MILLISECONDS.sleep(backoffInterval);
              backoffInterval = Math.min(backoffInterval << 1, maxBackoff);
            }
            continue;
          }
          // Successful write: reset the backoff interval.
          backoffInterval = 250;
          sourceCounter.addToEventAcceptedCount(events.size());
          sourceCounter.incrementAppendBatchAcceptedCount();
        }
      } catch (Throwable t) {
        logger.error(
            "FATAL: "
                + SpoolDirectoryZipSource.this.toString()
                + ": Uncaught exception in SpoolDirectorySource thread. "
                + "Restart or reconfigure Flume to continue processing.",
            t);
        hasFatalError = true;
        Throwables.propagate(t);
      }
    }
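The retry loop above implements capped exponential backoff: when the channel is full it waits 250 ms, doubles the wait on each consecutive failure, and stops doubling once it reaches maxBackoff. A self-contained sketch of that pattern (the class name BackoffDemo and the 4000 ms cap are illustrative, not taken from the source):

import java.util.concurrent.TimeUnit;

public class BackoffDemo {
  public static void main(String[] args) throws InterruptedException {
    int backoffInterval = 250; // initial wait, matching the source above
    int maxBackoff = 4000;     // illustrative cap; configurable in the real source
    for (int attempt = 1; attempt <= 6; attempt++) {
      System.out.println("attempt " + attempt + ": backing off " + backoffInterval + " ms");
      TimeUnit.MILLISECONDS.sleep(backoffInterval);
      // Double the wait, never exceeding the cap.
      backoffInterval = Math.min(backoffInterval << 1, maxBackoff);
    }
  }
}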