// Replays every frame that was spilled to disk during congestion, sends an EOSD bucket to
// mark the end of the backlog, and resets the spiller for reuse.
private void processSpilledBacklog() throws HyracksDataException {
  try {
    Iterator<ByteBuffer> backlog = spiller.replayData();
    while (backlog.hasNext()) {
      process(backlog.next());
      nProcessed++;
    }
    // Mark the end of the replayed (spilled) data for the downstream consumer.
    DataBucket bucket = pool.getDataBucket();
    bucket.setContentType(ContentType.EOSD);
    bucket.setDesiredReadCount(1);
    mBuffer.sendMessage(bucket);
    spiller.reset();
  } catch (Exception e) {
    // The cause is preserved in the wrapped exception; printing the stack trace here is redundant.
    throw new HyracksDataException(e);
  }
}
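
// Replays frames that were buffered in memory (rather than spilled to disk), sends an EOSD
// bucket to mark the end of the backlog, and releases the frame collection back to the
// feed memory manager.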
private void processBufferredBacklog() throws HyracksDataException {
  try {
    if (LOGGER.isLoggable(Level.INFO)) {
      LOGGER.info("Processing buffered backlog for " + this.runtimeId);
    }

    if (frameCollection != null) {
      Iterator<ByteBuffer> backlog = frameCollection.getFrameCollectionIterator();
      while (backlog.hasNext()) {
        process(backlog.next());
        nProcessed++;
      }
      // Mark the end of the replayed backlog for the downstream consumer.
      DataBucket bucket = pool.getDataBucket();
      bucket.setContentType(ContentType.EOSD);
      bucket.setDesiredReadCount(1);
      mBuffer.sendMessage(bucket);
      // Return the drained in-memory frame collection to the feed memory manager.
      feedManager.getFeedMemoryManager().releaseMemoryComponent(frameCollection);
      frameCollection = null;
    }
  } catch (Exception e) {
    // The cause is preserved in the wrapped exception; printing the stack trace here is redundant.
    throw new HyracksDataException(e);
  }
}
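
// Pushes a single frame through the pipeline. When buffering is disabled the frame goes
// straight to the core operator; otherwise it is wrapped in a DataBucket and handed to the
// monitored buffer. If no bucket is available (memory congestion), the feed policy decides
// whether to spill to disk, discard the frame, enable throttling, or report unresolvable
// congestion. On an exception, the frame is repaired and retried (or skipped) if the policy
// allows soft failures; otherwise the feed terminates.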
 protected void process(ByteBuffer frame) throws HyracksDataException {
   boolean frameProcessed = false;
   while (!frameProcessed) {
     try {
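        // Non-buffered path: the frame is handed synchronously to the core operator.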
       if (!bufferingEnabled) {
         coreOperator.nextFrame(frame); // synchronous
         mBuffer.sendReport(frame);
       } else {
         DataBucket bucket = pool.getDataBucket();
         if (bucket != null) {
           if (frame != null) {
              bucket.reset(frame); // copies the frame's contents into the bucket
             bucket.setContentType(ContentType.DATA);
           } else {
             bucket.setContentType(ContentType.EOD);
           }
           bucket.setDesiredReadCount(1);
           mBuffer.sendMessage(bucket);
           mBuffer.sendReport(frame);
           nProcessed++;
         } else {
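            // No free bucket: the in-memory pool is congested; resolve per the feed policy
            // (spill to disk, discard, throttle, or report unresolvable congestion).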
           if (fpa.spillToDiskOnCongestion()) {
             if (frame != null) {
               boolean spilled = spiller.processMessage(frame);
               if (spilled) {
                 setMode(Mode.SPILL);
               } else {
                 reportUnresolvableCongestion();
               }
             }
           } else if (fpa.discardOnCongestion()) {
             boolean discarded = discarder.processMessage(frame);
             if (!discarded) {
               reportUnresolvableCongestion();
             }
           } else if (fpa.throttlingEnabled()) {
             setThrottlingEnabled(true);
           } else {
             reportUnresolvableCongestion();
           }
         }
       }
       frameProcessed = true;
     } catch (Exception e) {
       if (feedPolicyAccessor.continueOnSoftFailure()) {
         frame = exceptionHandler.handleException(e, frame);
         if (frame == null) {
           frameProcessed = true;
            if (LOGGER.isLoggable(Level.WARNING)) {
              LOGGER.warning(
                  "Encountered exception: "
                      + e.getMessage()
                      + ". Insufficient information; cannot extract the failing tuple.");
            }
         }
       } else {
          if (LOGGER.isLoggable(Level.WARNING)) {
            LOGGER.warning(
                "Ingestion policy does not permit continuing on a soft failure; the feed will terminate.");
          }
         mBuffer.close(false);
         throw new HyracksDataException(e);
       }
     }
   }
 }