  @Override
  public void commit(String offset) throws StageException {
    consumer.commit(offset);
  }

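  // the source is in error state as soon as either side of the producer/consumer pair fails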
  @Override
  public boolean inErrorState() {
    return producer.inErrorState() || consumer.inErrorState();
  }

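  // propagates a fatal pipeline error to the consumer side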
  @Override
  public void errorNotification(Throwable throwable) {
    consumer.error(throwable);
  }

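  // Builds one batch of records: in preview mode records are served from previewBuffer,
  // otherwise they are taken from the cluster consumer. Returns the ID of the last
  // message processed, which the framework stores as the new offset.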
  @Override
 public String produce(String lastSourceOffset, int maxBatchSize, BatchMaker batchMaker)
     throws StageException {
   OffsetAndResult<Map.Entry> offsetAndResult;
   if (getContext().isPreview()) {
      // preview currently supports only the TEXT and DELIMITED data formats
     List<Map.Entry> records = new ArrayList<>();
     int count = 0;
     Iterator<String> keys = previewBuffer.keySet().iterator();
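      // drain up to maxBatchSize buffered entries; buffer keys appear to be of the form
      // <contextKey>::<startOffset>, so a start offset of "0" marks a file's first line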
     while (count < maxBatchSize && count < previewBuffer.size() && keys.hasNext()) {
       String key = keys.next();
       String[] keyParts = key.split("::");
       if (count == 0
           && DataFormat.DELIMITED == dataFormat
           && CsvHeader.NO_HEADER != csvHeader
           && keyParts.length > 1
           && keyParts[1].equals("0")) {
         // add header
         if (CsvHeader.WITH_HEADER == csvHeader) {
           records.add(new Pair(previewBuffer.get(key), null));
         } else if (CsvHeader.IGNORE_HEADER == csvHeader) {
           // this record will be ignored - don't increment the count
         }
       } else {
         records.add(new Pair(key, previewBuffer.get(key)));
         count++;
       }
     }
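      // in preview the running recordsProduced count stands in for a real consumer offset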
     offsetAndResult = new OffsetAndResult<>(recordsProduced, records);
   } else {
     offsetAndResult = consumer.take();
   }
   if (offsetAndResult == null) {
     LOG.info("Received null batch, returning null");
     return null;
   }
   String messageId = null;
   int count = 0;
   String header = null;
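    // each entry pairs a message ID (which doubles as the offset) with its payload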
   for (Map.Entry message : offsetAndResult.getResult()) {
     count++;
     messageId = String.valueOf(message.getKey());
     List<Record> listRecords = null;
     if (dataFormat == DataFormat.TEXT) {
       listRecords = processMessage(messageId, message.getValue());
     } else if (dataFormat == DataFormat.DELIMITED) {
       switch (csvHeader) {
         case IGNORE_HEADER:
            // skip the header, i.e. the message whose start offset is 0
            // messageId appears to be of the form <contextKey>::<startOffset>
            String[] offsetContextSplit = messageId.split("::");
           if (offsetContextSplit.length > 1 && offsetContextSplit[1].equals("0")) {
             break;
           }
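            // fall through: a non-header message is processed exactly like NO_HEADER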
         case NO_HEADER:
           listRecords = processMessage(messageId, message.getValue());
           break;
         case WITH_HEADER:
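            // the first message carries the header line as its ID and a null payload;
            // the header is cached and prepended to every subsequent message in the batch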
           if (header == null) {
             header = messageId;
             LOG.debug("Header is: {}", header);
             Utils.checkState(
                 message.getValue() == null,
                 Utils.formatL(
                     "Message value for header record should be null, was: '{}'",
                     message.getValue()));
           } else {
             listRecords = processMessage(messageId, header + "\n" + message.getValue());
           }
           break;
         default:
            String msg = Utils.format("Unrecognized CSV header option: '{}'", csvHeader);
           LOG.warn(msg);
           throw new IllegalStateException(msg);
       }
     } else if (dataFormat == DataFormat.AVRO) {
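        // Avro messages need no header handling; process the payload directly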
       listRecords = processMessage(messageId, message.getValue());
     } else {
       throw new IllegalStateException(Utils.format("Unrecognized data format: '{}'", dataFormat));
     }
     if (listRecords != null) {
       for (Record record : listRecords) {
         batchMaker.addRecord(record);
       }
     }
   }
   if (count == 0) {
     LOG.info("Received no records, returning null");
     return null;
   }
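    // the ID of the last message processed becomes the offset handed back to the framework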
    return Utils.checkNotNull(messageId, "Message ID cannot be null at this point");
 }