@Override
public void close(TaskAttemptContext taskAttemptContext) throws IOException {
  // flush any partially filled batch before closing the underlying ORC writer
  if (batch.size != 0) {
    writer.addRowBatch(batch);
  }
  writer.close();
}
@Override
public void write(NullWritable nullWritable, V v) throws IOException {
  // if the batch is full, write it out.
  if (batch.size == batch.getMaxSize()) {
    writer.addRowBatch(batch);
    batch.reset();
  }

  // add the new row
  int row = batch.size++;
  // skip over the OrcKey or OrcValue
  if (v instanceof OrcKey) {
    v = (V) ((OrcKey) v).key;
  } else if (v instanceof OrcValue) {
    v = (V) ((OrcValue) v).value;
  }
  if (isTopStruct) {
    for (int f = 0; f < schema.getChildren().size(); ++f) {
      OrcMapredRecordWriter.setColumn(schema.getChildren().get(f),
          batch.cols[f], row, ((OrcStruct) v).getFieldValue(f));
    }
  } else {
    OrcMapredRecordWriter.setColumn(schema, batch.cols[0], row, v);
  }
}
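For orientation, the OrcKey/OrcValue branch covers jobs whose output value is one of the shuffle wrapper types from org.apache.orc.mapred rather than a bare ORC value; write() strips the wrapper before copying fields into the batch. A minimal sketch of building such a wrapped value follows; the class name, schema string, and field values are assumptions for illustration, not anything taken from the listing above.

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.orc.TypeDescription;
import org.apache.orc.mapred.OrcStruct;
import org.apache.orc.mapred.OrcValue;

// Hypothetical helper, not part of the writer above: builds the kind of wrapped
// value that the "instanceof OrcValue" branch in write() unwraps.
public class WrappedRowExample {
  public static OrcValue buildRow() {
    // assumed schema for illustration
    TypeDescription schema =
        TypeDescription.fromString("struct<id:int,name:string>");
    OrcStruct struct = (OrcStruct) OrcStruct.createValue(schema);
    struct.setFieldValue(0, new IntWritable(1));
    struct.setFieldValue(1, new Text("example"));
    // OrcValue only carries the struct through the shuffle; write() above pulls
    // out ((OrcValue) v).value before copying the fields into the row batch.
    return new OrcValue(struct);
  }
}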
public OrcMapreduceRecordWriter(Writer writer) {
  this.writer = writer;
  schema = writer.getSchema();
  // rows are buffered into a VectorizedRowBatch created from the file schema
  this.batch = schema.createRowBatch();
  isTopStruct = schema.getCategory() == TypeDescription.Category.STRUCT;
}
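To show where these methods sit in a job, here is a minimal sketch of a reducer whose output flows through this writer; the class name, schema string, and field names are assumptions for illustration. The job would set org.apache.orc.mapreduce.OrcOutputFormat as its output format and configure the matching output schema (via OrcConf, typically the orc.mapred.output.schema property). Each context.write() call lands in write() above, and the framework's call to close() flushes the final partial batch.

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.orc.TypeDescription;
import org.apache.orc.mapred.OrcStruct;

// Hypothetical reducer; schema and field names are illustrative assumptions.
public class OrcWriteReducer
    extends Reducer<Text, IntWritable, NullWritable, OrcStruct> {

  private static final TypeDescription SCHEMA =
      TypeDescription.fromString("struct<name:string,total:int>");

  // output row reused across reduce calls
  private final OrcStruct row = (OrcStruct) OrcStruct.createValue(SCHEMA);

  @Override
  protected void reduce(Text key, Iterable<IntWritable> values, Context context)
      throws IOException, InterruptedException {
    int total = 0;
    for (IntWritable v : values) {
      total += v.get();
    }
    row.setFieldValue(0, new Text(key));
    row.setFieldValue(1, new IntWritable(total));
    // delivered to OrcMapreduceRecordWriter.write(NullWritable, V) above
    context.write(NullWritable.get(), row);
  }
}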