/**
 * Load a record batch from a single buffer.
 *
 * <p>Vectors that match a field from the previous batch are reused; all others are newly
 * allocated. Either way, every vector is (re)loaded from {@code buf}, and vectors whose fields
 * disappeared are released.
 *
 * @param def The definition for the record batch.
 * @param buf The buffer that holds the data associated with the record batch.
 * @return Whether or not the schema changed since the previous load.
 * @throws SchemaChangeException
 */
  public boolean load(RecordBatchDef def, ByteBuf buf) throws SchemaChangeException {
    this.valueCount = def.getRecordCount();
    boolean schemaChanged = schema == null;

    // Index the currently-held vectors by field so matching vectors can be reused.
    Map<MaterializedField, ValueVector> oldFields = Maps.newHashMap();
    for (VectorWrapper<?> w : container) {
      ValueVector v = w.getValueVector();
      oldFields.put(v.getField(), v);
    }

    VectorContainer newVectors = new VectorContainer();

    List<FieldMetadata> fields = def.getFieldList();

    int bufOffset = 0;
    for (FieldMetadata fmd : fields) {
      FieldDef fieldDef = fmd.getDef();
      // Look up with a MaterializedField key: the map is keyed by MaterializedField, so
      // removing with the raw FieldDef could never match (cross-type equals).
      // NOTE(review): relies on MaterializedField having value-based equals/hashCode — confirm.
      ValueVector v = oldFields.remove(new MaterializedField(fieldDef));
      if (v == null) {
        // No matching vector from the previous batch: schema changed, allocate a fresh one.
        schemaChanged = true;
        MaterializedField m = new MaterializedField(fieldDef);
        v = TypeHelper.getNewVector(m, allocator);
      }

      // Whether reused or new, (re)load the vector from this batch's buffer. (The previous
      // code `continue`d on reuse, which left reused vectors with stale data, skipped the
      // bufOffset advance for their field, and added them to the soon-to-be-discarded
      // `container` instead of `newVectors`.)
      if (fmd.getValueCount() == 0) {
        v.clear();
      } else {
        v.load(fmd, buf.slice(bufOffset, fmd.getBufferLength()));
      }
      bufOffset += fmd.getBufferLength();
      newVectors.add(v);
    }

    // Any vectors left over belong to fields no longer present in this batch: release them.
    if (!oldFields.isEmpty()) {
      schemaChanged = true;
      for (ValueVector v : oldFields.values()) {
        v.close();
      }
    }

    // Rebuild the schema from the vectors that make up the new batch.
    SchemaBuilder b = BatchSchema.newBuilder();
    for (VectorWrapper<?> v : newVectors) {
      b.addField(v.getField());
    }
    b.setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE);
    this.schema = b.build();
    container = newVectors;
    return schemaChanged;
  }
  /**
   * Builds a terminal ("last") batch that carries the given schema but no row data.
   *
   * @param queryId identifier of the query this batch belongs to
   * @param sendMajorFragmentId major fragment id of the sender
   * @param sendMinorFragmentId minor fragment id of the sender
   * @param receiveMajorFragmentId major fragment id of the receiver
   * @param receiveMinorFragmentId minor fragment id of the receiver
   * @param schema the schema to serialize into the empty batch definition
   * @return a last-batch wrapper containing only the serialized schema
   */
  public static FragmentWritableBatch getEmptyLastWithSchema(
      QueryId queryId,
      int sendMajorFragmentId,
      int sendMinorFragmentId,
      int receiveMajorFragmentId,
      int receiveMinorFragmentId,
      BatchSchema schema) {

    // Serialize every materialized field of the schema into the batch definition.
    List<SerializedField> serializedFields = Lists.newArrayList();
    for (MaterializedField materializedField : schema) {
      serializedFields.add(materializedField.getAsBuilder().build());
    }
    return new FragmentWritableBatch(
        true,
        queryId,
        sendMajorFragmentId,
        sendMinorFragmentId,
        receiveMajorFragmentId,
        receiveMinorFragmentId,
        RecordBatchDef.newBuilder().addAllField(serializedFields).build());
  }
/**
 * Wraps a serialized record batch ({@link RecordBatchDef} header plus data buffers) together
 * with the routing information (query id, sending/receiving fragment ids) needed to ship it
 * between fragments.
 */
public class FragmentWritableBatch {
  static final org.slf4j.Logger logger =
      org.slf4j.LoggerFactory.getLogger(FragmentWritableBatch.class);

  // Shared definition for a batch with zero records and no fields. Protobuf messages are
  // immutable, so a single instance can be reused; `final` prevents accidental reassignment
  // (the field was previously mutable).
  private static final RecordBatchDef EMPTY_DEF =
      RecordBatchDef.newBuilder().setRecordCount(0).build();

  // Data buffers backing the batch; may be empty for schema-only / terminal batches.
  private final ByteBuf[] buffers;
  // Serialized header carrying the batch definition and fragment routing info.
  private final FragmentRecordBatch header;

  /**
   * Creates a writable batch from an in-memory {@code WritableBatch}, taking its definition
   * and buffers.
   *
   * @param isLast whether this is the final batch of the stream
   * @param queryId identifier of the owning query
   * @param sendMajorFragmentId major fragment id of the sender
   * @param sendMinorFragmentId minor fragment id of the sender
   * @param receiveMajorFragmentId major fragment id of the receiver
   * @param receiveMinorFragmentId minor fragment id of the receiver
   * @param batch the batch whose definition and buffers are wrapped
   */
  public FragmentWritableBatch(
      boolean isLast,
      QueryId queryId,
      int sendMajorFragmentId,
      int sendMinorFragmentId,
      int receiveMajorFragmentId,
      int receiveMinorFragmentId,
      WritableBatch batch) {
    this(
        isLast,
        queryId,
        sendMajorFragmentId,
        sendMinorFragmentId,
        receiveMajorFragmentId,
        receiveMinorFragmentId,
        batch.getDef(),
        batch.getBuffers());
  }

  // All public entry points funnel into this constructor, which assembles the protobuf header.
  private FragmentWritableBatch(
      boolean isLast,
      QueryId queryId,
      int sendMajorFragmentId,
      int sendMinorFragmentId,
      int receiveMajorFragmentId,
      int receiveMinorFragmentId,
      RecordBatchDef def,
      ByteBuf... buffers) {
    this.buffers = buffers;
    // The handle identifies the RECEIVING fragment; the sender ids travel separately below.
    FragmentHandle handle =
        FragmentHandle //
            .newBuilder() //
            .setMajorFragmentId(receiveMajorFragmentId) //
            .setMinorFragmentId(receiveMinorFragmentId) //
            .setQueryId(queryId) //
            .build();
    this.header =
        FragmentRecordBatch //
            .newBuilder() //
            .setIsLastBatch(isLast) //
            .setDef(def) //
            .setHandle(handle) //
            .setSendingMajorFragmentId(sendMajorFragmentId) //
            .setSendingMinorFragmentId(sendMinorFragmentId) //
            .build();
  }

  /**
   * Builds a terminal ("last") batch with an empty definition and no buffers.
   *
   * @return a last-batch wrapper carrying no schema and no data
   */
  public static FragmentWritableBatch getEmptyLast(
      QueryId queryId,
      int sendMajorFragmentId,
      int sendMinorFragmentId,
      int receiveMajorFragmentId,
      int receiveMinorFragmentId) {
    return new FragmentWritableBatch(
        true,
        queryId,
        sendMajorFragmentId,
        sendMinorFragmentId,
        receiveMajorFragmentId,
        receiveMinorFragmentId,
        EMPTY_DEF);
  }

  /**
   * Builds a terminal ("last") batch that carries the given schema but no row data.
   *
   * @param schema the schema to serialize into the empty batch definition
   * @return a last-batch wrapper containing only the serialized schema
   */
  public static FragmentWritableBatch getEmptyLastWithSchema(
      QueryId queryId,
      int sendMajorFragmentId,
      int sendMinorFragmentId,
      int receiveMajorFragmentId,
      int receiveMinorFragmentId,
      BatchSchema schema) {

    // Serialize every materialized field of the schema into the batch definition.
    List<SerializedField> fields = Lists.newArrayList();
    for (MaterializedField field : schema) {
      fields.add(field.getAsBuilder().build());
    }
    RecordBatchDef def = RecordBatchDef.newBuilder().addAllField(fields).build();
    return new FragmentWritableBatch(
        true,
        queryId,
        sendMajorFragmentId,
        sendMinorFragmentId,
        receiveMajorFragmentId,
        receiveMinorFragmentId,
        def);
  }

  /**
   * Returns the data buffers backing this batch.
   *
   * <p>NOTE(review): this exposes the internal array without a defensive copy, so callers can
   * mutate it; existing callers may rely on that, so behavior is left unchanged.
   */
  public ByteBuf[] getBuffers() {
    return buffers;
  }

  /** Returns the total number of readable bytes across all data buffers. */
  public long getByteCount() {
    long n = 0;
    for (ByteBuf buf : buffers) {
      n += buf.readableBytes();
    }
    return n;
  }

  /** Returns the serialized header (definition plus routing information). */
  public FragmentRecordBatch getHeader() {
    return header;
  }
}