ByteBuffer makeAFrame(int capacity, int count, int deletedBytes) throws HyracksDataException {
   ByteBuffer buffer = ByteBuffer.allocate(capacity);
   int metaOffset = capacity - 4;
   buffer.putInt(metaOffset, deletedBytes);
   metaOffset -= 4;
   buffer.putInt(metaOffset, count);
   metaOffset -= 4;
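   // Tuple data (field end offsets followed by the record bytes) is written
   // forward from the start of the frame; each tuple's end position goes into
   // a 4-byte slot growing backward from the tail, just below the
   // deleted-bytes and tuple-count slots written above.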
   for (int i = 0; i < count; i++, metaOffset -= 4) {
     makeARecord(builder, i);
     for (int x = 0; x < builder.getFieldEndOffsets().length; x++) {
       buffer.putInt(builder.getFieldEndOffsets()[x]);
     }
     buffer.put(builder.getByteArray(), 0, builder.getSize());
     assert (metaOffset > buffer.position());
     buffer.putInt(metaOffset, buffer.position());
   }
   return buffer;
 }
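
  // makeARecord is called above but not shown in this excerpt. A minimal
  // sketch, assuming it fills the shared builder with the same two-field
  // (int key, UTF-8 string payload) layout that prepareData uses below; the
  // concrete field values here are placeholders.
  void makeARecord(ArrayTupleBuilder tb, int i) throws HyracksDataException {
    tb.reset();
    tb.addField(IntegerSerializerDeserializer.INSTANCE, i);
    tb.addField(UTF8StringSerializerDeserializer.INSTANCE, "record:" + i);
  }
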
 private void printTuple(ArrayTupleBuilder tb, IPrinter[] printers, PrintStream printStream)
     throws HyracksDataException {
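   // Field i occupies bytes [offsets[i - 1], offsets[i]) of the builder's
   // backing array; field 0 starts at offset 0.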
   int[] offsets = tb.getFieldEndOffsets();
   for (int i = 0; i < printers.length; i++) {
     int offset = i == 0 ? 0 : offsets[i - 1];
     int length = i == 0 ? offsets[0] : offsets[i] - offsets[i - 1];
     printers[i].print(tb.getByteArray(), offset, length, printStream);
     printStream.println();
   }
 }
  static void prepareData(
      IHyracksTaskContext ctx,
      List<IFrame> frameList,
      int minDataSize,
      int minRecordSize,
      int maxRecordSize,
      Map<Integer, String> specialData,
      Map<Integer, String> keyValuePair)
      throws HyracksDataException {

    ArrayTupleBuilder tb = new ArrayTupleBuilder(RecordDesc.getFieldCount());
    FrameTupleAppender appender = new FrameTupleAppender();

    int datasize = 0;
    if (specialData != null) {
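      // First append every caller-supplied (key, value) pair, giving each
      // tuple its own frame sized to fit the serialized record.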
      for (Map.Entry<Integer, String> entry : specialData.entrySet()) {
        tb.reset();
        tb.addField(IntegerSerializerDeserializer.INSTANCE, entry.getKey());
        tb.addField(UTF8StringSerializerDeserializer.INSTANCE, entry.getValue());

        VSizeFrame frame =
            new VSizeFrame(
                ctx,
                FrameHelper.calcAlignedFrameSizeToStore(
                    tb.getFieldEndOffsets().length, tb.getSize(), ctx.getInitialFrameSize()));
        appender.reset(frame, true);
        assertTrue(appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize()));
        frameList.add(frame);
        datasize += frame.getFrameSize();
      }
      keyValuePair.putAll(specialData);
    }

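    // Then append random records until at least minDataSize bytes of frames
    // have been produced, skipping keys that are already present.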
    VSizeFrame frame = new VSizeFrame(ctx, ctx.getInitialFrameSize());
    appender.reset(frame, true);
    while (datasize < minDataSize) {
      tb.reset();
      int key = GRandom.nextInt(minDataSize + 1);
      if (!keyValuePair.containsKey(key)) {
        String value = generateRandomRecord(minRecordSize, maxRecordSize);
        tb.addField(IntegerSerializerDeserializer.INSTANCE, key);
        tb.addField(UTF8StringSerializerDeserializer.INSTANCE, value);

        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
          frameList.add(frame);
          datasize += frame.getFrameSize();
          frame =
              new VSizeFrame(
                  ctx,
                  FrameHelper.calcAlignedFrameSizeToStore(
                      tb.getFieldEndOffsets().length, tb.getSize(), ctx.getInitialFrameSize()));
          appender.reset(frame, true);
          assertTrue(appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize()));
        }

        keyValuePair.put(key, value);
      }
    }
    if (appender.getTupleCount() > 0) {
      frameList.add(frame);
    }
  }
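
  // generateRandomRecord is called above but not shown in this excerpt. A
  // minimal sketch, assuming it just returns a random lowercase string whose
  // length falls between minRecordSize and maxRecordSize characters.
  static String generateRandomRecord(int minRecordSize, int maxRecordSize) {
    int length = minRecordSize + GRandom.nextInt(maxRecordSize - minRecordSize + 1);
    StringBuilder sb = new StringBuilder(length);
    for (int i = 0; i < length; i++) {
      sb.append((char) ('a' + GRandom.nextInt(26)));
    }
    return sb.toString();
  }
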
  @Override
  public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    accessor.reset(buffer);
    int tupleCount = accessor.getTupleCount();

    for (int i = 0; i < tupleCount; i++) {
      short numTokens = 0;

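      // Point the tokenizer at the document field's bytes: the tuple's start
      // offset, plus the field-slot (offset) area, plus the field's start
      // offset within the tuple, for the field's length.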
      tokenizer.reset(
          accessor.getBuffer().array(),
          accessor.getTupleStartOffset(i)
              + accessor.getFieldSlotsLength()
              + accessor.getFieldStartOffset(i, docField),
          accessor.getFieldLength(i, docField));

      if (addNumTokensKey) {
        // Get the total number of tokens.
        numTokens = tokenizer.getTokensCount();
      }

      // Write the token and the key fields into the frame, in the order
      // dictated by the writeKeyFieldsFirst flag.
      while (tokenizer.hasNext()) {

        tokenizer.next();

        builder.reset();

        // Writing order: token, number of tokens, key field 1 ... n
        if (!writeKeyFieldsFirst) {
          try {
            IToken token = tokenizer.getToken();
            token.serializeToken(builderData);

            builder.addFieldEndOffset();
            // Add number of tokens if requested.
            if (addNumTokensKey) {
              builder.getDataOutput().writeShort(numTokens);
              builder.addFieldEndOffset();
            }
          } catch (IOException e) {
            throw new HyracksDataException(e); // keep the cause, not just its message
          }

          for (int k = 0; k < keyFields.length; k++) {
            builder.addField(accessor, i, keyFields[k]);
          }

        }
        // Writing order: key field 1 ... n, token, number of tokens
        else {

          for (int k = 0; k < keyFields.length; k++) {
            builder.addField(accessor, i, keyFields[k]);
          }

          try {
            IToken token = tokenizer.getToken();
            token.serializeToken(builderData);

            builder.addFieldEndOffset();
            // Add number of tokens if requested.
            if (addNumTokensKey) {
              builder.getDataOutput().writeShort(numTokens);
              builder.addFieldEndOffset();
            }
          } catch (IOException e) {
            throw new HyracksDataException(e); // keep the cause, not just its message
          }
        }

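        // Append the assembled tuple; FrameUtils flushes the current frame to
        // the writer and resets the appender if the tuple does not fit.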
        FrameUtils.appendToWriter(
            writer,
            appender,
            builder.getFieldEndOffsets(),
            builder.getByteArray(),
            0,
            builder.getSize());
      }
    }
  }
  @Override
  public void performOp(ITupleReference tuple, TestOperation op)
      throws HyracksDataException, IndexException {
    LSMBTreeAccessor accessor = (LSMBTreeAccessor) indexAccessor;
    IIndexCursor searchCursor = accessor.createSearchCursor(false);
    MultiComparator cmp = accessor.getMultiComparator();
    RangePredicate rangePred = new RangePredicate(tuple, tuple, true, true, cmp, cmp);
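    // The bounds are overridden per search operation below: the probe tuple
    // (inclusive on both ends) for POINT_SEARCH, and null (open) bounds for SCAN.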

    switch (op) {
      case INSERT:
        try {
          accessor.insert(tuple);
        } catch (TreeIndexDuplicateKeyException e) {
          // Ignore duplicate keys, since we get random tuples.
        }
        break;

      case DELETE:
        // Create a tuple reference with only key fields.
        deleteTb.reset();
        for (int i = 0; i < numKeyFields; i++) {
          deleteTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
        }
        deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
        try {
          accessor.delete(deleteTuple);
        } catch (TreeIndexNonExistentKeyException e) {
          // Ignore non-existent keys, since we get random tuples.
        }
        break;

      case UPDATE:
        try {
          accessor.update(tuple);
        } catch (TreeIndexNonExistentKeyException e) {
          // Ignore non-existent keys, since we get random tuples.
        } catch (BTreeNotUpdateableException e) {
          // Ignore not updateable exception due to numKeys == numFields.
        }
        break;

      case POINT_SEARCH:
        searchCursor.reset();
        rangePred.setLowKey(tuple, true);
        rangePred.setHighKey(tuple, true);
        accessor.search(searchCursor, rangePred);
        consumeCursorTuples(searchCursor);
        break;

      case SCAN:
        searchCursor.reset();
        rangePred.setLowKey(null, true);
        rangePred.setHighKey(null, true);
        accessor.search(searchCursor, rangePred);
        consumeCursorTuples(searchCursor);
        break;

      case MERGE:
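        // Schedule a merge of all current disk (immutable) components;
        // NoOpIOOperationCallback performs no bookkeeping when the I/O completes.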
        accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE, lsmBTree.getImmutableComponents());
        break;

      default:
        throw new HyracksDataException("Op " + op + " not supported.");
    }
  }
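
  // consumeCursorTuples is used by POINT_SEARCH and SCAN above but is not
  // shown in this excerpt. A minimal sketch, assuming it simply drains the
  // cursor and closes it when done.
  private void consumeCursorTuples(IIndexCursor cursor)
      throws HyracksDataException, IndexException {
    try {
      while (cursor.hasNext()) {
        cursor.next();
      }
    } finally {
      cursor.close();
    }
  }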