  /**
   * Generates at least {@code minDataSize} bytes of (integer key, string value) tuples,
   * packs them into frames appended to {@code frameList}, and records every generated
   * pair in {@code keyValuePair}. Entries from {@code specialData}, if present, are
   * each written into their own appropriately sized frame first.
   */
  static void prepareData(
      IHyracksTaskContext ctx,
      List<IFrame> frameList,
      int minDataSize,
      int minRecordSize,
      int maxRecordSize,
      Map<Integer, String> specialData,
      Map<Integer, String> keyValuePair)
      throws HyracksDataException {

    ArrayTupleBuilder tb = new ArrayTupleBuilder(RecordDesc.getFieldCount());
    FrameTupleAppender appender = new FrameTupleAppender();

    int dataSize = 0;
    // Special entries are each written into their own dedicated frame first.
    if (specialData != null) {
      for (Map.Entry<Integer, String> entry : specialData.entrySet()) {
        tb.reset();
        tb.addField(IntegerSerializerDeserializer.INSTANCE, entry.getKey());
        tb.addField(UTF8StringSerializerDeserializer.INSTANCE, entry.getValue());

        VSizeFrame frame =
            new VSizeFrame(
                ctx,
                FrameHelper.calcAlignedFrameSizeToStore(
                    tb.getFieldEndOffsets().length, tb.getSize(), ctx.getInitialFrameSize()));
        appender.reset(frame, true);
        assertTrue(appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize()));
        frameList.add(frame);
        dataSize += frame.getFrameSize();
      }
      keyValuePair.putAll(specialData);
    }

    VSizeFrame frame = new VSizeFrame(ctx, ctx.getInitialFrameSize());
    appender.reset(frame, true);
    while (dataSize < minDataSize) {
      tb.reset();
      int key = GRandom.nextInt(minDataSize + 1);
      if (!keyValuePair.containsKey(key)) {
        String value = generateRandomRecord(minRecordSize, maxRecordSize);
        tb.addField(IntegerSerializerDeserializer.INSTANCE, key);
        tb.addField(UTF8StringSerializerDeserializer.INSTANCE, value);

        // The current frame is full: ship it and start a new frame sized for this tuple.
        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
          frameList.add(frame);
          dataSize += frame.getFrameSize();
          frame =
              new VSizeFrame(
                  ctx,
                  FrameHelper.calcAlignedFrameSizeToStore(
                      tb.getFieldEndOffsets().length, tb.getSize(), ctx.getInitialFrameSize()));
          appender.reset(frame, true);
          assertTrue(appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize()));
        }

        keyValuePair.put(key, value);
      }
    }
    // Ship the last, possibly partially filled frame if it holds any tuples.
    if (appender.getTupleCount() > 0) {
      frameList.add(frame);
    }
  }
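
  // A minimal sketch of calling prepareData from a test; the method name and the size
  // arguments below are illustrative assumptions, not part of the original code.
  void prepareDataExample(IHyracksTaskContext ctx) throws HyracksDataException {
    List<IFrame> frames = new ArrayList<>();
    Map<Integer, String> keyValuePair = new HashMap<>();
    // Roughly ten frames' worth of records whose string values are 8 to 64 characters.
    prepareData(ctx, frames, 10 * ctx.getInitialFrameSize(), 8, 64, null, keyValuePair);
  }

  /**
   * Tokenizes the {@code docField} field of each tuple in the incoming frame and emits
   * one output tuple per token: the token, optionally the total token count, and the
   * projected key fields, in the order selected by {@code writeKeyFieldsFirst}.
   */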
  @Override
  public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    accessor.reset(buffer);
    int tupleCount = accessor.getTupleCount();

    for (int i = 0; i < tupleCount; i++) {
      short numTokens = 0;

      tokenizer.reset(
          accessor.getBuffer().array(),
          accessor.getTupleStartOffset(i)
              + accessor.getFieldSlotsLength()
              + accessor.getFieldStartOffset(i, docField),
          accessor.getFieldLength(i, docField));

      if (addNumTokensKey) {
        // Get the total number of tokens.
        numTokens = tokenizer.getTokensCount();
      }

      // Write the token and key data into the frame in the order specified
      // by the writeKeyFieldsFirst flag.
      while (tokenizer.hasNext()) {

        tokenizer.next();

        builder.reset();

        // Writing order: token, number of tokens, key field 1 ... n
        if (!writeKeyFieldsFirst) {
          try {
            IToken token = tokenizer.getToken();
            token.serializeToken(builderData);

            builder.addFieldEndOffset();
            // Add number of tokens if requested.
            if (addNumTokensKey) {
              builder.getDataOutput().writeShort(numTokens);
              builder.addFieldEndOffset();
            }
          } catch (IOException e) {
            // Preserve the cause instead of rethrowing only its message.
            throw new HyracksDataException(e);
          }

          for (int k = 0; k < keyFields.length; k++) {
            builder.addField(accessor, i, keyFields[k]);
          }

        }
        // Writing order: key field 1 ... n, token, number of tokens
        else {

          for (int k = 0; k < keyFields.length; k++) {
            builder.addField(accessor, i, keyFields[k]);
          }

          try {
            IToken token = tokenizer.getToken();
            token.serializeToken(builderData);

            builder.addFieldEndOffset();
            // Add number of tokens if requested.
            if (addNumTokensKey) {
              builder.getDataOutput().writeShort(numTokens);
              builder.addFieldEndOffset();
            }
          } catch (IOException e) {
            // Preserve the cause instead of rethrowing only its message.
            throw new HyracksDataException(e);
          }
        }

        FrameUtils.appendToWriter(
            writer,
            appender,
            builder.getFieldEndOffsets(),
            builder.getByteArray(),
            0,
            builder.getSize());
      }
    }
  }
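
 /**
  * Reads ClassAd records carrying temporal attributes, parses them into an
  * {@code ARecordType}, prints each resulting tuple as ADM, and verifies that the
  * output matches the expected file.
  */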
 @SuppressWarnings("rawtypes")
 public void testSchemaful() {
   try {
     File file = new File("target/classad-wtih-temporals.adm");
     File expected =
         new File(getClass().getResource("/results/classad-with-temporals.adm").toURI().getPath());
     FileUtils.deleteQuietly(file);
     PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
     String[] recordFieldNames = {
       "GlobalJobId",
       "Owner",
       "ClusterId",
       "ProcId",
       "RemoteWallClockTime",
       "CompletionDate",
       "QDate",
       "JobCurrentStartDate",
       "JobStartDate",
       "JobCurrentStartExecutingDate"
     };
     IAType[] recordFieldTypes = {
       BuiltinType.ASTRING,
       BuiltinType.ASTRING,
       BuiltinType.AINT32,
       BuiltinType.AINT32,
       BuiltinType.ADURATION,
       BuiltinType.ADATETIME,
       BuiltinType.ADATETIME,
       BuiltinType.ADATETIME,
       BuiltinType.ADATETIME,
       BuiltinType.ADATETIME
     };
     ARecordType recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
     int numOfTupleFields = 1;
     ISerializerDeserializer[] serdes = new ISerializerDeserializer[1];
     serdes[0] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
     IPrinterFactory[] printerFactories = new IPrinterFactory[1];
     printerFactories[0] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
     // create printers for the output record type
     IPrinter[] printers = new IPrinter[printerFactories.length];
     for (int i = 0; i < printerFactories.length; i++) {
       printers[i] = printerFactories[i].createPrinter();
     }
     ClassAdObjectPool objectPool = new ClassAdObjectPool();
     String[] files = new String[] {"/classad-with-temporals.classads"};
     ClassAdParser parser =
         new ClassAdParser(recordType, false, false, false, null, null, null, objectPool);
     ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
     for (String path : files) {
       List<Path> paths = new ArrayList<>();
       paths.add(Paths.get(getClass().getResource(path).toURI()));
       FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
       LocalFSInputStream in = new LocalFSInputStream(watcher);
       SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader(in, "[", "]");
       while (recordReader.hasNext()) {
         tb.reset();
         IRawRecord<char[]> record = recordReader.next();
         parser.parse(record, tb.getDataOutput());
         tb.addFieldEndOffset();
         printTuple(tb, printers, printStream);
       }
       recordReader.close();
     }
     // Close the stream once, after all input files have been printed.
     printStream.close();
     Assert.assertTrue(FileUtils.contentEquals(file, expected));
   } catch (Throwable th) {
     th.printStackTrace();
     Assert.fail("TEST FAILED: " + th.getMessage());
   }
 }
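
  /**
   * Applies one randomized test operation (insert, delete, update, point search, scan,
   * or merge) to the LSM BTree, ignoring the exceptions that randomly generated tuples
   * are expected to trigger.
   */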
  @Override
  public void performOp(ITupleReference tuple, TestOperation op)
      throws HyracksDataException, IndexException {
    LSMBTreeAccessor accessor = (LSMBTreeAccessor) indexAccessor;
    IIndexCursor searchCursor = accessor.createSearchCursor(false);
    MultiComparator cmp = accessor.getMultiComparator();
    RangePredicate rangePred = new RangePredicate(tuple, tuple, true, true, cmp, cmp);

    switch (op) {
      case INSERT:
        try {
          accessor.insert(tuple);
        } catch (TreeIndexDuplicateKeyException e) {
          // Ignore duplicate keys, since we get random tuples.
        }
        break;

      case DELETE:
        // Create a tuple reference with only key fields.
        deleteTb.reset();
        for (int i = 0; i < numKeyFields; i++) {
          deleteTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
        }
        deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
        try {
          accessor.delete(deleteTuple);
        } catch (TreeIndexNonExistentKeyException e) {
          // Ignore non-existent keys, since we get random tuples.
        }
        break;

      case UPDATE:
        try {
          accessor.update(tuple);
        } catch (TreeIndexNonExistentKeyException e) {
          // Ignore non-existent keys, since we get random tuples.
        } catch (BTreeNotUpdateableException e) {
          // Ignore not updateable exception due to numKeys == numFields.
        }
        break;

      case POINT_SEARCH:
        searchCursor.reset();
        rangePred.setLowKey(tuple, true);
        rangePred.setHighKey(tuple, true);
        accessor.search(searchCursor, rangePred);
        consumeCursorTuples(searchCursor);
        break;

      case SCAN:
        searchCursor.reset();
        rangePred.setLowKey(null, true);
        rangePred.setHighKey(null, true);
        accessor.search(searchCursor, rangePred);
        consumeCursorTuples(searchCursor);
        break;

      case MERGE:
        accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE, lsmBTree.getImmutableComponents());
        break;

      default:
        throw new HyracksDataException("Op " + op.toString() + " not supported.");
    }
  }
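
 /**
  * Builds a two-field record in {@code builder}: an integer key {@code i + 1} followed
  * by a string of {@code TEST_CH} repeated {@code i + 1} times.
  */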
 void makeARecord(ArrayTupleBuilder builder, int i) throws HyracksDataException {
   builder.reset();
   builder.addField(fields[0], i + 1);
   builder.addField(fields[1], Utility.repeatString(TEST_CH, i + 1));
 }
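
 // A minimal sketch of driving makeARecord from a test. The appendRecords name and the
 // FrameTupleAppender parameter are illustrative assumptions, not part of this class;
 // a real caller would also handle append() returning false when the frame fills up.
 void appendRecords(FrameTupleAppender appender, int count) throws HyracksDataException {
   ArrayTupleBuilder builder = new ArrayTupleBuilder(fields.length);
   for (int i = 0; i < count; i++) {
     makeARecord(builder, i); // resets the builder and adds the key and string fields
     appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize());
   }
 }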