@SuppressWarnings("unchecked")
  public static void writeObject(IAObject obj, DataOutput dataOutput) throws HyracksDataException {
    switch (obj.getType().getTypeTag()) {
      case RECORD:
        {
          IARecordBuilder recordBuilder = new RecordBuilder();
          recordBuilder.reset((ARecordType) obj.getType());
          recordBuilder.init();
          writeRecord((AMutableRecord) obj, dataOutput, recordBuilder);
          break;
        }

      case ORDEREDLIST:
        {
          OrderedListBuilder listBuilder = new OrderedListBuilder();
          listBuilder.reset((AOrderedListType) ((AMutableOrderedList) obj).getType());
          IACursor cursor = ((AMutableOrderedList) obj).getCursor();
          ArrayBackedValueStorage listItemValue = new ArrayBackedValueStorage();
          while (cursor.next()) {
            listItemValue.reset();
            IAObject item = cursor.get();
            writeObject(item, listItemValue.getDataOutput());
            listBuilder.addItem(listItemValue);
          }
          listBuilder.write(dataOutput, true);
          break;
        }

      case UNORDEREDLIST:
        {
          UnorderedListBuilder listBuilder = new UnorderedListBuilder();
          listBuilder.reset((AUnorderedListType) ((AMutableUnorderedList) obj).getType());
          IACursor cursor = ((AMutableUnorderedList) obj).getCursor();
          ArrayBackedValueStorage listItemValue = new ArrayBackedValueStorage();
          while (cursor.next()) {
            listItemValue.reset();
            IAObject item = cursor.get();
            writeObject(item, listItemValue.getDataOutput());
            listBuilder.addItem(listItemValue);
          }
          listBuilder.write(dataOutput, true);
          break;
        }

      default:
        AqlSerializerDeserializerProvider.INSTANCE
            .getSerializerDeserializer(obj.getType())
            .serialize(obj, dataOutput);
        break;
    }
  }
/** Translates a CompactionPolicy metadata entity to an ITupleReference and vice versa. */
public class CompactionPolicyTupleTranslator extends AbstractTupleTranslator<CompactionPolicy> {
  // Field indexes of serialized CompactionPolicy in a tuple.
  // Key field.
  public static final int COMPACTION_POLICY_DATAVERSE_NAME_FIELD_INDEX = 0;

  public static final int COMPACTION_POLICY_NAME_FIELD_INDEX = 1;

  // Payload field containing serialized compactionPolicy.
  public static final int COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX = 2;

  @SuppressWarnings("unchecked")
  private ISerializerDeserializer<ARecord> recordSerDes =
      AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(
          MetadataRecordTypes.COMPACTION_POLICY_RECORDTYPE);

  public CompactionPolicyTupleTranslator(boolean getTuple) {
    super(getTuple, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET.getFieldCount());
  }

  @Override
  public CompactionPolicy getMetadataEntityFromTuple(ITupleReference tuple) throws IOException {
    byte[] serRecord = tuple.getFieldData(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = tuple.getFieldStart(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = tuple.getFieldLength(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream =
        new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord compactionPolicyRecord = (ARecord) recordSerDes.deserialize(in);
    return createCompactionPolicyFromARecord(compactionPolicyRecord);
  }

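  /**
   * Builds a CompactionPolicy instance from the dataverse name, policy name, and class name
   * fields of the deserialized metadata record.
   */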
  private CompactionPolicy createCompactionPolicyFromARecord(ARecord compactionPolicyRecord) {
    CompactionPolicy compactionPolicy = null;
    String dataverseName =
        ((AString)
                compactionPolicyRecord.getValueByPos(
                    MetadataRecordTypes.COMPACTION_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX))
            .getStringValue();
    String policyName =
        ((AString)
                compactionPolicyRecord.getValueByPos(
                    MetadataRecordTypes.COMPACTION_POLICY_ARECORD_POLICY_NAME_FIELD_INDEX))
            .getStringValue();
    String className =
        ((AString)
                compactionPolicyRecord.getValueByPos(
                    MetadataRecordTypes.COMPACTION_POLICY_ARECORD_CLASSNAME_FIELD_INDEX))
            .getStringValue();

    compactionPolicy = new CompactionPolicy(dataverseName, policyName, className);
    return compactionPolicy;
  }

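  /**
   * Writes the two key fields (dataverse name, policy name) followed by the payload field
   * containing the serialized CompactionPolicy record.
   */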
  @Override
  public ITupleReference getTupleFromMetadataEntity(CompactionPolicy compactionPolicy)
      throws IOException, MetadataException {

    tupleBuilder.reset();
    aString.setValue(compactionPolicy.getDataverseName());
    stringSerde.serialize(aString, tupleBuilder.getDataOutput());
    tupleBuilder.addFieldEndOffset();

    aString.setValue(compactionPolicy.getPolicyName());
    stringSerde.serialize(aString, tupleBuilder.getDataOutput());
    tupleBuilder.addFieldEndOffset();

    recordBuilder.reset(MetadataRecordTypes.COMPACTION_POLICY_RECORDTYPE);

    // write field 0
    fieldValue.reset();
    aString.setValue(compactionPolicy.getDataverseName());
    stringSerde.serialize(aString, fieldValue.getDataOutput());
    recordBuilder.addField(
        MetadataRecordTypes.COMPACTION_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX, fieldValue);

    // write field 1
    fieldValue.reset();
    aString.setValue(compactionPolicy.getPolicyName());
    stringSerde.serialize(aString, fieldValue.getDataOutput());
    recordBuilder.addField(
        MetadataRecordTypes.COMPACTION_POLICY_ARECORD_POLICY_NAME_FIELD_INDEX, fieldValue);

    // write field 2
    fieldValue.reset();
    aString.setValue(compactionPolicy.getClassName());
    stringSerde.serialize(aString, fieldValue.getDataOutput());
    recordBuilder.addField(
        MetadataRecordTypes.COMPACTION_POLICY_ARECORD_CLASSNAME_FIELD_INDEX, fieldValue);

    // write record
    recordBuilder.write(tupleBuilder.getDataOutput(), true);
    tupleBuilder.addFieldEndOffset();

    tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
    return tuple;
  }
}
public abstract class AbstractAvgAggregateFunction implements ICopyAggregateFunction {
  private static final int SUM_FIELD_ID = 0;
  private static final int COUNT_FIELD_ID = 1;

  private final ARecordType recType;

  private DataOutput out;
  private ArrayBackedValueStorage inputVal = new ArrayBackedValueStorage();
  private ICopyEvaluator eval;
  protected ATypeTag aggType;
  private double sum;
  private long count;
  private AMutableDouble aDouble = new AMutableDouble(0);
  private AMutableInt64 aInt64 = new AMutableInt64(0);

  private ArrayBackedValueStorage avgBytes = new ArrayBackedValueStorage();
  private ByteArrayAccessibleOutputStream sumBytes = new ByteArrayAccessibleOutputStream();
  private DataOutput sumBytesOutput = new DataOutputStream(sumBytes);
  private ByteArrayAccessibleOutputStream countBytes = new ByteArrayAccessibleOutputStream();
  private DataOutput countBytesOutput = new DataOutputStream(countBytes);
  private ICopyEvaluator evalSum = new AccessibleByteArrayEval(avgBytes.getDataOutput(), sumBytes);
  private ICopyEvaluator evalCount =
      new AccessibleByteArrayEval(avgBytes.getDataOutput(), countBytes);
  private ClosedRecordConstructorEval recordEval;

  @SuppressWarnings("unchecked")
  private ISerializerDeserializer<ADouble> doubleSerde =
      AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);

  @SuppressWarnings("unchecked")
  private ISerializerDeserializer<AInt64> longSerde =
      AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);

  @SuppressWarnings("unchecked")
  private ISerializerDeserializer<ANull> nullSerde =
      AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);

  public AbstractAvgAggregateFunction(ICopyEvaluatorFactory[] args, IDataOutputProvider output)
      throws AlgebricksException {
    eval = args[0].createEvaluator(inputVal);
    out = output.getDataOutput();

    ARecordType tmpRecType;
    try {
      tmpRecType =
          new ARecordType(
              null,
              new String[] {"sum", "count"},
              new IAType[] {BuiltinType.ADOUBLE, BuiltinType.AINT64},
              false);
    } catch (AsterixException | HyracksDataException e) {
      throw new AlgebricksException(e);
    }

    recType = tmpRecType;
    recordEval =
        new ClosedRecordConstructorEval(
            recType, new ICopyEvaluator[] {evalSum, evalCount}, avgBytes, out);
  }

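  /** Resets the running aggregate state (aggregate type, sum, and count). */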
  @Override
  public void init() {
    aggType = ATypeTag.SYSTEM_NULL;
    sum = 0.0;
    count = 0;
  }

  public abstract void step(IFrameTupleReference tuple) throws AlgebricksException;

  public abstract void finish() throws AlgebricksException;

  public abstract void finishPartial() throws AlgebricksException;

  protected abstract void processNull();

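  /**
   * Evaluates the next input value, validates and (if necessary) promotes the running aggregate
   * type, and accumulates the numeric value into the running sum and count.
   */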
  protected void processDataValues(IFrameTupleReference tuple) throws AlgebricksException {
    if (skipStep()) {
      return;
    }
    inputVal.reset();
    eval.evaluate(tuple);
    ATypeTag typeTag =
        EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[0]);
    if (typeTag == ATypeTag.NULL) {
      processNull();
      return;
    } else if (aggType == ATypeTag.SYSTEM_NULL) {
      aggType = typeTag;
    } else if (typeTag != ATypeTag.SYSTEM_NULL && !ATypeHierarchy.isCompatible(typeTag, aggType)) {
      throw new AlgebricksException(
          "Unexpected type "
              + typeTag
              + " in aggregation input stream. Expected type "
              + aggType
              + ".");
    } else if (ATypeHierarchy.canPromote(aggType, typeTag)) {
      aggType = typeTag;
    }
    ++count;
    switch (typeTag) {
      case INT8:
        {
          byte val = AInt8SerializerDeserializer.getByte(inputVal.getByteArray(), 1);
          sum += val;
          break;
        }
      case INT16:
        {
          short val = AInt16SerializerDeserializer.getShort(inputVal.getByteArray(), 1);
          sum += val;
          break;
        }
      case INT32:
        {
          int val = AInt32SerializerDeserializer.getInt(inputVal.getByteArray(), 1);
          sum += val;
          break;
        }
      case INT64:
        {
          long val = AInt64SerializerDeserializer.getLong(inputVal.getByteArray(), 1);
          sum += val;
          break;
        }
      case FLOAT:
        {
          float val = AFloatSerializerDeserializer.getFloat(inputVal.getByteArray(), 1);
          sum += val;
          break;
        }
      case DOUBLE:
        {
          double val = ADoubleSerializerDeserializer.getDouble(inputVal.getByteArray(), 1);
          sum += val;
          break;
        }
      default:
        {
          throw new NotImplementedException("Cannot compute AVG for values of type " + typeTag);
        }
    }
    inputVal.reset();
  }

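  /**
   * Emits the partial aggregate as a closed {sum: double, count: int64} record; emits
   * SYSTEM_NULL when no input was seen and NULL when a null value was encountered.
   */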
  protected void finishPartialResults() throws AlgebricksException {
    try {
      // No input value was seen; report SYSTEM_NULL so that an empty input can be
      // distinguished from an actual null.
      if (aggType == ATypeTag.SYSTEM_NULL) {
        if (GlobalConfig.DEBUG) {
          GlobalConfig.ASTERIX_LOGGER.finest("AVG aggregate ran over empty input.");
        }
        out.writeByte(ATypeTag.SYSTEM_NULL.serialize());
      } else if (aggType == ATypeTag.NULL) {
        out.writeByte(ATypeTag.NULL.serialize());
      } else {
        sumBytes.reset();
        aDouble.setValue(sum);
        doubleSerde.serialize(aDouble, sumBytesOutput);
        countBytes.reset();
        aInt64.setValue(count);
        longSerde.serialize(aInt64, countBytesOutput);
        recordEval.evaluate(null);
      }
    } catch (IOException e) {
      throw new AlgebricksException(e);
    }
  }

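  /**
   * Merges a partial result produced by finishPartialResults(): a {sum, count} record is
   * accumulated, NULL is routed to processNull(), and SYSTEM_NULL is ignored.
   */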
  protected void processPartialResults(IFrameTupleReference tuple) throws AlgebricksException {
    if (skipStep()) {
      return;
    }
    inputVal.reset();
    eval.evaluate(tuple);
    byte[] serBytes = inputVal.getByteArray();
    ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serBytes[0]);
    switch (typeTag) {
      case NULL:
        {
          processNull();
          break;
        }
      case SYSTEM_NULL:
        {
          // A SYSTEM_NULL partial result means its producer saw no input; ignore it.
          break;
        }
      case RECORD:
        {
          // The expected case: a partial {sum, count} record produced by finishPartialResults().
          aggType = ATypeTag.DOUBLE;
          int nullBitmapSize = 0;
          int offset1 =
              ARecordSerializerDeserializer.getFieldOffsetById(
                  serBytes, SUM_FIELD_ID, nullBitmapSize, false);
          sum += ADoubleSerializerDeserializer.getDouble(serBytes, offset1);
          int offset2 =
              ARecordSerializerDeserializer.getFieldOffsetById(
                  serBytes, COUNT_FIELD_ID, nullBitmapSize, false);
          count += AInt64SerializerDeserializer.getLong(serBytes, offset2);
          break;
        }
      default:
        {
          throw new AlgebricksException(
              "Global-Avg is not defined for values of type "
                  + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serBytes[0]));
        }
    }
  }

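  /** Writes the final average (sum / count) as a double, or NULL when nothing was aggregated. */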
  protected void finishFinalResults() throws AlgebricksException {
    try {
      if (count == 0 || aggType == ATypeTag.NULL) {
        nullSerde.serialize(ANull.NULL, out);
      } else {
        aDouble.setValue(sum / count);
        doubleSerde.serialize(aDouble, out);
      }
    } catch (IOException e) {
      throw new AlgebricksException(e);
    }
  }

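  /** Hook that lets subclasses short-circuit a step; the default implementation never skips. */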
  protected boolean skipStep() {
    return false;
  }
}
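/**
 * Base evaluator for scalar functions that take three string arguments and produce a string
 * result. Subclasses supply the actual computation by implementing {@link #compute}.
 */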
public abstract class AbstractTripleStringStringEval implements ICopyEvaluator {

  private DataOutput dout;
  private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
  private static final byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
  private ArrayBackedValueStorage array0 = new ArrayBackedValueStorage();
  private ArrayBackedValueStorage array1 = new ArrayBackedValueStorage();
  private ArrayBackedValueStorage array2 = new ArrayBackedValueStorage();
  private ICopyEvaluator eval0;
  private ICopyEvaluator eval1;
  private ICopyEvaluator eval2;

  private AMutableString resultBuffer = new AMutableString("");

  @SuppressWarnings("rawtypes")
  private ISerializerDeserializer nullSerde =
      AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);

  @SuppressWarnings("rawtypes")
  private ISerializerDeserializer strSerde =
      AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);

  private final UTF8StringPointable strPtr1st = new UTF8StringPointable();
  private final UTF8StringPointable strPtr2nd = new UTF8StringPointable();
  private final UTF8StringPointable strPtr3rd = new UTF8StringPointable();

  private final FunctionIdentifier funcID;

  public AbstractTripleStringStringEval(
      DataOutput dout,
      ICopyEvaluatorFactory eval0,
      ICopyEvaluatorFactory eval1,
      ICopyEvaluatorFactory eval2,
      FunctionIdentifier funcID)
      throws AlgebricksException {
    this.dout = dout;
    this.eval0 = eval0.createEvaluator(array0);
    this.eval1 = eval1.createEvaluator(array1);
    this.eval2 = eval2.createEvaluator(array2);
    this.funcID = funcID;
  }

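  /**
   * Evaluates the three arguments, returns NULL if any of them is null, rejects non-string
   * inputs with a descriptive error, and otherwise serializes the string returned by compute().
   */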
  @SuppressWarnings("unchecked")
  @Override
  public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
    array0.reset();
    eval0.evaluate(tuple);
    array1.reset();
    eval1.evaluate(tuple);
    array2.reset();
    eval2.evaluate(tuple);

    try {
      if (array0.getByteArray()[0] == SER_NULL_TYPE_TAG
          || array1.getByteArray()[0] == SER_NULL_TYPE_TAG
          || array2.getByteArray()[0] == SER_NULL_TYPE_TAG) {
        nullSerde.serialize(ANull.NULL, dout);
        return;
      } else if (array0.getByteArray()[0] != SER_STRING_TYPE_TAG
          || array1.getByteArray()[0] != SER_STRING_TYPE_TAG
          || array2.getByteArray()[0] != SER_STRING_TYPE_TAG) {
        throw new AlgebricksException(
            funcID.getName()
                + ": expects input type (STRING/NULL, STRING/NULL, STRING/NULL), but got ("
                + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array0.getByteArray()[0])
                + ", "
                + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array1.getByteArray()[0])
                + ", "
                + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array2.getByteArray()[0])
                + ".");
      }
    } catch (HyracksDataException e) {
      throw new AlgebricksException(e);
    }

    strPtr1st.set(array0.getByteArray(), array0.getStartOffset() + 1, array0.getLength());
    strPtr2nd.set(array1.getByteArray(), array1.getStartOffset() + 1, array1.getLength());
    strPtr3rd.set(array2.getByteArray(), array2.getStartOffset() + 1, array2.getLength());

    String res = compute(strPtr1st, strPtr2nd, strPtr3rd);
    resultBuffer.setValue(res);
    try {
      strSerde.serialize(resultBuffer, dout);
    } catch (HyracksDataException e) {
      throw new AlgebricksException(e);
    }
  }

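  /**
   * Computes the result string from the three argument strings; each pointable is positioned
   * just past the type tag byte of the serialized value.
   */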
  protected abstract String compute(
      UTF8StringPointable strPtr1st, UTF8StringPointable strPtr2nd, UTF8StringPointable strPtr3rd)
      throws AlgebricksException;
}
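 // Parses ClassAd records containing temporal fields into typed ADM records, prints them with
 // the ADM printer, and compares the output against the expected .adm result file.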
 @SuppressWarnings("rawtypes")
 public void testSchemaful() {
   try {
     File file = new File("target/classad-with-temporals.adm");
     File expected =
         new File(getClass().getResource("/results/classad-with-temporals.adm").toURI().getPath());
     FileUtils.deleteQuietly(file);
     PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
     String[] recordFieldNames = {
       "GlobalJobId",
       "Owner",
       "ClusterId",
       "ProcId",
       "RemoteWallClockTime",
       "CompletionDate",
       "QDate",
       "JobCurrentStartDate",
       "JobStartDate",
       "JobCurrentStartExecutingDate"
     };
     IAType[] recordFieldTypes = {
       BuiltinType.ASTRING,
       BuiltinType.ASTRING,
       BuiltinType.AINT32,
       BuiltinType.AINT32,
       BuiltinType.ADURATION,
       BuiltinType.ADATETIME,
       BuiltinType.ADATETIME,
       BuiltinType.ADATETIME,
       BuiltinType.ADATETIME,
       BuiltinType.ADATETIME
     };
     ARecordType recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
     int numOfTupleFields = 1;
     ISerializerDeserializer[] serdes = new ISerializerDeserializer[1];
     serdes[0] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
     IPrinterFactory[] printerFactories = new IPrinterFactory[1];
     printerFactories[0] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
     // create output descriptor
     IPrinter[] printers = new IPrinter[printerFactories.length];
     for (int i = 0; i < printerFactories.length; i++) {
       printers[i] = printerFactories[i].createPrinter();
     }
     ClassAdObjectPool objectPool = new ClassAdObjectPool();
     String[] files = new String[] {"/classad-with-temporals.classads"};
     ClassAdParser parser =
         new ClassAdParser(recordType, false, false, false, null, null, null, objectPool);
     ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
     for (String path : files) {
       List<Path> paths = new ArrayList<>();
       paths.add(Paths.get(getClass().getResource(path).toURI()));
       FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
       LocalFSInputStream in = new LocalFSInputStream(watcher);
       SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader(in, "[", "]");
       while (recordReader.hasNext()) {
         tb.reset();
         IRawRecord<char[]> record = recordReader.next();
         parser.parse(record, tb.getDataOutput());
         tb.addFieldEndOffset();
         printTuple(tb, printers, printStream);
       }
       recordReader.close();
     }
     // Close the output only after all input files have been processed, then compare.
     printStream.close();
     Assert.assertTrue(FileUtils.contentEquals(file, expected));
   } catch (Throwable th) {
     th.printStackTrace();
     Assert.fail("TEST FAILED: " + th.getMessage());
   }
   System.err.println("TEST PASSED");
 }