private void writeSearchResults(IFrameTupleAccessor leftAccessor, int tIndex) throws Exception {
    // Emits one output tuple per index-cursor match: the probe-side (left) fields
    // of leftAccessor[tIndex] followed by every field of the matched index tuple.
    while (cursor.hasNext()) {
      tb.reset();
      cursor.next();

      ITupleReference frameTuple = cursor.getTuple();
      // Copy the left (probe) tuple's fields into the tuple builder.
      for (int i = 0; i < inputRecDesc.getFields().length; i++) {
        int tupleStart = leftAccessor.getTupleStartOffset(tIndex);
        int fieldStart = leftAccessor.getFieldStartOffset(tIndex, i);
        // Field data begins after the per-tuple field-slot header in the frame.
        int offset = leftAccessor.getFieldSlotsLength() + tupleStart + fieldStart;
        int len = leftAccessor.getFieldEndOffset(tIndex, i) - fieldStart;
        dos.write(leftAccessor.getBuffer().array(), offset, len);
        tb.addFieldEndOffset();
      }
      // Append the matched index tuple's fields after the probe fields.
      for (int i = 0; i < frameTuple.getFieldCount(); i++) {
        dos.write(
            frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
        tb.addFieldEndOffset();
      }

      // If the output frame is full, flush it and retry once; a second failure
      // means the single tuple is larger than a frame.
      if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
        FrameUtils.flushFrame(writeBuffer, writer);
        appender.reset(writeBuffer, true);
        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
          throw new IllegalStateException();
        }
      }
    }
  }
  /**
   * Builds an output tuple from every field of {@code frameTuple} and appends it to the
   * output frame, flushing the frame to the writer when it is full.
   */
  private void writeRightResults(ITupleReference frameTuple) throws Exception {
    tb.reset();
    int fieldCount = frameTuple.getFieldCount();
    for (int f = 0; f < fieldCount; f++) {
      dos.write(frameTuple.getFieldData(f), frameTuple.getFieldStart(f), frameTuple.getFieldLength(f));
      tb.addFieldEndOffset();
    }

    boolean appended = appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
    if (!appended) {
      // Current frame is full: flush it, start a fresh one, and retry exactly once.
      FrameUtils.flushFrame(writeBuffer, writer);
      appender.reset(writeBuffer, true);
      appended = appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
      if (!appended) {
        // A tuple that does not fit in an empty frame cannot be emitted.
        throw new IllegalStateException();
      }
    }
  }
  @Override
  public void open() throws HyracksDataException {
    /** Open the user-defined function proxy before touching the index. */
    functionProxy.functionOpen();
    accessor = new FrameTupleAccessor(treeIndexHelper.getTaskContext().getFrameSize(), recDesc);

    try {
      treeIndexHelper.open();
      btree = (BTree) treeIndexHelper.getIndexInstance();
      cursorFrame = btree.getLeafFrameFactory().createFrame();
      setCursor();

      // Construct range predicate. Key bounds are null here; presumably they are
      // filled in per probe tuple before each search — TODO confirm with callers.
      lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(btree.getComparatorFactories(), lowKey);
      highKeySearchCmp =
          BTreeUtils.getSearchMultiComparator(btree.getComparatorFactories(), highKey);
      rangePred =
          new RangePredicate(
              null, null, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp, highKeySearchCmp);

      // Output plumbing: one frame-sized buffer and a tuple builder sized to the
      // B-tree's field count.
      writeBuffer = treeIndexHelper.getTaskContext().allocateFrame();
      tb = new ArrayTupleBuilder(btree.getFieldCount());
      dos = tb.getDataOutput();
      appender = new FrameTupleAppender(treeIndexHelper.getTaskContext().getFrameSize());
      appender.reset(writeBuffer, true);
      indexAccessor =
          btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);

      // Builder/buffer for cloned update tuples, sized to match the index schema.
      cloneUpdateTb = new ArrayTupleBuilder(btree.getFieldCount());
      updateBuffer.setFieldCount(btree.getFieldCount());
    } catch (Exception e) {
      // Release the index handle on any failure; callers only see HyracksDataException.
      treeIndexHelper.close();
      throw new HyracksDataException(e);
    }
  }
  /**
   * Builds an output tuple from the probe-side (left) fields of {@code leftAccessor} at
   * {@code tIndex} and appends it to the output frame, flushing when the frame is full.
   */
  private void writeLeftResults(IFrameTupleAccessor leftAccessor, int tIndex) throws Exception {
    tb.reset();
    int fieldCount = inputRecDesc.getFields().length;
    int tupleStart = leftAccessor.getTupleStartOffset(tIndex);
    int slotsLen = leftAccessor.getFieldSlotsLength();
    for (int f = 0; f < fieldCount; f++) {
      int start = leftAccessor.getFieldStartOffset(tIndex, f);
      int end = leftAccessor.getFieldEndOffset(tIndex, f);
      // Field data begins after the per-tuple field-slot header in the frame.
      dos.write(leftAccessor.getBuffer().array(), slotsLen + tupleStart + start, end - start);
      tb.addFieldEndOffset();
    }

    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
      // Frame full: flush, reset, and retry once; a tuple larger than a frame is fatal.
      FrameUtils.flushFrame(writeBuffer, writer);
      appender.reset(writeBuffer, true);
      if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
        throw new IllegalStateException();
      }
    }
  }
  /**
   * Processes one frame of probe-side tuples. When no build partition was spilled,
   * the whole frame joins in memory. Otherwise each tuple is routed to its
   * partition: spilled partitions buffer the tuple and flush to disk when full;
   * resident partitions share one buffer that joins in memory when it fills.
   *
   * @param buffer frame of probe tuples
   * @param writer sink for joined result tuples
   * @throws HyracksDataException if buffering, spilling, or joining fails
   */
  public void probe(ByteBuffer buffer, IFrameWriter writer) throws HyracksDataException {
    accessorProbe.reset(buffer);
    int tupleCount = accessorProbe.getTupleCount();

    // Fast path: nothing spilled during build, so the entire frame joins in memory.
    if (numOfSpilledParts == 0) {
      inMemJoiner.join(buffer, writer);
      return;
    }

    for (int i = 0; i < tupleCount; ++i) {
      int pid = probeHpc.partition(accessorProbe, i, numOfPartitions);

      // Skip tuples whose partition received no build tuples — they cannot match.
      if (buildPSizeInTups[pid] > 0) {
        if (pStatus.get(pid)) { // pid is spilled: buffer the tuple, flush to disk when full
          boolean needToClear = false;
          ByteBuffer buff = sPartBuffs[curPBuff[pid]];
          while (true) {
            probeTupAppenderToSpilled.reset(buff, needToClear);
            if (probeTupAppenderToSpilled.append(accessorProbe, i)) {
              break;
            }
            probeWrite(pid, buff);
            buff.clear();
            needToClear = true;
          }
        } else { // pid is resident: join the shared buffer in memory whenever it fills
          while (true) {
            if (probeTupAppenderToResident.append(accessorProbe, i)) {
              break;
            }
            inMemJoiner.join(probeResBuff, writer);
            probeTupAppenderToResident.reset(probeResBuff, true);
          }
        }
        probePSizeInTups[pid]++;
      }
    }
  }
  private void processTuple(int tid, int pid) throws HyracksDataException {
    // Routes build tuple tid into partition pid, growing, spilling, or flushing
    // frames as memory pressure requires.
    // Current in-memory frame for the target partition.
    ByteBuffer partition = memBuffs[curPBuff[pid]];

    if (!pStatus.get(pid)) { // resident partition
      buildTupAppender.reset(partition, false);
      while (true) {
        if (buildTupAppender.append(
            accessorBuild, tid)) { // Tuple added to resident partition successfully
          break;
        }
        // Partition's current frame is full; try to extend it with a fresh frame.
        int newBuffIx = allocateFreeBuffer(pid);
        if (newBuffIx == NO_MORE_FREE_BUFFER) { // Spill one partition
          int pidToSpill = selectPartitionToSpill();
          if (pidToSpill == -1) { // No more partition to spill
            throw new HyracksDataException(
                "not enough memory for Hash Join (Allocation exceeds the limit)");
          }
          spillPartition(pidToSpill);
          buildTupAppender.reset(memBuffs[pidToSpill], true);
          // Retry this tuple from scratch now that memory has been freed; pid may
          // itself have been the one spilled, so routing is re-evaluated.
          processTuple(tid, pid);
          break;
        } // New Buffer allocated successfully
        // curPBuff[pid] was updated by allocateFreeBuffer() above, so re-read the
        // partition's current frame before appending.
        partition = memBuffs[curPBuff[pid]];
        buildTupAppender.reset(partition, true);
        if (!buildTupAppender.append(accessorBuild, tid)) {
          // A tuple must always fit into a brand-new empty frame.
          throw new HyracksDataException(
              "Invalid State (Can not append to newly allocated buffer)");
        }
        buildPSizeInFrames[pid]++;
        break;
      }
    } else { // spilled partition
      boolean needClear = false;
      while (true) {
        buildTupAppender.reset(partition, needClear);
        if (buildTupAppender.append(accessorBuild, tid)) {
          break;
        }
        // Dedicated in-memory buffer for the partition is full, needed to be flushed first
        buildWrite(pid, partition);
        partition.clear();
        needClear = true;
        buildPSizeInFrames[pid]++;
      }
    }
  }
 @Override
 public void close() throws HyracksDataException {
   // Flush remaining output, then release the writer, the search cursor, and the
   // index helper — each stage is cleaned up even if an earlier one fails.
   try {
     try {
       // Flush any buffered output tuples before closing the writer.
       if (appender.getTupleCount() > 0) {
         FrameUtils.flushFrame(writeBuffer, writer);
       }
       writer.close();
     } finally {
       // Close the cursor even when flushing or closing the writer threw;
       // previously a writer failure leaked the open cursor.
       try {
         cursor.close();
       } catch (Exception e) {
         throw new HyracksDataException(e);
       }
     }
   } finally {
     treeIndexOpHelper.close();
   }
 }
  @Override
  public void open() throws HyracksDataException {
    // Opens the index and the downstream writer, then builds the search
    // comparators and output plumbing for a range search.
    accessor = new FrameTupleAccessor(treeIndexOpHelper.getTaskContext().getFrameSize(), recDesc);

    try {
      treeIndexOpHelper.open();
      index = (ITreeIndex) treeIndexOpHelper.getIndexInstance();
      writer.open();

      // Search keys may be prefixes of the full index key; default to the full
      // comparator count when a bound is absent.
      int lowKeySearchFields = index.getComparatorFactories().length;
      int highKeySearchFields = index.getComparatorFactories().length;
      if (lowKey != null) lowKeySearchFields = lowKey.getFieldCount();
      if (highKey != null) highKeySearchFields = highKey.getFieldCount();

      IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
      for (int i = 0; i < lowKeySearchFields; i++) {
        lowKeySearchComparators[i] = index.getComparatorFactories()[i].createBinaryComparator();
      }
      lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);

      // Reuse the low-key comparator when both bounds have the same field count.
      if (lowKeySearchFields == highKeySearchFields) {
        highKeySearchCmp = lowKeySearchCmp;
      } else {
        IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
        for (int i = 0; i < highKeySearchFields; i++) {
          highKeySearchComparators[i] = index.getComparatorFactories()[i].createBinaryComparator();
        }
        highKeySearchCmp = new MultiComparator(highKeySearchComparators);
      }

      // Predicate is created with null key bounds; presumably the bounds are set
      // per input tuple before each search — TODO confirm with callers.
      rangePred =
          new RangePredicate(
              null, null, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp, highKeySearchCmp);
      writeBuffer = treeIndexOpHelper.getTaskContext().allocateFrame();
      // Output tuples carry the input fields followed by the index tuple fields.
      tb = new ArrayTupleBuilder(inputRecDesc.getFields().length + index.getFieldCount());
      dos = tb.getDataOutput();
      appender = new FrameTupleAppender(treeIndexOpHelper.getTaskContext().getFrameSize());
      appender.reset(writeBuffer, true);
      indexAccessor =
          index.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
      setCursor();
    } catch (Exception e) {
      // Release the index handle on any failure; callers only see HyracksDataException.
      treeIndexOpHelper.close();
      throw new HyracksDataException(e);
    }
  }
  @Override
  public void open() throws HyracksDataException {
    // Opens the B-tree, starts a full-range scan, and positions the cursor on the
    // first index tuple so incoming frames can be merged against it.
    accessor = new FrameTupleAccessor(treeIndexOpHelper.getTaskContext().getFrameSize(), recDesc);

    try {
      treeIndexOpHelper.open();
      btree = (BTree) treeIndexOpHelper.getIndexInstance();
      cursorFrame = btree.getLeafFrameFactory().createFrame();
      setCursor();
      writer.open();

      // Full scan: unbounded, inclusive range predicate.
      rangePred = new RangePredicate(null, null, true, true, null, null);
      int lowKeySearchFields = btree.getComparatorFactories().length;
      IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
      for (int i = 0; i < lowKeySearchFields; i++) {
        lowKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
      }
      lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);

      writeBuffer = treeIndexOpHelper.getTaskContext().allocateFrame();
      tb = new ArrayTupleBuilder(btree.getFieldCount());
      dos = tb.getDataOutput();
      appender = new FrameTupleAppender(treeIndexOpHelper.getTaskContext().getFrameSize());
      appender.reset(writeBuffer, true);

      indexAccessor =
          btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);

      /** set the search cursor over the whole key range */
      rangePred.setLowKey(null, true);
      rangePred.setHighKey(null, true);
      cursor.reset();
      indexAccessor.search(cursor, rangePred);

      /** position on the first index tuple, if any */
      if (cursor.hasNext()) {
        cursor.next();
        currentTopTuple = cursor.getTuple();
        match = false;
      }

    } catch (Exception e) {
      // Release the index handle on any failure; callers only see HyracksDataException.
      treeIndexOpHelper.close();
      throw new HyracksDataException(e);
    }
  }
  /**
   * Prepares buffers and bookkeeping for the probe phase: one dedicated frame per
   * spilled partition, a single shared frame for all resident partitions, and
   * per-partition counters/writers.
   */
  public void initProbe() {
    sPartBuffs = new ByteBuffer[numOfSpilledParts];
    for (int ix = 0; ix < numOfSpilledParts; ix++) {
      sPartBuffs[ix] = ctx.allocateFrame();
    }

    /* Only spilled partitions need a dedicated probe frame; resident partitions
     * join their tuples immediately against the resident build tuples via the
     * in-memory joiner. */
    curPBuff = new int[numOfPartitions];
    int spilledBuffIx = 0;
    for (int pid = 0; pid < numOfPartitions; pid++) {
      if (pStatus.get(pid)) {
        curPBuff[pid] = spilledBuffIx++;
      } else {
        curPBuff[pid] = BUFFER_FOR_RESIDENT_PARTS;
      }
    }

    probePSizeInTups = new int[numOfPartitions];
    probeRFWriters = new RunFileWriter[numOfPartitions];

    probeResBuff = ctx.allocateFrame();
    probeTupAppenderToResident = new FrameTupleAppender(ctx.getFrameSize());
    probeTupAppenderToResident.reset(probeResBuff, true);
    probeTupAppenderToSpilled = new FrameTupleAppender(ctx.getFrameSize());
  }