public SSTableSliceIterator(
      SSTableReader ssTable, String key, byte[] startColumn, byte[] finishColumn, boolean reversed)
      throws IOException {
    this.reversed = reversed;

    /* Morph key into actual key based on the partition type. */
    DecoratedKey decoratedKey = ssTable.getPartitioner().decorateKey(key);
    FileDataInput fdi =
        ssTable.getFileDataInput(
            decoratedKey, DatabaseDescriptor.getSlicedReadBufferSizeInKB() * 1024);
    this.comparator = ssTable.getColumnComparator();
    this.startColumn = startColumn;
    this.finishColumn = finishColumn;
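    // getFileDataInput returns null when the requested row is not present in this
    // SSTable; in that case no reader is created and the iterator yields no columns.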
    if (fdi != null) {
      // BIGDATA: moved up here from the reader constructors so the row format can be
      // read before choosing a reader.
      DecoratedKey keyInDisk = ssTable.getPartitioner().convertFromDiskFormat(fdi.readUTF());
      assert keyInDisk.equals(decoratedKey)
          : String.format("%s != %s in %s", keyInDisk, decoratedKey, fdi.getPath());

      dataSize = fdi.readInt(); // row data size
      dataStart = fdi.getAbsolutePosition();
      rowFormat = ColumnFamily.serializer().deserializeRowFormat(fdi);
      realDataStart = fdi.getAbsolutePosition();

      // !BIGDATA: branch to a separate reader for the new row format / compression.
      if (ColumnFamily.serializer().isNewRowFormat(rowFormat)) {
        // new row format
        reader = new BigdataColumnGroupReader(ssTable, decoratedKey, fdi);
      } else {
        // old row format
        reader = new ColumnGroupReader(ssTable, decoratedKey, fdi);
      }
    }
  }
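
  // --------------------------------------------------------------------------
  // Usage sketch (not part of the original class): iterating one row's column
  // slice. "reader" is assumed to be an already-open SSTableReader; column
  // names are raw byte[] values, exactly as the constructor above expects.
  //
  //   SSTableSliceIterator slice =
  //       new SSTableSliceIterator(reader, "row1", "a".getBytes(), "z".getBytes(), false);
  //   while (slice.hasNext()) {
  //     IColumn column = slice.next();
  //     // consume column ...
  //   }
  // --------------------------------------------------------------------------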
 /**
  * ************************************************************************
  * PAY ATTENTION: this method is used internally for messages, and it also
  * reads the old row format in SSTables:
  * [1] Row-Mutation (RowMutationSerializer)
  * [2] Query-Result (RowSerializer)
  * ************************************************************************
  */
 public ColumnFamily deserializeFromSSTable(SSTableReader sstable, DataInput file)
     throws IOException {
   ColumnFamily cf = sstable.makeColumnFamily();
   deserializeFromSSTableNoColumns(cf, file);
   deserializeColumns(file, cf);
   return cf;
 }
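
 // Sketch of what deserializeColumns(file, cf) does for the old format (its body
 // is not shown in this excerpt; this assumes the usual column serializer API and
 // is illustrative only):
 //
 //   int count = file.readInt();                                // column count
 //   for (int i = 0; i < count; i++)
 //     cf.addColumn(cf.getColumnSerializer().deserialize(file));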
    public ColumnGroupReader(SSTableReader ssTable, DecoratedKey key, FileDataInput input)
        throws IOException {
      this.file = input;
      this.ssTable = ssTable;
      assert file.getAbsolutePosition() == realDataStart;

      // BIGDATA: some code was moved up into SSTableSliceIterator (key, data size
      // and row format are already consumed before this constructor runs).

      IndexHelper.skipBloomFilter(file);
      indexes = IndexHelper.deserializeIndex(file);

      emptyColumnFamily =
          ColumnFamily.serializer()
              .deserializeFromSSTableNoColumns(ssTable.makeColumnFamily(), file);
      file.readInt(); // column count

      file.mark();
      curRangeIndex = IndexHelper.indexFor(startColumn, indexes, comparator, reversed);
      if (reversed && curRangeIndex == indexes.size()) curRangeIndex--;
    }
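
     // Old row format on disk, in the order consumed above: bloom filter,
     // column index, deletion meta info, column count, then the column blocks.
     // file.mark() records the start of the column data so the reader can later
     // reset and seek to the block selected by curRangeIndex.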
    public BigdataColumnGroupReader(SSTableReader ssTable, DecoratedKey key, FileDataInput input)
        throws IOException {
      this.file = input;
      this.ssTable = ssTable;
      assert file.getAbsolutePosition() == realDataStart;

      if (ColumnFamily.serializer().isNewRowFormatIndexAtEnd(rowFormat)) {
        ////// HEADER //////

        // skip bloom filter
        IndexHelper.skipBloomFilter(file);

        // read deletion meta info
        emptyColumnFamily =
            ColumnFamily.serializer()
                .deserializeFromSSTableNoColumns(ssTable.makeColumnFamily(), file);
        file.readInt(); // column count

        // the position of the first block
        firstBlockPos = file.getAbsolutePosition();

        ////// TRAILER //////

        // seek to the trailer
        // THE FIRST SEEK!!!
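        // Integer.SIZE / Byte.SIZE == 4, so this positions the file at the last
        // 4 bytes of the row, which hold the serialized index size.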
        file.seek(dataStart + dataSize - (Integer.SIZE / Byte.SIZE));

        // index size (includes the int that stores the column index size)
        int indexSize = file.readInt();

        ////// INDEX //////

        // seek to index position
        // THE SECOND SEEK!!!
        file.seek(dataStart + dataSize - (Integer.SIZE / Byte.SIZE) - indexSize);

        // read index into memory
        indexes = IndexHelper.deserializeIndex(file);
      } else {
        // skip bloom filter
        IndexHelper.skipBloomFilter(file);

        // read in index
        indexes = IndexHelper.deserializeIndex(file);

        // read deletion meta info
        emptyColumnFamily =
            ColumnFamily.serializer()
                .deserializeFromSSTableNoColumns(ssTable.makeColumnFamily(), file);
        file.readInt(); // column count

        // the position of the first block
        firstBlockPos = file.getAbsolutePosition();
      }

      curRangeIndex = IndexHelper.indexFor(startColumn, indexes, comparator, reversed);
      if (reversed && curRangeIndex == indexes.size()) curRangeIndex--;

      // compression algorithm used when writing
      Compression.Algorithm compressAlgo;
      try {
        compressAlgo =
            Compression.getCompressionAlgorithmById(
                ColumnFamily.serializer().getNewRowFormatCompressAlgo(rowFormat));
      } catch (IllegalArgumentException e) {
        logger.error(e.toString());
        throw new IOException(e);
      }
      compressContext = ColumnFamilySerializer.CompressionContext.getInstance(compressAlgo);
    }
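
     // ------------------------------------------------------------------------
     // Layout of a row in the new "index at end" format, as read above
     // (offsets only; block sizes are illustrative):
     //
     //   dataStart                              header: bloom filter, deletion
     //                                          meta info, column count
     //   firstBlockPos                          first (compressed) column block
     //   ...                                    further column blocks
     //   dataStart + dataSize - 4 - indexSize   serialized column index
     //   dataStart + dataSize - 4               trailer: int indexSize
     //   dataStart + dataSize                   end of row
     //
     // The first seek lands on the trailer, the second on the start of the index.
     // ------------------------------------------------------------------------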