    /**
     * Reads the next indexed block of a new-format (compressed) row.
     * Seeks to the block's position, decompresses it, and deserializes its
     * columns into blockColumns, stopping early once a column falls outside
     * the requested slice.
     */
    public boolean getNextBlock() throws IOException {
      if (curRangeIndex < 0 || curRangeIndex >= indexes.size()) return false;

      /* look up the index entry for the current block: it gives the block's offset and size */
      IndexHelper.IndexInfo curColPosition = indexes.get(curRangeIndex);

      /* see if this read is really necessary. */
      if (reversed) {
        if ((finishColumn.length > 0
                && comparator.compare(finishColumn, curColPosition.lastName) > 0)
            || (startColumn.length > 0
                && comparator.compare(startColumn, curColPosition.firstName) < 0)) return false;
      } else {
        if ((startColumn.length > 0 && comparator.compare(startColumn, curColPosition.lastName) > 0)
            || (finishColumn.length > 0
                && comparator.compare(finishColumn, curColPosition.firstName) < 0)) return false;
      }

      boolean outOfBounds = false;

      // seek to the current block;
      // curColPosition.offset is the offset relative to the first block (firstBlockPos)
      file.seek(firstBlockPos + curColPosition.offset);

      // decompress the current block and read its columns into memory
      DataInputStream blockIn =
          ColumnFamily.serializer()
              .getBlockInputStream(file, curColPosition.sizeOnDisk, compressContext);
      try {
        int size = 0; // bytes of column data deserialized so far from this block
        while ((size < curColPosition.width) && !outOfBounds) {
          IColumn column = emptyColumnFamily.getColumnSerializer().deserialize(blockIn);
          size += column.serializedSize();
          if (reversed) blockColumns.addFirst(column);
          else blockColumns.addLast(column);

          /* see if we can stop deserializing: this column is at or past the slice bound. */
          if (!reversed && finishColumn.length > 0)
            outOfBounds = comparator.compare(column.name(), finishColumn) >= 0;
          else if (reversed && startColumn.length > 0)
            outOfBounds = comparator.compare(column.name(), startColumn) >= 0;

          if (outOfBounds) break;
        }
      } catch (IOException e) {
        logger.error(e.toString());
        throw e;
      } finally {
        ColumnFamily.serializer().releaseBlockInputStream(blockIn, compressContext);
      }

      if (reversed) curRangeIndex--;
      else curRangeIndex++;
      return true;
    }
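    /**
     * Reader for rows stored in the old (uncompressed) row format. The constructor
     * positions the file just past the row header (bloom filter, column index,
     * deletion info and column count) and marks that position so getNextBlock()
     * can reset() and skip forward to any indexed block.
     */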
    public ColumnGroupReader(SSTableReader ssTable, DecoratedKey key, FileDataInput input)
        throws IOException {
      this.file = input;
      this.ssTable = ssTable;
      assert file.getAbsolutePosition() == realDataStart;

      // BIGDATA: the key, row size, and row-format reads were moved up into the SSTableSliceIterator constructor.

      IndexHelper.skipBloomFilter(file);
      indexes = IndexHelper.deserializeIndex(file);

      emptyColumnFamily =
          ColumnFamily.serializer()
              .deserializeFromSSTableNoColumns(ssTable.makeColumnFamily(), file);
      file.readInt(); // column count

      file.mark();
      curRangeIndex = IndexHelper.indexFor(startColumn, indexes, comparator, reversed);
      if (reversed && curRangeIndex == indexes.size()) curRangeIndex--;
    }
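    /**
     * Reads the next indexed block of an old-format row into blockColumns.
     * Uses the mark set in the constructor plus the index entry's relative offset,
     * and stops early once a column falls outside the requested slice.
     */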
    public boolean getNextBlock() throws IOException {
      if (curRangeIndex < 0 || curRangeIndex >= indexes.size()) return false;

      /* look up the index entry for the current block: it gives the block's offset and size */
      IndexHelper.IndexInfo curColPosition = indexes.get(curRangeIndex);

      /* see if this read is really necessary. */
      if (reversed) {
        if ((finishColumn.length > 0
                && comparator.compare(finishColumn, curColPosition.lastName) > 0)
            || (startColumn.length > 0
                && comparator.compare(startColumn, curColPosition.firstName) < 0)) return false;
      } else {
        if ((startColumn.length > 0 && comparator.compare(startColumn, curColPosition.lastName) > 0)
            || (finishColumn.length > 0
                && comparator.compare(finishColumn, curColPosition.firstName) < 0)) return false;
      }

      boolean outOfBounds = false;

      // return to the mark set in the constructor (just past the row header),
      // then skip forward to the start of the current block
      file.reset();
      long curOffset = file.skipBytes((int) curColPosition.offset);
      assert curOffset == curColPosition.offset;
      // deserialize until we have consumed this block's width in bytes, or gone out of bounds
      while (file.bytesPastMark() < curColPosition.offset + curColPosition.width && !outOfBounds) {
        IColumn column = emptyColumnFamily.getColumnSerializer().deserialize(file);
        if (reversed) blockColumns.addFirst(column);
        else blockColumns.addLast(column);

        /* see if we can stop deserializing: this column is at or past the slice bound. */
        if (!reversed && finishColumn.length > 0)
          outOfBounds = comparator.compare(column.name(), finishColumn) >= 0;
        else if (reversed && startColumn.length > 0)
          outOfBounds = comparator.compare(column.name(), startColumn) >= 0;

        if (outOfBounds) break;
      }

      if (reversed) curRangeIndex--;
      else curRangeIndex++;
      return true;
    }
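  /**
   * Builds a slice iterator over one row of an SSTable: decorates the key, locates
   * the row on disk, reads the row header (key, row size, row format), and then
   * chooses BigdataColumnGroupReader or ColumnGroupReader based on the row format.
   * If the key is not found in this SSTable (fdi == null), no reader is created.
   */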
  public SSTableSliceIterator(
      SSTableReader ssTable, String key, byte[] startColumn, byte[] finishColumn, boolean reversed)
      throws IOException {
    this.reversed = reversed;

    /* Morph key into actual key based on the partition type. */
    DecoratedKey decoratedKey = ssTable.getPartitioner().decorateKey(key);
    FileDataInput fdi =
        ssTable.getFileDataInput(
            decoratedKey, DatabaseDescriptor.getSlicedReadBufferSizeInKB() * 1024);
    this.comparator = ssTable.getColumnComparator();
    this.startColumn = startColumn;
    this.finishColumn = finishColumn;
    if (fdi != null) {
      // BIGDATA: moved up here from the reader constructors: read the on-disk key,
      // row size, and row format before choosing a reader.
      DecoratedKey keyInDisk = ssTable.getPartitioner().convertFromDiskFormat(fdi.readUTF());
      assert keyInDisk.equals(decoratedKey)
          : String.format("%s != %s in %s", keyInDisk, decoratedKey, fdi.getPath());

      dataSize = fdi.readInt(); // row data size
      dataStart = fdi.getAbsolutePosition();
      rowFormat = ColumnFamily.serializer().deserializeRowFormat(fdi);
      realDataStart = fdi.getAbsolutePosition();

      // !BIGDATA: branch to the appropriate reader for the new row format (with compression) vs. the old row format.
      if (ColumnFamily.serializer().isNewRowFormat(rowFormat)) {
        // new row format
        reader = new BigdataColumnGroupReader(ssTable, decoratedKey, fdi);
      } else {
        // old row format
        reader = new ColumnGroupReader(ssTable, decoratedKey, fdi);
      }
    }
  }
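  /** Closes the underlying file handle for this row. */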
  public void close() throws IOException {
    file.close();
  }
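    /**
     * Reader for rows stored in the new (BIGDATA) row format with block compression.
     * Depending on the row-format flags, the column index is either stored in the
     * header or appended at the end of the row; in the latter case the constructor
     * seeks to the trailer to locate it. It also sets up the compression context
     * used by getNextBlock() to decompress each block.
     */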
    public BigdataColumnGroupReader(SSTableReader ssTable, DecoratedKey key, FileDataInput input)
        throws IOException {
      this.file = input;
      this.ssTable = ssTable;
      assert file.getAbsolutePosition() == realDataStart;

      if (ColumnFamily.serializer().isNewRowFormatIndexAtEnd(rowFormat)) {
        ////// HEADER //////

        // skip bloom filter
        IndexHelper.skipBloomFilter(file);

        // read deletion meta info
        emptyColumnFamily =
            ColumnFamily.serializer()
                .deserializeFromSSTableNoColumns(ssTable.makeColumnFamily(), file);
        file.readInt(); // column count

        // the position of the first block
        firstBlockPos = file.getAbsolutePosition();

        ////// TRAILER //////

        // first seek within the row: jump to the trailer at the end of the row data
        file.seek(dataStart + dataSize - (Integer.SIZE / Byte.SIZE));

        // total index size, including the leading int that stores the column index size
        int indexSize = file.readInt();

        ////// INDEX //////

        // second seek within the row: jump back to the start of the index
        file.seek(dataStart + dataSize - (Integer.SIZE / Byte.SIZE) - indexSize);

        // read index into memory
        indexes = IndexHelper.deserializeIndex(file);
      } else {
        // skip bloom filter
        IndexHelper.skipBloomFilter(file);

        // read in index
        indexes = IndexHelper.deserializeIndex(file);

        // read deletion meta info
        emptyColumnFamily =
            ColumnFamily.serializer()
                .deserializeFromSSTableNoColumns(ssTable.makeColumnFamily(), file);
        file.readInt(); // column count

        // the position of the first block
        firstBlockPos = file.getAbsolutePosition();
      }

      curRangeIndex = IndexHelper.indexFor(startColumn, indexes, comparator, reversed);
      if (reversed && curRangeIndex == indexes.size()) curRangeIndex--;

      // compression algorithm used when writing
      Compression.Algorithm compressAlgo;
      try {
        compressAlgo =
            Compression.getCompressionAlgorithmById(
                ColumnFamily.serializer().getNewRowFormatCompressAlgo(rowFormat));
      } catch (IllegalArgumentException e) {
        logger.error(e.toString());
        throw new IOException(e);
      }
      compressContext = ColumnFamilySerializer.CompressionContext.getInstance(compressAlgo);
    }
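
    // Illustration only: a minimal sketch (not in the original source) of how a caller
    // might drain either reader. It assumes blockColumns is a java.util.Deque<IColumn>
    // (consistent with the addFirst/addLast calls above); the method name pollColumn
    // is an assumption made for this sketch.
    public IColumn pollColumn() throws IOException {
      IColumn column = blockColumns.pollFirst();
      if (column == null && getNextBlock()) // refill from the next indexed block
        column = blockColumns.pollFirst();
      return column; // null means the slice is exhausted
    }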