/**
   * Advances the input past the non-id parts of the current chunk — term
   * frequencies, per-field frequencies and (when present) block data —
   * without decompressing any of them.
   *
   * @throws IOException if the underlying input cannot be advanced
   */
  protected final void skip() throws IOException {

    tfsCodec.skip(input);

    // One compressed run per field; a zero field count makes this a no-op.
    for (int f = 0; f < fieldsCount; f++) {
      fieldsCodec.skip(input);
    }

    // Block (position) data exists only when hasBlocks is non-zero.
    if (hasBlocks > 0) {
      blocksCodec.skip(input);
    }
  }
  /**
   * Loads the document ids of the next chunk and decompresses them,
   * undoing the delta (gap) encoding in place. Resets the per-chunk
   * cursor state; the rest of the chunk is decoded lazily by
   * {@code decompress()}.
   *
   * @throws IOException if reading from the underlying input fails
   */
  protected final void load() throws IOException {

    // The last chunk may contain fewer entries than a full chunk.
    chunkSize = Math.min(chunkSize, numberOfEntries);

    idsCodec.decompress(input, ids, chunkSize);
    Delta.inverseDelta(ids, chunkSize);

    // Reset iteration state for the freshly loaded chunk.
    currentPosting = -1;
    blkCnt = 0;

    decompressed = false;
  }
  /**
   * Decompresses the remainder of the current chunk: term frequencies,
   * per-field frequencies and, when the index stores them, block data.
   * Intended to be called after {@code load()} has read the document ids
   * and set {@code chunkSize}.
   *
   * @throws IOException if reading from the underlying input fails
   */
  protected final void decompress() throws IOException {

    tfsCodec.decompress(input, tfs, chunkSize);

    // One compressed run of frequencies per field; no-op when fieldsCount is 0.
    for (int j = 0; j < fieldsCount; j++) {
      fieldsCodec.decompress(input, fieldsMatrix[j], chunkSize);
    }

    if (hasBlocks > 0) {
      // bfs[i] holds the block count of the i-th posting; block counts are
      // encoded with the same codec as term frequencies.
      tfsCodec.decompress(input, bfs, chunkSize);

      // Total number of block entries in this chunk determines how much
      // room blocksMatrix needs before the final decompression pass.
      int numBlocks = 0;
      for (int i = 0; i < chunkSize; i++) {
        numBlocks += bfs[i];
      }
      blocksMatrix = ArrayUtils.growOrCreate(blocksMatrix, numBlocks);
      blocksCodec.decompress(input, blocksMatrix, numBlocks);
    }

    decompressed = true;
  }