/**
  * Advances to the next document matching the query. <br>
  * The iterator over the matching documents is buffered using {@link TermDocs#read(int[],int[])}.
  *
  * @return true iff there is another document matching the query.
  */
 public boolean next() throws IOException {
   pointer++;
   if (pointer >= pointerMax) {
     pointerMax = termDocs.read(docs, freqs); // refill buffer
     if (pointerMax != 0) {
       pointer = 0;
     } else {
       termDocs.close(); // close stream
       doc = Integer.MAX_VALUE; // set to sentinel value
       return false;
     }
   }
   doc = docs[pointer];
   return true;
 }
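
  // Usage sketch (illustrative, not from the original source): a caller typically
  // drains the matching documents by looping on next(), for example:
  //   while (next()) { collect(doc); }
  // where collect(...) is a hypothetical consumer of the current document number.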
  public boolean AddTFcollection() throws IOException {
    HashMap<String, Double> CTF = new HashMap<String, Double>(); // collection term frequency (total occurrences) per term text

    termEum = reader.terms(); // enumerate every term in the index
    while (termEum.next()) {

      term = termEum.term();

      termDocs = reader.termDocs(term); // postings (doc, freq) for this term

      if (termDocs == null) {
        termEum.close(); // release the term enumeration before aborting
        return false;
      }

      next(); // position on the first document for this term (fills docs[]/freqs[])

      tf = 0.0d;
      while (doc < Integer.MAX_VALUE) { // iterate every document containing the term
        tf += freqs[pointer]; // add this document's term frequency to the collection total

        if (++pointer >= pointerMax) {
          pointerMax = termDocs.read(docs, freqs); // refill buffers
          if (pointerMax != 0) {
            pointer = 0;
          } else {
            termDocs.close(); // close stream
            doc = Integer.MAX_VALUE; // set to sentinel value
            break;
          }
        }
        doc = docs[pointer];
      }

      // System.out.println(term.text()+"  "+ tf);
      CTF.put(term.text(), tf);
    }
    termEum.close();
    FileOutputStream result = new FileOutputStream(new File(indexDir + "ExtendedCTF"));

    ByteArrayOutputStream bo = new ByteArrayOutputStream();
    ObjectOutputStream oo = new ObjectOutputStream(bo);
    oo.writeObject(CTF);
    oo.close(); // flush the serialization buffer before copying the bytes out
    result.write(bo.toByteArray());
    result.close();
    return true;
  }
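
  /**
   * Sketch only (not part of the original class): shows how the map written by
   * AddTFcollection() could be read back from the "ExtendedCTF" file. The method
   * name and the raw HashMap return type are illustrative, and it assumes the
   * surrounding file imports java.io.ObjectInputStream and java.io.FileInputStream.
   */
  public HashMap readTFcollection() throws IOException, ClassNotFoundException {
    ObjectInputStream in =
        new ObjectInputStream(new FileInputStream(new File(indexDir + "ExtendedCTF")));
    try {
      // Deserialize the HashMap exactly as it was written by writeObject() above.
      return (HashMap) in.readObject();
    } finally {
      in.close(); // always release the file handle
    }
  }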