Example No. 1
  /*
  static boolean hasPendingDeletes(SegmentInfos infos) {
    for (SegmentInfo info : infos) {
      if (info.deletes.any()) {
        return true;
      }
    }
    return false;
  }
  */
  void part2(IndexWriter writer, RangeMergePolicy fsmp) throws Exception {
    for (int x = 20; x < 25; x++) {
      writer.addDocument(DocHelper.createDocument(x, "5", 2));
      // System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }
    writer.flush(false, false);
    for (int x = 25; x < 30; x++) {
      writer.addDocument(DocHelper.createDocument(x, "5", 2));
      // System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }
    writer.flush(false, false);

    // System.out.println("infos3:"+writer.segmentInfos);

    Term delterm = new Term("id", "8");
    writer.deleteDocuments(delterm);
    // System.out.println("segdels3:" + writer.docWriter.deletesToString());

    fsmp.doMerge = true;
    fsmp.start = 1;
    fsmp.length = 2;
    writer.maybeMerge();

    // info1, the segment newly created by the merge, should have
    // no deletes because they were applied during the merge
    // SegmentInfo info1 = writer.segmentInfos.info(1);
    // assertFalse(exists(info1, writer.docWriter.segmentDeletes));

    // System.out.println("infos4:"+writer.segmentInfos);
    // System.out.println("segdels4:" + writer.docWriter.deletesToString());
  }
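
RangeMergePolicy is used in Examples No. 1 and No. 8 but never defined in this listing. For orientation, a test-only policy matching that usage could look like the sketch below; the doMerge/start/length fields mirror the calls above, but the MergePolicy method signatures are assumed from recent Lucene versions and the body is illustrative, not the original implementation.

import java.util.Map;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MergeTrigger;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;

// Sketch only: merges a fixed range of segments when the test sets doMerge.
class RangeMergePolicy extends MergePolicy {

  boolean doMerge = false; // the test flips this, then calls writer.maybeMerge()
  int start; // index of the first segment to merge
  int length; // number of consecutive segments to merge

  private final boolean useCompoundFile;

  RangeMergePolicy(boolean useCompoundFile) {
    this.useCompoundFile = useCompoundFile;
  }

  @Override
  public MergeSpecification findMerges(
      MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) {
    if (!doMerge) {
      return null; // stay inert unless the test explicitly requests a merge
    }
    MergeSpecification ms = new MergeSpecification();
    ms.add(new OneMerge(segmentInfos.asList().subList(start, start + length)));
    doMerge = false; // fire only once per request
    return ms;
  }

  @Override
  public MergeSpecification findForcedMerges(
      SegmentInfos segmentInfos,
      int maxSegmentCount,
      Map<SegmentCommitInfo, Boolean> segmentsToMerge,
      MergeContext mergeContext) {
    return null;
  }

  @Override
  public MergeSpecification findForcedDeletesMerges(
      SegmentInfos segmentInfos, MergeContext mergeContext) {
    return null;
  }

  @Override
  public boolean useCompoundFile(
      SegmentInfos infos, SegmentCommitInfo mergedInfo, MergeContext mergeContext) {
    return useCompoundFile;
  }
}
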
Example No. 2
 public void testIndexDivisor() throws IOException {
   dir = new MockRAMDirectory();
   testDoc = new Document();
   DocHelper.setupDoc(testDoc);
   DocHelper.writeDoc(dir, testDoc);
   testTermDocs(2);
   testBadSeek(2);
   testSkipTo(2);
 }
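
In Example No. 2, the 2 passed to testTermDocs, testBadSeek, and testSkipTo is presumably the term-infos index divisor being exercised; those helpers and the dir/testDoc fields are defined elsewhere in the test class. MockRAMDirectory is a 3.x-era test directory that later Lucene versions replaced with MockDirectoryWrapper around another directory (as Example No. 8 does).
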
Example No. 3
 @Override
 public void setUp() throws Exception {
   super.setUp();
   mergedDir = newDirectory();
   merge1Dir = newDirectory();
   merge2Dir = newDirectory();
   DocHelper.setupDoc(doc1);
   SegmentCommitInfo info1 = DocHelper.writeDoc(random(), merge1Dir, doc1);
   DocHelper.setupDoc(doc2);
   SegmentCommitInfo info2 = DocHelper.writeDoc(random(), merge2Dir, doc2);
   reader1 = new SegmentReader(info1, newIOContext(random()));
   reader2 = new SegmentReader(info2, newIOContext(random()));
 }
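
This setUp appears to pair with the testMerge method in Example No. 9 below: doc1 and doc2 are written to their own single-document directories here, and the two SegmentReaders opened from them are what testMerge later merges into mergedDir.
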
Example No. 4
 public void testIndexDivisorAfterLoad() throws IOException {
   dir = new MockRAMDirectory();
   testDoc = new Document();
   DocHelper.setupDoc(testDoc);
   SegmentInfo si = DocHelper.writeDoc(dir, testDoc);
   SegmentReader reader = SegmentReader.get(si);
   assertEquals(1, reader.docFreq(new Term("keyField", "Keyword")));
   try {
     reader.setTermInfosIndexDivisor(2);
     fail("did not hit IllegalStateException exception");
   } catch (IllegalStateException ise) {
     // expected
   }
 }
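
On newer test infrastructure (JUnit 4.13+, or Lucene's own LuceneTestCase helper), the try/fail/catch pattern above would normally be written with expectThrows. A minimal sketch, keeping this example's old divisor API purely for illustration:

// Sketch: same assertion via expectThrows (JUnit 4.13+ / LuceneTestCase);
// setTermInfosIndexDivisor itself is the old API from the example above.
IllegalStateException ise =
    expectThrows(IllegalStateException.class, () -> reader.setTermInfosIndexDivisor(2));
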
Example No. 5
 @BeforeClass
 public static void beforeClass() throws Exception {
   fieldInfos = new FieldInfos.Builder();
   DocHelper.setupDoc(testDoc);
   for (IndexableField field : testDoc) {
     fieldInfos.addOrUpdate(field.name(), field.fieldType());
   }
   dir = newDirectory();
   IndexWriterConfig conf =
       newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
           .setMergePolicy(newLogMergePolicy());
   ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
   IndexWriter writer = new IndexWriter(dir, conf);
   writer.addDocument(testDoc);
   writer.close();
   FaultyIndexInput.doFail = false;
 }
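
Example No. 6 below reads like a later revision of this same @BeforeClass fixture: FieldInfos.Builder.addOrUpdate and the LogMergePolicy cast give way to getOrAdd with explicit per-field setters and setNoCFSRatio(0.0). The trailing FaultyIndexInput.doFail = false suggests this test class also exercises fault injection on the directory's inputs.
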
Example No. 6
 @BeforeClass
 public static void beforeClass() throws Exception {
   testDoc = new Document();
   fieldInfos = new FieldInfos.Builder();
   DocHelper.setupDoc(testDoc);
   for (IndexableField field : testDoc.getFields()) {
     FieldInfo fieldInfo = fieldInfos.getOrAdd(field.name());
     IndexableFieldType ift = field.fieldType();
     fieldInfo.setIndexOptions(ift.indexOptions());
     if (ift.omitNorms()) {
       fieldInfo.setOmitsNorms();
     }
     fieldInfo.setDocValuesType(ift.docValuesType());
   }
   dir = newDirectory();
   IndexWriterConfig conf =
       newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy());
   conf.getMergePolicy().setNoCFSRatio(0.0);
   IndexWriter writer = new IndexWriter(dir, conf);
   writer.addDocument(testDoc);
   writer.close();
 }
Example No. 7
 protected void setUp() throws Exception {
   super.setUp();
   DocHelper.setupDoc(testDoc);
   info = DocHelper.writeDoc(dir, testDoc);
 }
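
Every example here leans on the DocHelper test fixture, which is not part of this listing. Conceptually, setupDoc fills a Document with a fixed, well-known set of fields (stored, unstored, term-vectored, and so on), and writeDoc indexes that one document into the given Directory as its own segment; newer variants also take a Random and return the new segment's SegmentCommitInfo. A much-reduced hypothetical sketch against a recent Lucene API (field names and bodies are illustrative, not the real DocHelper):

import java.io.IOException;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;

final class MiniDocHelper {

  // Fill the document with a known, repeatable set of fields.
  static void setupDoc(Document doc) {
    doc.add(new StringField("keyField", "Keyword", Field.Store.YES));
    doc.add(new TextField("textField2", "field field field one two three", Field.Store.YES));
  }

  // Index the single document into its own segment and commit.
  // (The real writeDoc also returns the new segment's SegmentCommitInfo,
  // which requires internal index APIs and is omitted here.)
  static void writeDoc(Directory dir, Document doc) throws IOException {
    IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
    try (IndexWriter writer = new IndexWriter(dir, iwc)) {
      writer.addDocument(doc);
      writer.commit();
    }
  }
}
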
Example No. 8
  public void testDeletes1() throws Exception {
    // IndexWriter.debug2 = System.out;
    Directory dir = new MockDirectoryWrapper(new Random(random().nextLong()), new RAMDirectory());
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    iwc.setMergeScheduler(new SerialMergeScheduler());
    iwc.setMaxBufferedDocs(5000);
    iwc.setRAMBufferSizeMB(100);
    RangeMergePolicy fsmp = new RangeMergePolicy(false);
    iwc.setMergePolicy(fsmp);
    IndexWriter writer = new IndexWriter(dir, iwc);
    for (int x = 0; x < 5; x++) {
      writer.addDocument(DocHelper.createDocument(x, "1", 2));
      // System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }
    // System.out.println("commit1");
    writer.commit();
    assertEquals(1, writer.segmentInfos.size());
    for (int x = 5; x < 10; x++) {
      writer.addDocument(DocHelper.createDocument(x, "2", 2));
      // System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }
    // System.out.println("commit2");
    writer.commit();
    assertEquals(2, writer.segmentInfos.size());

    for (int x = 10; x < 15; x++) {
      writer.addDocument(DocHelper.createDocument(x, "3", 2));
      // System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }

    writer.deleteDocuments(new Term("id", "1"));

    writer.deleteDocuments(new Term("id", "11"));

    // flushing without applying deletes means
    // there will still be deletes in the segment infos
    writer.flush(false, false);
    assertTrue(writer.bufferedUpdatesStream.any());

    // getReader flushes pending deletes,
    // so there should not be any more
    IndexReader r1 = writer.getReader();
    assertFalse(writer.bufferedUpdatesStream.any());
    r1.close();

    // delete id:2 from the first segment
    // merge segments 0 and 1
    // which should apply the delete id:2
    writer.deleteDocuments(new Term("id", "2"));
    writer.flush(false, false);
    fsmp = (RangeMergePolicy) writer.getConfig().getMergePolicy();
    fsmp.doMerge = true;
    fsmp.start = 0;
    fsmp.length = 2;
    writer.maybeMerge();

    assertEquals(2, writer.segmentInfos.size());

    // id:2 shouldn't exist anymore because
    // the delete was applied by the merge
    IndexReader r2 = writer.getReader();
    int[] id2docs = toDocsArray(new Term("id", "2"), null, r2);
    assertNull(id2docs);
    r2.close();

    /*
    // added docs are in the ram buffer
    for (int x = 15; x < 20; x++) {
      writer.addDocument(TestIndexWriterReader.createDocument(x, "4", 2));
      System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
    }
    assertTrue(writer.numRamDocs() > 0);

    // delete from the ram buffer
    writer.deleteDocuments(new Term("id", Integer.toString(13)));

    Term id3 = new Term("id", Integer.toString(3));

    // delete from the 1st segment
    writer.deleteDocuments(id3);

    assertTrue(writer.numRamDocs() > 0);

    // System.out.println("segdels1:" + writer.docWriter.deletesToString());
    // assertTrue(writer.docWriter.segmentDeletes.size() > 0);

    // we cause a merge to happen
    fsmp.doMerge = true;
    fsmp.start = 0;
    fsmp.length = 2;
    System.out.println("maybeMerge " + writer.segmentInfos);

    SegmentInfo info0 = writer.segmentInfos.info(0);
    SegmentInfo info1 = writer.segmentInfos.info(1);

    writer.maybeMerge();
    System.out.println("maybeMerge after " + writer.segmentInfos);

    // there should be docs in RAM
    assertTrue(writer.numRamDocs() > 0);

    // assert we've merged the 1 and 2 segments
    // and still have a segment leftover == 2
    assertEquals(2, writer.segmentInfos.size());
    assertFalse(segThere(info0, writer.segmentInfos));
    assertFalse(segThere(info1, writer.segmentInfos));

    // System.out.println("segdels2:" + writer.docWriter.deletesToString());
    // assertTrue(writer.docWriter.segmentDeletes.size() > 0);

    IndexReader r = writer.getReader();
    IndexReader r1 = r.getSequentialSubReaders()[0];
    printDelDocs(r1.getLiveDocs());
    int[] docs = toDocsArray(id3, null, r);
    System.out.println("id3 docs:" + Arrays.toString(docs));
    // there shouldn't be any docs for id:3
    assertTrue(docs == null);
    r.close();

    part2(writer, fsmp);
    */
    // System.out.println("segdels2:"+writer.docWriter.segmentDeletes.toString());
    // System.out.println("close");
    writer.close();
    dir.close();
  }
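
toDocsArray, segThere, and printDelDocs are helpers defined elsewhere in the test class. For orientation, a hypothetical toDocsArray consistent with how the assertions above use it (returning null when the term matches nothing) might look like this sketch against a recent Lucene API:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.Bits;

// Sketch only: gather the (live) doc IDs matching term across all segments.
static int[] toDocsArray(Term term, Bits liveDocs, IndexReader reader) throws IOException {
  List<Integer> docs = new ArrayList<>();
  for (LeafReaderContext ctx : reader.leaves()) {
    PostingsEnum postings = ctx.reader().postings(term);
    if (postings == null) {
      continue; // term absent from this segment
    }
    int doc;
    while ((doc = postings.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
      if (liveDocs == null || liveDocs.get(doc)) {
        docs.add(ctx.docBase + doc); // rebase to a top-level doc ID
      }
    }
  }
  return docs.isEmpty() ? null : docs.stream().mapToInt(Integer::intValue).toArray();
}
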
Example No. 9
  public void testMerge() throws IOException {
    final Codec codec = Codec.getDefault();
    final SegmentInfo si =
        new SegmentInfo(
            mergedDir,
            Version.LATEST,
            mergedSegment,
            -1,
            false,
            codec,
            Collections.emptyMap(),
            StringHelper.randomId(),
            new HashMap<>());

    SegmentMerger merger =
        new SegmentMerger(
            Arrays.<CodecReader>asList(reader1, reader2),
            si,
            InfoStream.getDefault(),
            mergedDir,
            new FieldInfos.FieldNumbers(),
            newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1))));
    MergeState mergeState = merger.merge();
    int docsMerged = mergeState.segmentInfo.maxDoc();
    assertEquals(2, docsMerged);
    // Should be able to open a new SegmentReader against the new directory
    SegmentReader mergedReader =
        new SegmentReader(
            new SegmentCommitInfo(mergeState.segmentInfo, 0, -1L, -1L, -1L),
            newIOContext(random()));
    assertNotNull(mergedReader);
    assertEquals(2, mergedReader.numDocs());
    Document newDoc1 = mergedReader.document(0);
    assertNotNull(newDoc1);
    // There are 2 unstored fields on the document
    assertEquals(
        DocHelper.numFields(doc1) - DocHelper.unstored.size(), DocHelper.numFields(newDoc1));
    Document newDoc2 = mergedReader.document(1);
    assertNotNull(newDoc2);
    assertEquals(
        DocHelper.numFields(doc2) - DocHelper.unstored.size(), DocHelper.numFields(newDoc2));

    PostingsEnum termDocs =
        TestUtil.docs(
            random(), mergedReader, DocHelper.TEXT_FIELD_2_KEY, new BytesRef("field"), null, 0);
    assertNotNull(termDocs);
    assertTrue(termDocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

    int tvCount = 0;
    for (FieldInfo fieldInfo : mergedReader.getFieldInfos()) {
      if (fieldInfo.hasVectors()) {
        tvCount++;
      }
    }

    // System.out.println("stored size: " + stored.size());
    assertEquals("We do not have 3 fields that were indexed with term vector", 3, tvCount);

    Terms vector = mergedReader.getTermVectors(0).terms(DocHelper.TEXT_FIELD_2_KEY);
    assertNotNull(vector);
    assertEquals(3, vector.size());
    TermsEnum termsEnum = vector.iterator();

    int i = 0;
    while (termsEnum.next() != null) {
      String term = termsEnum.term().utf8ToString();
      int freq = (int) termsEnum.totalTermFreq();
      // System.out.println("Term: " + term + " Freq: " + freq);
      assertTrue(DocHelper.FIELD_2_TEXT.contains(term));
      assertEquals(DocHelper.FIELD_2_FREQS[i], freq);
      i++;
    }

    TestSegmentReader.checkNorms(mergedReader);
    mergedReader.close();
  }
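
SegmentMerger, SegmentInfo, and SegmentCommitInfo are internal org.apache.lucene.index APIs, so this test only compiles inside Lucene's own test tree. In the SegmentCommitInfo constructor call above, the arguments after the SegmentInfo are the deletion count followed by the delete, field-infos, and doc-values generations, with -1 meaning no deletes or updates yet; the exact arity varies across Lucene versions (newer releases insert a soft-deletes count).
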