public void testDocsAndPositionsEnumStart() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  int numIters = atLeast(3);
  MemoryIndex memory = new MemoryIndex(true, false, random().nextInt(50) * 1024 * 1024);
  for (int i = 0; i < numIters; i++) { // check reuse
    memory.addField("foo", "bar", analyzer);
    LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
    TestUtil.checkReader(reader);
    assertEquals(1, reader.terms("foo").getSumTotalTermFreq());
    PostingsEnum disi = reader.postings(new Term("foo", "bar"), PostingsEnum.ALL);
    int docid = disi.docID();
    assertEquals(-1, docid);
    assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    assertEquals(0, disi.nextPosition());
    assertEquals(0, disi.startOffset());
    assertEquals(3, disi.endOffset());

    // now reuse and check again
    TermsEnum te = reader.terms("foo").iterator();
    assertTrue(te.seekExact(new BytesRef("bar")));
    disi = te.postings(disi);
    docid = disi.docID();
    assertEquals(-1, docid);
    assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    reader.close();
    memory.reset();
  }
}
public void testRefCounts2() throws IOException {
  Directory dir1 = getDir1(random());
  Directory dir2 = getDir2(random());
  LeafReader ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1));
  LeafReader ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2));

  // don't close subreaders, so ParallelReader will increment refcounts
  ParallelLeafReader pr = new ParallelLeafReader(false, ir1, ir2);

  // check RefCounts
  assertEquals(2, ir1.getRefCount());
  assertEquals(2, ir2.getRefCount());
  pr.close();
  assertEquals(1, ir1.getRefCount());
  assertEquals(1, ir2.getRefCount());
  ir1.close();
  ir2.close();
  assertEquals(0, ir1.getRefCount());
  assertEquals(0, ir2.getRefCount());
  dir1.close();
  dir2.close();
}
public void testChangeGaps() throws Exception {
  // LUCENE-5324: check that it is possible to change the wrapper's gaps
  final int positionGap = random().nextInt(1000);
  final int offsetGap = random().nextInt(1000);
  final Analyzer delegate = new MockAnalyzer(random());
  final Analyzer a =
      new DelegatingAnalyzerWrapper(delegate.getReuseStrategy()) {
        @Override
        protected Analyzer getWrappedAnalyzer(String fieldName) {
          return delegate;
        }

        @Override
        public int getPositionIncrementGap(String fieldName) {
          return positionGap;
        }

        @Override
        public int getOffsetGap(String fieldName) {
          return offsetGap;
        }
      };

  final RandomIndexWriter writer = new RandomIndexWriter(random(), newDirectory(), a);
  final Document doc = new Document();
  final FieldType ft = new FieldType();
  ft.setIndexOptions(IndexOptions.DOCS);
  ft.setTokenized(true);
  ft.setStoreTermVectors(true);
  ft.setStoreTermVectorPositions(true);
  ft.setStoreTermVectorOffsets(true);
  doc.add(new Field("f", "a", ft));
  doc.add(new Field("f", "a", ft));
  writer.addDocument(doc);

  final LeafReader reader = getOnlySegmentReader(writer.getReader());
  final Fields fields = reader.getTermVectors(0);
  final Terms terms = fields.terms("f");
  final TermsEnum te = terms.iterator();
  assertEquals(new BytesRef("a"), te.next());
  final PostingsEnum dpe = te.postings(null, PostingsEnum.ALL);
  assertEquals(0, dpe.nextDoc());
  assertEquals(2, dpe.freq());
  assertEquals(0, dpe.nextPosition());
  assertEquals(0, dpe.startOffset());
  final int endOffset = dpe.endOffset();
  assertEquals(1 + positionGap, dpe.nextPosition());
  assertEquals(1 + endOffset + offsetGap, dpe.endOffset());
  assertEquals(null, te.next());
  reader.close();
  writer.close();
  writer.w.getDirectory().close();
}
public void testIncompatibleIndexes() throws IOException {
  // two documents:
  Directory dir1 = getDir1(random());

  // one document only:
  Directory dir2 = newDirectory();
  IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())));
  Document d3 = new Document();
  d3.add(newTextField("f3", "v1", Field.Store.YES));
  w2.addDocument(d3);
  w2.close();

  LeafReader ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1));
  LeafReader ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2));

  try {
    new ParallelLeafReader(ir1, ir2);
    fail("didn't get expected exception: indexes don't have same number of documents");
  } catch (IllegalArgumentException e) {
    // expected exception
  }

  try {
    new ParallelLeafReader(
        random().nextBoolean(), new LeafReader[] {ir1, ir2}, new LeafReader[] {ir1, ir2});
    fail("didn't get expected exception: indexes don't have same number of documents");
  } catch (IllegalArgumentException e) {
    // expected exception
  }

  // check RefCounts
  assertEquals(1, ir1.getRefCount());
  assertEquals(1, ir2.getRefCount());
  ir1.close();
  ir2.close();
  dir1.close();
  dir2.close();
}
public void testCloseInnerReader() throws Exception {
  Directory dir1 = getDir1(random());
  LeafReader ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1));

  // with overlapping
  ParallelLeafReader pr =
      new ParallelLeafReader(true, new LeafReader[] {ir1}, new LeafReader[] {ir1});

  ir1.close();

  try {
    pr.document(0);
    fail("ParallelLeafReader should be already closed because inner reader was closed!");
  } catch (AlreadyClosedException e) {
    // pass
  }

  // noop:
  pr.close();
  dir1.close();
}
public void testDocsEnumStart() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  MemoryIndex memory =
      new MemoryIndex(random().nextBoolean(), false, random().nextInt(50) * 1024 * 1024);
  memory.addField("foo", "bar", analyzer);
  LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
  TestUtil.checkReader(reader);
  PostingsEnum disi =
      TestUtil.docs(random(), reader, "foo", new BytesRef("bar"), null, PostingsEnum.NONE);
  int docid = disi.docID();
  assertEquals(-1, docid);
  assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

  // now reuse and check again
  TermsEnum te = reader.terms("foo").iterator();
  assertTrue(te.seekExact(new BytesRef("bar")));
  disi = te.postings(disi, PostingsEnum.NONE);
  docid = disi.docID();
  assertEquals(-1, docid);
  assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
  reader.close();
}
@Override
public void tearDown() throws Exception {
  reader.close();
  directory.close();
  super.tearDown();
}