@BeforeClass
public static void beforeClass() throws Exception {
  directory = newDirectory();
  RandomIndexWriter writer =
      new RandomIndexWriter(
          random,
          directory,
          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
              .setMergePolicy(newLogMergePolicy()));
  for (int i = 0; i < docFields.length; i++) {
    Document doc = new Document();
    doc.add(newField(field, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
    writer.addDocument(doc);
  }
  writer.close();
  littleReader = IndexReader.open(directory);
  searcher = new IndexSearcher(littleReader);

  // Make big index
  dir2 = new MockDirectoryWrapper(random, new RAMDirectory(directory));

  // First multiply small test index:
  mulFactor = 1;
  int docCount = 0;
  do {
    final Directory copy = new MockDirectoryWrapper(random, new RAMDirectory(dir2));
    RandomIndexWriter w = new RandomIndexWriter(random, dir2);
    w.addIndexes(copy);
    docCount = w.maxDoc();
    w.close();
    mulFactor *= 2;
  } while (docCount < 3000);

  RandomIndexWriter w =
      new RandomIndexWriter(
          random,
          dir2,
          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
              .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 1000)));
  Document doc = new Document();
  doc.add(newField("field2", "xxx", Field.Store.NO, Field.Index.ANALYZED));
  for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++) {
    w.addDocument(doc);
  }
  doc = new Document();
  doc.add(newField("field2", "big bad bug", Field.Store.NO, Field.Index.ANALYZED));
  for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++) {
    w.addDocument(doc);
  }
  reader = w.getReader();
  bigSearcher = newSearcher(reader);
  w.close();
}

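// The statics used in beforeClass() are declared elsewhere in the test class.
// A minimal sketch of plausible declarations so the method above reads
// self-contained -- the concrete docFields values and the NUM_EXTRA_DOCS count
// here are assumptions, not the original fixture:
static Directory directory;
static Directory dir2;
static IndexReader littleReader;
static IndexReader reader;
static IndexSearcher searcher;
static IndexSearcher bigSearcher;
static int mulFactor;
static final String field = "field";
static final String[] docFields = {"w1 w2 w3", "w1 w3 w2 w3", "w1 xx w2 yy"};
static final int NUM_EXTRA_DOCS = 6000;
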
public void testSeekCeilNotFound() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  Document doc = new Document();
  // Get empty string in there!
  doc.add(newStringField("field", "", Field.Store.NO));
  w.addDocument(doc);

  for (int i = 0; i < 36; i++) {
    doc = new Document();
    String term = "" + (char) (97 + i);
    String term2 = "a" + (char) (97 + i);
    doc.add(newTextField("field", term + " " + term2, Field.Store.NO));
    w.addDocument(doc);
  }

  w.forceMerge(1);
  IndexReader r = w.getReader();
  TermsEnum te = MultiFields.getTerms(r, "field").iterator(null);
  assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seekCeil(new BytesRef(new byte[] {0x22})));
  assertEquals("a", te.term().utf8ToString());
  assertEquals(1L, te.ord());
  r.close();
  w.close();
  dir.close();
}

public void testMoreThan32ProhibitedClauses() throws Exception {
  final Directory d = newDirectory();
  final RandomIndexWriter w = new RandomIndexWriter(random(), d);
  Document doc = new Document();
  doc.add(
      new TextField(
          "field",
          "0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33",
          Field.Store.NO));
  w.addDocument(doc);
  doc = new Document();
  doc.add(new TextField("field", "33", Field.Store.NO));
  w.addDocument(doc);
  final IndexReader r = w.getReader();
  w.close();
  final IndexSearcher s = newSearcher(r);

  final BooleanQuery q = new BooleanQuery();
  for (int term = 0; term < 33; term++) {
    q.add(
        new BooleanClause(
            new TermQuery(new Term("field", "" + term)), BooleanClause.Occur.MUST_NOT));
  }
  q.add(new BooleanClause(new TermQuery(new Term("field", "33")), BooleanClause.Occur.SHOULD));

  final int[] count = new int[1];
  s.search(
      q,
      new Collector() {
        private Scorer scorer;

        @Override
        public void setScorer(Scorer scorer) {
          // Make sure we got BooleanScorer:
          this.scorer = scorer;
          assertEquals(
              "Scorer is implemented by wrong class",
              BooleanScorer.class.getName() + "$BucketScorer",
              scorer.getClass().getName());
        }

        @Override
        public void collect(int doc) {
          count[0]++;
        }

        @Override
        public void setNextReader(AtomicReaderContext context) {}

        @Override
        public boolean acceptsDocsOutOfOrder() {
          return true;
        }
      });

  assertEquals(1, count[0]);
  r.close();
  d.close();
}

public void testSpanNot() throws Exception {
  SpanQuery[] clauses = new SpanQuery[2];
  clauses[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "one"));
  clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "three"));
  SpanQuery spq = new SpanNearQuery(clauses, 5, true);
  SpanNotQuery snq =
      new SpanNotQuery(spq, new SpanTermQuery(new Term(PayloadHelper.FIELD, "two")));

  Directory directory = newDirectory();
  RandomIndexWriter writer =
      new RandomIndexWriter(
          random(),
          directory,
          newIndexWriterConfig(new PayloadAnalyzer()).setSimilarity(similarity));
  Document doc = new Document();
  doc.add(newTextField(PayloadHelper.FIELD, "one two three one four three", Field.Store.YES));
  writer.addDocument(doc);
  IndexReader reader = writer.getReader();
  writer.close();

  checkSpans(MultiSpansWrapper.wrap(reader, snq, SpanWeight.Postings.PAYLOADS), 1, new int[] {2});
  reader.close();
  directory.close();
}

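// PayloadAnalyzer is a helper defined elsewhere in this test class. From its use
// above it only needs to attach a payload to every token so the span/payload
// checks have something to read back. A minimal sketch under that assumption --
// the class names and the constant one-byte payload are illustrative, not the
// original helper:
final class SketchPayloadAnalyzer extends Analyzer {
  @Override
  protected TokenStreamComponents createComponents(String fieldName) {
    Tokenizer source = new MockTokenizer(MockTokenizer.WHITESPACE, true);
    return new TokenStreamComponents(source, new SketchPayloadFilter(source));
  }
}

final class SketchPayloadFilter extends TokenFilter {
  private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);

  SketchPayloadFilter(TokenStream input) {
    super(input);
  }

  @Override
  public boolean incrementToken() throws IOException {
    if (!input.incrementToken()) {
      return false;
    }
    payloadAtt.setPayload(new BytesRef(new byte[] {1})); // tag every token with a payload
    return true;
  }
}
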
@Test
public void testFuzzyQuery() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw =
      new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  Document document = new Document();
  document.add(new SuggestField("suggest_field", "suggestion", 2));
  document.add(new SuggestField("suggest_field", "suaggestion", 4));
  document.add(new SuggestField("suggest_field", "ssuggestion", 1));
  iw.addDocument(document);
  document = new Document();
  document.add(new SuggestField("suggest_field", "sugfoo", 1));
  iw.addDocument(document);

  if (rarely()) {
    iw.commit();
  }
  DirectoryReader reader = iw.getReader();
  SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
  CompletionQuery query = new FuzzyCompletionQuery(analyzer, new Term("suggest_field", "sugg"));
  TopSuggestDocs suggest = suggestIndexSearcher.suggest(query, 4);
  assertSuggestions(
      suggest,
      new Entry("suaggestion", 4 * 2),
      new Entry("suggestion", 2 * 3),
      new Entry("sugfoo", 1 * 3),
      new Entry("ssuggestion", 1 * 1));

  reader.close();
  iw.close();
}

public void testCachingWorks() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  writer.close();

  IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
  AtomicReaderContext context = (AtomicReaderContext) reader.getContext();
  MockFilter filter = new MockFilter();
  CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

  // first time, nested filter is called
  DocIdSet strongRef = cacher.getDocIdSet(context, context.reader().getLiveDocs());
  assertTrue("first time", filter.wasCalled());

  // make sure no exception if cache is holding the wrong docIdSet
  cacher.getDocIdSet(context, context.reader().getLiveDocs());

  // second time, nested filter should not be called
  filter.clear();
  cacher.getDocIdSet(context, context.reader().getLiveDocs());
  assertFalse("second time", filter.wasCalled());

  reader.close();
  dir.close();
}

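// MockFilter is a test helper defined elsewhere. From its use above it only has
// to record whether getDocIdSet was invoked, so the test can tell whether the
// caching wrapper delegated or answered from cache. A minimal sketch under that
// assumption (the real test-framework class may differ):
final class MockFilterSketch extends Filter {
  private boolean wasCalled;

  @Override
  public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) {
    wasCalled = true;
    return new FixedBitSet(context.reader().maxDoc()); // empty set: no bits set
  }

  public boolean wasCalled() {
    return wasCalled;
  }

  public void clear() {
    wasCalled = false;
  }
}
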
public void test() throws Exception {
  BaseDirectoryWrapper d = newDirectory();
  d.setCheckIndexOnClose(false); // we nuke files, but verify the reader still works
  RandomIndexWriter w = new RandomIndexWriter(random(), d);
  int numDocs = atLeast(100);
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    doc.add(newField("foo", "bar", TextField.TYPE_NOT_STORED));
    w.addDocument(doc);
  }

  IndexReader r = w.getReader();
  w.commit();
  w.close();
  for (String fileName : d.listAll()) {
    try {
      d.deleteFile(fileName);
      // may succeed, e.g. if the file is completely read into RAM.
    } catch (IOException ioe) {
      // ignore: this means codec (correctly) is holding
      // the file open
    }
  }
  for (LeafReaderContext cxt : r.leaves()) {
    TestUtil.checkReader(cxt.reader());
  }

  r.close();
  d.close();
}

public void testDifferentTypedDocValuesField2() throws Exception {
  Directory d = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), d);
  Document doc = new Document();
  // Index doc values are single-valued so we should not
  // be able to add same field more than once:
  Field f = new NumericDocValuesField("field", 17);
  doc.add(f);
  doc.add(new SortedDocValuesField("field", new BytesRef("hello")));
  try {
    w.addDocument(doc);
    fail("didn't hit expected exception");
  } catch (IllegalArgumentException iae) {
    // expected
  }

  doc = new Document();
  doc.add(f);
  w.addDocument(doc);
  w.forceMerge(1);
  DirectoryReader r = w.getReader();
  assertEquals(17, getOnlySegmentReader(r).getNumericDocValues("field").get(0));
  r.close();
  w.close();
  d.close();
}

@Override
public void setUp() throws Exception {
  super.setUp();
  numIterations = atLeast(50);
  dir = newDirectory();
  RandomIndexWriter writer =
      new RandomIndexWriter(
          random(),
          dir,
          newIndexWriterConfig(
                  TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false))
              .setMaxBufferedDocs(_TestUtil.nextInt(random(), 50, 1000)));
  Document doc = new Document();
  Field field = newStringField("field", "", Field.Store.YES);
  doc.add(field);
  terms = new TreeSet<BytesRef>();

  int num = atLeast(200);
  for (int i = 0; i < num; i++) {
    String s = _TestUtil.randomUnicodeString(random());
    field.setStringValue(s);
    terms.add(new BytesRef(s));
    writer.addDocument(doc);
  }

  termsAutomaton = BasicAutomata.makeStringUnion(terms);

  reader = writer.getReader();
  searcher = newSearcher(reader);
  writer.close();
}

public void testNumerics() throws Exception {
  Directory dir = newDirectory();
  Document doc = new Document();
  Field field = new NumericDocValuesField("numbers", 0);
  doc.add(field);

  IndexWriterConfig iwc = newIndexWriterConfig(random(), null);
  iwc.setMergePolicy(newLogMergePolicy());
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

  int numDocs = TEST_NIGHTLY ? atLeast(500) : atLeast(50);
  for (int i = 0; i < numDocs; i++) {
    field.setLongValue(random().nextLong());
    iw.addDocument(doc);
    if (random().nextInt(17) == 0) {
      iw.commit();
    }
  }
  DirectoryReader ir = iw.getReader();
  iw.forceMerge(1);
  DirectoryReader ir2 = iw.getReader();
  LeafReader merged = getOnlyLeafReader(ir2);
  iw.close();

  NumericDocValues multi = MultiDocValues.getNumericValues(ir, "numbers");
  NumericDocValues single = merged.getNumericDocValues("numbers");
  for (int i = 0; i < numDocs; i++) {
    assertEquals(single.get(i), multi.get(i));
  }
  ir.close();
  ir2.close();
  dir.close();
}

public void testMax() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  Document doc = new Document();
  doc.add(new SortedSetDocValuesField("value", new BytesRef("foo")));
  doc.add(new SortedSetDocValuesField("value", new BytesRef("bar")));
  doc.add(newStringField("id", "1", Field.Store.YES));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new SortedSetDocValuesField("value", new BytesRef("baz")));
  doc.add(newStringField("id", "2", Field.Store.YES));
  writer.addDocument(doc);
  IndexReader ir = writer.getReader();
  writer.close();

  // slow wrapper does not support random access ordinals (there is no need for that!)
  IndexSearcher searcher = newSearcher(ir, false);
  Sort sort = new Sort(new SortedSetSortField("value", false, SortedSetSortField.Selector.MAX));

  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
  assertEquals(2, td.totalHits);
  // 'baz' comes before 'foo'
  assertEquals("2", searcher.doc(td.scoreDocs[0].doc).get("id"));
  assertEquals("1", searcher.doc(td.scoreDocs[1].doc).get("id"));
  assertNoFieldCaches();

  ir.close();
  dir.close();
}

@Test
public void testMultipleSegments() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw =
      new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  int num = Math.min(1000, atLeast(10));
  List<Entry> entries = new ArrayList<>();

  // ensure at least some segments have no suggest field
  for (int i = num; i > 0; i--) {
    Document document = new Document();
    if (random().nextInt(4) == 1) {
      document.add(new SuggestField("suggest_field", "abc_" + i, i));
      entries.add(new Entry("abc_" + i, i));
    }
    document.add(new StoredField("weight_fld", i));
    iw.addDocument(document);
    if (usually()) {
      iw.commit();
    }
  }

  DirectoryReader reader = iw.getReader();
  SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
  PrefixCompletionQuery query =
      new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
  TopSuggestDocs suggest =
      indexSearcher.suggest(query, (entries.size() == 0) ? 1 : entries.size());
  assertSuggestions(suggest, entries.toArray(new Entry[entries.size()]));

  reader.close();
  iw.close();
}

@Test
public void testEarlyTermination() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw =
      new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  int num = Math.min(1000, atLeast(10));

  // have segments of 4 documents
  // with descending suggestion weights
  // suggest should early terminate for
  // segments with docs having lower suggestion weights
  for (int i = num; i > 0; i--) {
    Document document = new Document();
    document.add(new SuggestField("suggest_field", "abc_" + i, i));
    iw.addDocument(document);
    if (i % 4 == 0) {
      iw.commit();
    }
  }

  DirectoryReader reader = iw.getReader();
  SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
  PrefixCompletionQuery query =
      new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
  TopSuggestDocs suggest = indexSearcher.suggest(query, 1);
  assertSuggestions(suggest, new Entry("abc_" + num, num));

  reader.close();
  iw.close();
}

@Test
public void testSuggestOnAllFilteredDocuments() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw =
      new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  int num = Math.min(1000, atLeast(10));
  for (int i = 0; i < num; i++) {
    Document document = new Document();
    document.add(new SuggestField("suggest_field", "abc_" + i, i));
    document.add(newStringField("str_fld", "deleted", Field.Store.NO));
    iw.addDocument(document);
    if (usually()) {
      iw.commit();
    }
  }

  BitsProducer filter =
      new BitsProducer() {
        @Override
        public Bits getBits(LeafReaderContext context) throws IOException {
          return new Bits.MatchNoBits(context.reader().maxDoc());
        }
      };

  DirectoryReader reader = iw.getReader();
  SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
  // no random access required;
  // calling suggest with filter that does not match any documents should early terminate
  PrefixCompletionQuery query =
      new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"), filter);
  TopSuggestDocs suggest = indexSearcher.suggest(query, num);
  assertThat(suggest.totalHits, equalTo(0));

  reader.close();
  iw.close();
}

@Test
public void testDupSuggestFieldValues() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw =
      new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  final int num = Math.min(1000, atLeast(300));
  int[] weights = new int[num];
  for (int i = 0; i < num; i++) {
    Document document = new Document();
    weights[i] = Math.abs(random().nextInt());
    document.add(new SuggestField("suggest_field", "abc", weights[i]));
    iw.addDocument(document);
    if (usually()) {
      iw.commit();
    }
  }

  DirectoryReader reader = iw.getReader();
  Entry[] expectedEntries = new Entry[num];
  Arrays.sort(weights);
  for (int i = 1; i <= num; i++) {
    expectedEntries[i - 1] = new Entry("abc", weights[num - i]);
  }

  SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
  PrefixCompletionQuery query =
      new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc"));
  TopSuggestDocs lookupDocs = suggestIndexSearcher.suggest(query, num);
  assertSuggestions(lookupDocs, expectedEntries);

  reader.close();
  iw.close();
}

@Test
public void testAllContextQuery() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw =
      new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  Document document = new Document();
  document.add(new ContextSuggestField("suggest_field", "suggestion1", 4, "type1"));
  document.add(new ContextSuggestField("suggest_field", "suggestion2", 3, "type2"));
  document.add(new ContextSuggestField("suggest_field", "suggestion3", 2, "type3"));
  iw.addDocument(document);
  document = new Document();
  document.add(new ContextSuggestField("suggest_field", "suggestion4", 1, "type4"));
  iw.addDocument(document);

  if (rarely()) {
    iw.commit();
  }
  DirectoryReader reader = iw.getReader();
  SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
  ContextQuery query =
      new ContextQuery(new PrefixCompletionQuery(analyzer, new Term("suggest_field", "sugg")));
  TopSuggestDocs suggest = suggestIndexSearcher.suggest(query, 4);
  assertSuggestions(
      suggest,
      new Entry("suggestion1", "type1", 4),
      new Entry("suggestion2", "type2", 3),
      new Entry("suggestion3", "type3", 2),
      new Entry("suggestion4", "type4", 1));

  reader.close();
  iw.close();
}

public void testNRTAndCommit() throws Exception {
  Directory dir = newDirectory();
  NRTCachingDirectory cachedDir = new NRTCachingDirectory(dir, 2.0, 25.0);
  MockAnalyzer analyzer = new MockAnalyzer(random());
  analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
  IndexWriterConfig conf = newIndexWriterConfig(analyzer);
  RandomIndexWriter w = new RandomIndexWriter(random(), cachedDir, conf);
  final LineFileDocs docs = new LineFileDocs(random(), true);
  final int numDocs = TestUtil.nextInt(random(), 100, 400);

  if (VERBOSE) {
    System.out.println("TEST: numDocs=" + numDocs);
  }

  final List<BytesRef> ids = new ArrayList<>();
  DirectoryReader r = null;
  for (int docCount = 0; docCount < numDocs; docCount++) {
    final Document doc = docs.nextDoc();
    ids.add(new BytesRef(doc.get("docid")));
    w.addDocument(doc);
    if (random().nextInt(20) == 17) {
      if (r == null) {
        r = DirectoryReader.open(w.w);
      } else {
        final DirectoryReader r2 = DirectoryReader.openIfChanged(r);
        if (r2 != null) {
          r.close();
          r = r2;
        }
      }
      assertEquals(1 + docCount, r.numDocs());
      final IndexSearcher s = newSearcher(r);
      // Just make sure search can run; we can't assert
      // totHits since it could be 0
      TopDocs hits = s.search(new TermQuery(new Term("body", "the")), 10);
      // System.out.println("tot hits " + hits.totalHits);
    }
  }

  if (r != null) {
    r.close();
  }

  // Close should force cache to clear since all files are sync'd
  w.close();

  final String[] cachedFiles = cachedDir.listCachedFiles();
  for (String file : cachedFiles) {
    System.out.println("FAIL: cached file " + file + " remains after sync");
  }
  assertEquals(0, cachedFiles.length);

  r = DirectoryReader.open(dir);
  for (BytesRef id : ids) {
    assertEquals(1, r.docFreq(new Term("docid", id)));
  }
  r.close();

  cachedDir.close();
  docs.close();
}

public void testEmptyDocs() throws IOException {
  Directory dir = newDirectory();
  IndexWriterConfig iwConf =
      newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
  iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);

  // make sure that the fact that documents might be empty is not a problem
  final Document emptyDoc = new Document();
  final int numDocs = random().nextBoolean() ? 1 : atLeast(1000);
  for (int i = 0; i < numDocs; ++i) {
    iw.addDocument(emptyDoc);
  }
  iw.commit();
  final DirectoryReader rd = DirectoryReader.open(dir);
  for (int i = 0; i < numDocs; ++i) {
    final Document doc = rd.document(i);
    assertNotNull(doc);
    assertTrue(doc.getFields().isEmpty());
  }
  rd.close();

  iw.close();
  dir.close();
}

public void testMultiValuedDocValuesField() throws Exception {
  Directory d = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), d);
  Document doc = new Document();
  Field f = new NumericDocValuesField("field", 17);
  // Index doc values are single-valued so we should not
  // be able to add same field more than once:
  doc.add(f);
  doc.add(f);
  try {
    w.addDocument(doc);
    fail("didn't hit expected exception");
  } catch (IllegalArgumentException iae) {
    // expected
  }

  doc = new Document();
  doc.add(f);
  w.addDocument(doc);
  w.forceMerge(1);
  DirectoryReader r = w.getReader();
  w.close();
  assertEquals(17, FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "field", false).get(0));
  r.close();
  d.close();
}

public void testSetAllGroups() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w =
      new RandomIndexWriter(
          random(),
          dir,
          newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
  Document doc = new Document();
  doc.add(newField("group", "foo", StringField.TYPE_NOT_STORED));
  doc.add(new SortedDocValuesField("group", new BytesRef("foo")));
  w.addDocument(doc);

  IndexSearcher indexSearcher = newSearcher(w.getReader());
  w.close();

  GroupingSearch gs = new GroupingSearch("group");
  gs.setAllGroups(true);
  TopGroups<?> groups = gs.search(indexSearcher, new TermQuery(new Term("group", "foo")), 0, 10);
  assertEquals(1, groups.totalHitCount);
  // assertEquals(1, groups.totalGroupCount.intValue());
  assertEquals(1, groups.totalGroupedHitCount);
  assertEquals(1, gs.getAllMatchingGroups().size());
  indexSearcher.getIndexReader().close();
  dir.close();
}

public void testEvilSearcherFactory() throws Exception {
  final Directory dir = newDirectory();
  final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  w.commit();

  final IndexReader other = DirectoryReader.open(dir);

  final SearcherFactory theEvilOne =
      new SearcherFactory() {
        @Override
        public IndexSearcher newSearcher(IndexReader ignored) {
          return LuceneTestCase.newSearcher(other);
        }
      };

  try {
    new SearcherManager(w.w, false, theEvilOne);
    fail("didn't hit expected exception");
  } catch (IllegalStateException ise) {
    // expected
  }
  w.close();
  other.close();
  dir.close();
}

public void testNullDocIdSet() throws Exception {
  // Tests that if a Filter produces a null DocIdSet, which is given to
  // IndexSearcher, everything works fine. This came up in LUCENE-1754.
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  Document doc = new Document();
  doc.add(newStringField("c", "val", Field.Store.NO));
  writer.addDocument(doc);
  IndexReader reader = writer.getReader();
  writer.close();

  // First verify the document is searchable.
  IndexSearcher searcher = newSearcher(reader);
  Assert.assertEquals(1, searcher.search(new MatchAllDocsQuery(), 10).totalHits);

  // Now search w/ a Filter which returns a null DocIdSet
  Filter f =
      new Filter() {
        @Override
        public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) {
          return null;
        }

        @Override
        public String toString(String field) {
          return "nullDocIdSetFilter";
        }
      };

  Assert.assertEquals(
      0, searcher.search(new FilteredQuery(new MatchAllDocsQuery(), f), 10).totalHits);
  reader.close();
  dir.close();
}

public void testNullDocIdSetIterator() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  writer.close();

  IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
  AtomicReaderContext context = (AtomicReaderContext) reader.getContext();

  final Filter filter =
      new Filter() {
        @Override
        public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) {
          return new DocIdSet() {
            @Override
            public DocIdSetIterator iterator() {
              return null;
            }
          };
        }
      };
  CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

  // the caching filter should return the empty set constant
  assertNull(cacher.getDocIdSet(context, context.reader().getLiveDocs()));

  reader.close();
  dir.close();
}

public void testLongFieldMinMax() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  int numDocs = atLeast(100);
  long minValue = Long.MAX_VALUE;
  long maxValue = Long.MIN_VALUE;
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    long num = random().nextLong();
    minValue = Math.min(num, minValue);
    maxValue = Math.max(num, maxValue);
    doc.add(new LegacyLongField("field", num, Field.Store.NO));
    w.addDocument(doc);
  }

  IndexReader r = w.getReader();
  Terms terms = MultiFields.getTerms(r, "field");
  assertEquals(new Long(minValue), LegacyNumericUtils.getMinLong(terms));
  assertEquals(new Long(maxValue), LegacyNumericUtils.getMaxLong(terms));

  r.close();
  w.close();
  dir.close();
}

public void testDoubleFieldMinMax() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  int numDocs = atLeast(100);
  double minValue = Double.POSITIVE_INFINITY;
  double maxValue = Double.NEGATIVE_INFINITY;
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    double num = random().nextDouble();
    minValue = Math.min(num, minValue);
    maxValue = Math.max(num, maxValue);
    doc.add(new LegacyDoubleField("field", num, Field.Store.NO));
    w.addDocument(doc);
  }

  IndexReader r = w.getReader();

  Terms terms = MultiFields.getTerms(r, "field");
  assertEquals(
      minValue, NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMinLong(terms)), 0.0);
  assertEquals(
      maxValue, NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMaxLong(terms)), 0.0);

  r.close();
  w.close();
  dir.close();
}

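// The double min/max assertions above work because LegacyDoubleField indexes each
// double as a "sortable long": a bit pattern whose signed-long ordering matches the
// numeric ordering of the original doubles, so getMinLong/getMaxLong over the terms
// recover the smallest and largest double. A quick sketch of the round trip (the
// method name is illustrative; the NumericUtils helpers are the ones used above):
static void sortableLongRoundTripSketch() {
  double value = 3.14;
  long sortable = NumericUtils.doubleToSortableLong(value);
  // decoding restores the exact double value
  assert NumericUtils.sortableLongToDouble(sortable) == value;
}
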
private float checkPhraseQuery(Document doc, PhraseQuery query, int slop, int expectedNumResults)
    throws Exception {
  query.setSlop(slop);

  Directory ramDir = newDirectory();
  RandomIndexWriter writer =
      new RandomIndexWriter(random, ramDir, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
  writer.addDocument(doc);

  IndexReader reader = writer.getReader();

  IndexSearcher searcher = newSearcher(reader);
  TopDocs td = searcher.search(query, null, 10);
  // System.out.println("slop: " + slop + " query: " + query + " doc: " + doc
  //     + " Expecting number of hits: " + expectedNumResults + " maxScore=" + td.getMaxScore());
  assertEquals(
      "slop: " + slop + " query: " + query + " doc: " + doc + " Wrong number of hits",
      expectedNumResults,
      td.totalHits);

  // QueryUtils.check(query, searcher);
  writer.close();
  searcher.close();
  reader.close();
  ramDir.close();
  return td.getMaxScore();
}

private IndexSearcher getSearcher() throws Exception {
  directory = newDirectory();
  String[] docs =
      new String[] {
        "xx rr yy mm  pp",
        "xx yy mm rr pp",
        "nopayload qq ss pp np",
        "one two three four five six seven eight nine ten eleven",
        "nine one two three four five six seven eight eleven ten"
      };
  RandomIndexWriter writer =
      new RandomIndexWriter(
          random(),
          directory,
          newIndexWriterConfig(new PayloadAnalyzer()).setSimilarity(similarity));

  Document doc = null;
  for (int i = 0; i < docs.length; i++) {
    doc = new Document();
    String docText = docs[i];
    doc.add(newTextField(PayloadHelper.FIELD, docText, Field.Store.YES));
    writer.addDocument(doc);
  }

  closeIndexReader = writer.getReader();
  writer.close();
  IndexSearcher searcher = newSearcher(closeIndexReader);
  return searcher;
}

@Test
public void testContextQueryOnSuggestField() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw =
      new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  Document document = new Document();
  document.add(new SuggestField("suggest_field", "abc", 3));
  document.add(new SuggestField("suggest_field", "abd", 4));
  document.add(new SuggestField("suggest_field", "The Foo Fighters", 2));
  iw.addDocument(document);
  document = new Document();
  document.add(new SuggestField("suggest_field", "abcdd", 5));
  iw.addDocument(document);

  if (rarely()) {
    iw.commit();
  }
  DirectoryReader reader = iw.getReader();
  SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
  ContextQuery query =
      new ContextQuery(new PrefixCompletionQuery(analyzer, new Term("suggest_field", "ab")));
  try {
    suggestIndexSearcher.suggest(query, 4);
    // without this fail() the test would pass silently if no exception is thrown
    fail("expected IllegalStateException: ContextQuery over a plain SuggestField");
  } catch (IllegalStateException expected) {
    assertTrue(expected.getMessage().contains("SuggestField"));
  }
  reader.close();
  iw.close();
}

public void testMethod() throws Exception {
  Directory directory = newDirectory();
  String[] values = new String[] {"1", "2", "3", "4"};

  RandomIndexWriter writer = new RandomIndexWriter(random(), directory);
  for (int i = 0; i < values.length; i++) {
    Document doc = new Document();
    doc.add(newStringField(FIELD, values[i], Field.Store.YES));
    writer.addDocument(doc);
  }
  IndexReader ir = writer.getReader();
  writer.close();

  BooleanQuery booleanQuery1 = new BooleanQuery();
  booleanQuery1.add(new TermQuery(new Term(FIELD, "1")), BooleanClause.Occur.SHOULD);
  booleanQuery1.add(new TermQuery(new Term(FIELD, "2")), BooleanClause.Occur.SHOULD);

  BooleanQuery query = new BooleanQuery();
  query.add(booleanQuery1, BooleanClause.Occur.MUST);
  query.add(new TermQuery(new Term(FIELD, "9")), BooleanClause.Occur.MUST_NOT);

  IndexSearcher indexSearcher = newSearcher(ir);
  ScoreDoc[] hits = indexSearcher.search(query, null, 1000).scoreDocs;
  assertEquals("Number of matched documents", 2, hits.length);
  ir.close();
  directory.close();
}

public void testBasic() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  Document doc = new Document();
  FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
  ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
  Field f = newField("foo", "this is a test test", ft);
  doc.add(f);
  for (int i = 0; i < 100; i++) {
    w.addDocument(doc);
  }

  IndexReader reader = w.getReader();
  w.close();

  assertNotNull(MultiFields.getTermPositionsEnum(reader, "foo", new BytesRef("test")));

  PostingsEnum de =
      TestUtil.docs(random(), reader, "foo", new BytesRef("test"), null, PostingsEnum.FREQS);
  while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
    assertEquals(2, de.freq());
  }

  reader.close();
  dir.close();
}