// test using a sparse index (with deleted docs).
@Test
public void testSparseIndex() throws IOException {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(dir,
      newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));

  for (int d = -20; d <= 20; d++) {
    Document doc = new Document();
    doc.add(new IntField("id_int", d, Field.Store.NO));
    doc.add(newStringField("body", "body", Field.Store.NO));
    writer.addDocument(doc);
  }

  writer.forceMerge(1);
  // IntField values are indexed as prefix-coded terms, so the document with value 0
  // must be deleted via its prefix-coded term rather than a plain string term.
  BytesRef term0 = new BytesRef();
  NumericUtils.intToPrefixCoded(0, 0, term0);
  writer.deleteDocuments(new Term("id_int", term0));
  writer.close();

  IndexReader reader = DirectoryReader.open(dir);
  IndexSearcher search = newSearcher(reader);
  assertTrue(reader.hasDeletions());

  ScoreDoc[] result;
  Query q = new TermQuery(new Term("body", "body"));

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id_int", -20, 20, T, T), 100).scoreDocs;
  assertEquals("find all", 40, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id_int", 0, 20, T, T), 100).scoreDocs;
  assertEquals("find all", 20, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id_int", -20, 0, T, T), 100).scoreDocs;
  assertEquals("find all", 20, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id_int", 10, 20, T, T), 100).scoreDocs;
  assertEquals("find all", 11, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id_int", -20, -10, T, T), 100).scoreDocs;
  assertEquals("find all", 11, result.length);

  reader.close();
  dir.close();
}
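// NOTE: the boolean shorthands T and F, the id bounds minId/maxId, and the shared readers
// (signedIndexReader, signedIndex, signedIndexDir) used throughout these tests are not defined
// in this excerpt; they presumably come from the shared range-filter test base class. A minimal
// sketch of the assumed declarations (an assumption for readability, not necessarily the
// upstream code):
//
//   static final boolean T = true;         // shorthand for includeLower/includeUpper = true
//   static final boolean F = false;        // shorthand for includeLower/includeUpper = false
//   static int minId, maxId;               // smallest/largest "id" value in the fixture index
//   static IndexReader signedIndexReader;  // reader over the shared signed fixture index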
@Test
public void testFieldCacheRangeFilterDoubles() throws IOException {
  IndexReader reader = signedIndexReader;
  IndexSearcher search = newSearcher(reader);

  int numDocs = reader.numDocs();
  Double minIdO = Double.valueOf(minId + .5);
  Double medIdO = Double.valueOf(minIdO.floatValue() + ((maxId - minId)) / 2.0);

  ScoreDoc[] result;
  Query q = new TermQuery(new Term("body", "body"));

  result = search.search(q, FieldCacheRangeFilter.newDoubleRange("id_double", minIdO, medIdO, T, T), numDocs).scoreDocs;
  assertEquals("find all", numDocs / 2, result.length);

  int count = 0;
  result = search.search(q, FieldCacheRangeFilter.newDoubleRange("id_double", null, medIdO, F, T), numDocs).scoreDocs;
  count += result.length;
  result = search.search(q, FieldCacheRangeFilter.newDoubleRange("id_double", medIdO, null, F, F), numDocs).scoreDocs;
  count += result.length;
  assertEquals("sum of two concatenated ranges", numDocs, count);

  result = search.search(q, FieldCacheRangeFilter.newDoubleRange("id_double", null, null, T, T), numDocs).scoreDocs;
  assertEquals("find all", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newDoubleRange("id_double", Double.valueOf(Double.POSITIVE_INFINITY), null, F, F), numDocs).scoreDocs;
  assertEquals("infinity special case", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newDoubleRange("id_double", null, Double.valueOf(Double.NEGATIVE_INFINITY), F, F), numDocs).scoreDocs;
  assertEquals("infinity special case", 0, result.length);
}
public void testFieldCacheRangeFilterFloats() throws IOException {
  IndexReader reader = IndexReader.open(signedIndex.index, true);
  IndexSearcher search = new IndexSearcher(reader);

  int numDocs = reader.numDocs();
  Float minIdO = Float.valueOf(minId + .5f);
  Float medIdO = Float.valueOf(minIdO.floatValue() + ((float) (maxId - minId)) / 2.0f);

  ScoreDoc[] result;
  Query q = new TermQuery(new Term("body", "body"));

  result = search.search(q, FieldCacheRangeFilter.newFloatRange("id", minIdO, medIdO, T, T), numDocs).scoreDocs;
  assertEquals("find all", numDocs / 2, result.length);

  int count = 0;
  result = search.search(q, FieldCacheRangeFilter.newFloatRange("id", null, medIdO, F, T), numDocs).scoreDocs;
  count += result.length;
  result = search.search(q, FieldCacheRangeFilter.newFloatRange("id", medIdO, null, F, F), numDocs).scoreDocs;
  count += result.length;
  assertEquals("sum of two concatenated ranges", numDocs, count);

  result = search.search(q, FieldCacheRangeFilter.newFloatRange("id", null, null, T, T), numDocs).scoreDocs;
  assertEquals("find all", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newFloatRange("id", Float.valueOf(Float.POSITIVE_INFINITY), null, F, F), numDocs).scoreDocs;
  assertEquals("infinity special case", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newFloatRange("id", null, Float.valueOf(Float.NEGATIVE_INFINITY), F, F), numDocs).scoreDocs;
  assertEquals("infinity special case", 0, result.length);
}
public void testIsCacheAble() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  writer.addDocument(new Document());
  writer.close();

  IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));

  // not cacheable:
  assertDocIdSetCacheable(reader, new QueryWrapperFilter(new TermQuery(new Term("test", "value"))), false);
  // returns default empty docidset, always cacheable:
  assertDocIdSetCacheable(reader,
      NumericRangeFilter.newIntRange("test", Integer.valueOf(10000), Integer.valueOf(-10000), true, true), true);
  // is cacheable:
  assertDocIdSetCacheable(reader,
      FieldCacheRangeFilter.newIntRange("test", Integer.valueOf(10), Integer.valueOf(20), true, true), true);
  // a fixedbitset filter is always cacheable
  assertDocIdSetCacheable(reader, new Filter() {
    @Override
    public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) {
      return new FixedBitSet(context.reader().maxDoc());
    }
  }, true);

  reader.close();
  dir.close();
}
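// The assertDocIdSetCacheable helper called above is defined elsewhere in this test class and is
// not part of this excerpt. A minimal sketch of what such a check might look like, assuming it
// only verifies DocIdSet.isCacheable() on the wrapped atomic reader (the actual upstream helper
// may do more):
private void assertDocIdSetCacheable(IndexReader reader, Filter filter, boolean shouldCacheable)
    throws IOException {
  // SlowCompositeReaderWrapper is atomic, so its context is an AtomicReaderContext
  AtomicReaderContext context = (AtomicReaderContext) reader.getContext();
  DocIdSet set = filter.getDocIdSet(context, context.reader().getLiveDocs());
  // a null DocIdSet means "no documents" and is treated as trivially cacheable here
  boolean cacheable = (set == null) || set.isCacheable();
  assertEquals(shouldCacheable, cacheable);
}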
@Test
public void testRangeFilterId() throws IOException {
  IndexReader reader = signedIndexReader;
  IndexSearcher search = newSearcher(reader);

  int medId = ((maxId - minId) / 2);
  String minIP = pad(minId);
  String maxIP = pad(maxId);
  String medIP = pad(medId);

  int numDocs = reader.numDocs();
  assertEquals("num of docs", numDocs, 1 + maxId - minId);

  ScoreDoc[] result;
  Query q = new TermQuery(new Term("body", "body"));

  // test id, bounded on both ends
  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", minIP, maxIP, T, T), numDocs).scoreDocs;
  assertEquals("find all", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", minIP, maxIP, T, F), numDocs).scoreDocs;
  assertEquals("all but last", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", minIP, maxIP, F, T), numDocs).scoreDocs;
  assertEquals("all but first", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", minIP, maxIP, F, F), numDocs).scoreDocs;
  assertEquals("all but ends", numDocs - 2, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", medIP, maxIP, T, T), numDocs).scoreDocs;
  assertEquals("med and up", 1 + maxId - medId, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", minIP, medIP, T, T), numDocs).scoreDocs;
  assertEquals("up to med", 1 + medId - minId, result.length);

  // unbounded id
  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", null, null, T, T), numDocs).scoreDocs;
  assertEquals("find all", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", minIP, null, T, F), numDocs).scoreDocs;
  assertEquals("min and up", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", null, maxIP, F, T), numDocs).scoreDocs;
  assertEquals("max and down", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", minIP, null, F, F), numDocs).scoreDocs;
  assertEquals("not min, but up", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", null, maxIP, F, F), numDocs).scoreDocs;
  assertEquals("not max, but down", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", medIP, maxIP, T, F), numDocs).scoreDocs;
  assertEquals("med and up, not max", maxId - medId, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", minIP, medIP, F, T), numDocs).scoreDocs;
  assertEquals("not min, up to med", medId - minId, result.length);

  // very small sets
  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", minIP, minIP, F, F), numDocs).scoreDocs;
  assertEquals("min,min,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", medIP, medIP, F, F), numDocs).scoreDocs;
  assertEquals("med,med,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", maxIP, maxIP, F, F), numDocs).scoreDocs;
  assertEquals("max,max,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", minIP, minIP, T, T), numDocs).scoreDocs;
  assertEquals("min,min,T,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", null, minIP, F, T), numDocs).scoreDocs;
  assertEquals("nul,min,F,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", maxIP, maxIP, T, T), numDocs).scoreDocs;
  assertEquals("max,max,T,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", maxIP, null, T, F), numDocs).scoreDocs;
  assertEquals("max,nul,T,F", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("id", medIP, medIP, T, T), numDocs).scoreDocs;
  assertEquals("med,med,T,T", 1, result.length);
}
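// pad(int) is another helper from the shared test fixture that is not shown in this excerpt. For
// the string-range tests above to behave like numeric ranges, it must map ints to strings whose
// lexicographic order matches numeric order. A hedged sketch of one such encoding (the real
// fixture's prefix and width may differ):
static String padSketch(int n) {
  String prefix = "0";
  if (n < 0) {
    prefix = "-";                   // '-' sorts before '0', so negatives come first
    n = Integer.MAX_VALUE + n + 1;  // map negatives onto an increasing non-negative range
  }
  // fixed-width, zero-padded digits keep lexicographic order equal to numeric order
  return prefix + String.format("%010d", n);
}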
@Test
public void testFieldCacheRangeFilterLongs() throws IOException {
  IndexReader reader = signedIndexReader;
  IndexSearcher search = newSearcher(reader);

  int numDocs = reader.numDocs();
  int medId = ((maxId - minId) / 2);
  Long minIdO = Long.valueOf(minId);
  Long maxIdO = Long.valueOf(maxId);
  Long medIdO = Long.valueOf(medId);

  assertEquals("num of docs", numDocs, 1 + maxId - minId);

  ScoreDoc[] result;
  Query q = new TermQuery(new Term("body", "body"));

  // test id, bounded on both ends
  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", minIdO, maxIdO, T, T), numDocs).scoreDocs;
  assertEquals("find all", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", minIdO, maxIdO, T, F), numDocs).scoreDocs;
  assertEquals("all but last", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", minIdO, maxIdO, F, T), numDocs).scoreDocs;
  assertEquals("all but first", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", minIdO, maxIdO, F, F), numDocs).scoreDocs;
  assertEquals("all but ends", numDocs - 2, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", medIdO, maxIdO, T, T), numDocs).scoreDocs;
  assertEquals("med and up", 1 + maxId - medId, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", minIdO, medIdO, T, T), numDocs).scoreDocs;
  assertEquals("up to med", 1 + medId - minId, result.length);

  // unbounded id
  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", null, null, T, T), numDocs).scoreDocs;
  assertEquals("find all", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", minIdO, null, T, F), numDocs).scoreDocs;
  assertEquals("min and up", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", null, maxIdO, F, T), numDocs).scoreDocs;
  assertEquals("max and down", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", minIdO, null, F, F), numDocs).scoreDocs;
  assertEquals("not min, but up", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", null, maxIdO, F, F), numDocs).scoreDocs;
  assertEquals("not max, but down", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", medIdO, maxIdO, T, F), numDocs).scoreDocs;
  assertEquals("med and up, not max", maxId - medId, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", minIdO, medIdO, F, T), numDocs).scoreDocs;
  assertEquals("not min, up to med", medId - minId, result.length);

  // very small sets
  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", minIdO, minIdO, F, F), numDocs).scoreDocs;
  assertEquals("min,min,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", medIdO, medIdO, F, F), numDocs).scoreDocs;
  assertEquals("med,med,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", maxIdO, maxIdO, F, F), numDocs).scoreDocs;
  assertEquals("max,max,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", minIdO, minIdO, T, T), numDocs).scoreDocs;
  assertEquals("min,min,T,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", null, minIdO, F, T), numDocs).scoreDocs;
  assertEquals("nul,min,F,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", maxIdO, maxIdO, T, T), numDocs).scoreDocs;
  assertEquals("max,max,T,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", maxIdO, null, T, F), numDocs).scoreDocs;
  assertEquals("max,nul,T,F", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", medIdO, medIdO, T, T), numDocs).scoreDocs;
  assertEquals("med,med,T,T", 1, result.length);

  // special cases
  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", Long.valueOf(Long.MAX_VALUE), null, F, F), numDocs).scoreDocs;
  assertEquals("overflow special case", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", null, Long.valueOf(Long.MIN_VALUE), F, F), numDocs).scoreDocs;
  assertEquals("overflow special case", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newLongRange("id_long", maxIdO, minIdO, T, T), numDocs).scoreDocs;
  assertEquals("inverse range", 0, result.length);
}
@Test
public void testFieldCacheRangeFilterRand() throws IOException {
  IndexReader reader = signedIndexReader;
  IndexSearcher search = newSearcher(reader);

  String minRP = pad(signedIndexDir.minR);
  String maxRP = pad(signedIndexDir.maxR);

  int numDocs = reader.numDocs();
  assertEquals("num of docs", numDocs, 1 + maxId - minId);

  ScoreDoc[] result;
  Query q = new TermQuery(new Term("body", "body"));

  // test extremes, bounded on both ends
  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", minRP, maxRP, T, T), numDocs).scoreDocs;
  assertEquals("find all", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", minRP, maxRP, T, F), numDocs).scoreDocs;
  assertEquals("all but biggest", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", minRP, maxRP, F, T), numDocs).scoreDocs;
  assertEquals("all but smallest", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", minRP, maxRP, F, F), numDocs).scoreDocs;
  assertEquals("all but extremes", numDocs - 2, result.length);

  // unbounded
  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", minRP, null, T, F), numDocs).scoreDocs;
  assertEquals("smallest and up", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", null, maxRP, F, T), numDocs).scoreDocs;
  assertEquals("biggest and down", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", minRP, null, F, F), numDocs).scoreDocs;
  assertEquals("not smallest, but up", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", null, maxRP, F, F), numDocs).scoreDocs;
  assertEquals("not biggest, but down", numDocs - 1, result.length);

  // very small sets
  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", minRP, minRP, F, F), numDocs).scoreDocs;
  assertEquals("min,min,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", maxRP, maxRP, F, F), numDocs).scoreDocs;
  assertEquals("max,max,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", minRP, minRP, T, T), numDocs).scoreDocs;
  assertEquals("min,min,T,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", null, minRP, F, T), numDocs).scoreDocs;
  assertEquals("nul,min,F,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", maxRP, maxRP, T, T), numDocs).scoreDocs;
  assertEquals("max,max,T,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newStringRange("rand", maxRP, null, T, F), numDocs).scoreDocs;
  assertEquals("max,nul,T,F", 1, result.length);
}
// test using a sparse index (with deleted docs). The DocIdSet should be not cacheable, as it
// uses TermDocs if the range contains 0
public void testSparseIndex() throws IOException {
  RAMDirectory dir = new RAMDirectory();
  IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), T, IndexWriter.MaxFieldLength.LIMITED);

  for (int d = -20; d <= 20; d++) {
    Document doc = new Document();
    doc.add(new Field("id", Integer.toString(d), Field.Store.NO, Field.Index.NOT_ANALYZED));
    doc.add(new Field("body", "body", Field.Store.NO, Field.Index.NOT_ANALYZED));
    writer.addDocument(doc);
  }

  writer.optimize();
  writer.deleteDocuments(new Term("id", "0"));
  writer.close();

  IndexReader reader = IndexReader.open(dir, true);
  IndexSearcher search = new IndexSearcher(reader);
  assertTrue(reader.hasDeletions());

  ScoreDoc[] result;
  FieldCacheRangeFilter fcrf;
  Query q = new TermQuery(new Term("body", "body"));

  result = search.search(q,
      fcrf = FieldCacheRangeFilter.newByteRange("id", Byte.valueOf((byte) -20), Byte.valueOf((byte) 20), T, T),
      100).scoreDocs;
  assertFalse("DocIdSet must be not cacheable",
      fcrf.getDocIdSet(reader.getSequentialSubReaders()[0]).isCacheable());
  assertEquals("find all", 40, result.length);

  result = search.search(q,
      fcrf = FieldCacheRangeFilter.newByteRange("id", Byte.valueOf((byte) 0), Byte.valueOf((byte) 20), T, T),
      100).scoreDocs;
  assertFalse("DocIdSet must be not cacheable",
      fcrf.getDocIdSet(reader.getSequentialSubReaders()[0]).isCacheable());
  assertEquals("find all", 20, result.length);

  result = search.search(q,
      fcrf = FieldCacheRangeFilter.newByteRange("id", Byte.valueOf((byte) -20), Byte.valueOf((byte) 0), T, T),
      100).scoreDocs;
  assertFalse("DocIdSet must be not cacheable",
      fcrf.getDocIdSet(reader.getSequentialSubReaders()[0]).isCacheable());
  assertEquals("find all", 20, result.length);

  result = search.search(q,
      fcrf = FieldCacheRangeFilter.newByteRange("id", Byte.valueOf((byte) 10), Byte.valueOf((byte) 20), T, T),
      100).scoreDocs;
  assertTrue("DocIdSet must be cacheable",
      fcrf.getDocIdSet(reader.getSequentialSubReaders()[0]).isCacheable());
  assertEquals("find all", 11, result.length);

  result = search.search(q,
      fcrf = FieldCacheRangeFilter.newByteRange("id", Byte.valueOf((byte) -20), Byte.valueOf((byte) -10), T, T),
      100).scoreDocs;
  assertTrue("DocIdSet must be cacheable",
      fcrf.getDocIdSet(reader.getSequentialSubReaders()[0]).isCacheable());
  assertEquals("find all", 11, result.length);
}
public void testFieldCacheRangeFilterInts() throws IOException {
  IndexReader reader = IndexReader.open(signedIndex.index, true);
  IndexSearcher search = new IndexSearcher(reader);

  int numDocs = reader.numDocs();
  int medId = ((maxId - minId) / 2);
  Integer minIdO = Integer.valueOf(minId);
  Integer maxIdO = Integer.valueOf(maxId);
  Integer medIdO = Integer.valueOf(medId);

  assertEquals("num of docs", numDocs, 1 + maxId - minId);

  ScoreDoc[] result;
  Query q = new TermQuery(new Term("body", "body"));

  // test id, bounded on both ends
  FieldCacheRangeFilter fcrf;
  result = search.search(q, fcrf = FieldCacheRangeFilter.newIntRange("id", minIdO, maxIdO, T, T), numDocs).scoreDocs;
  assertTrue(fcrf.getDocIdSet(reader.getSequentialSubReaders()[0]).isCacheable());
  assertEquals("find all", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", minIdO, maxIdO, T, F), numDocs).scoreDocs;
  assertEquals("all but last", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", minIdO, maxIdO, F, T), numDocs).scoreDocs;
  assertEquals("all but first", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", minIdO, maxIdO, F, F), numDocs).scoreDocs;
  assertEquals("all but ends", numDocs - 2, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", medIdO, maxIdO, T, T), numDocs).scoreDocs;
  assertEquals("med and up", 1 + maxId - medId, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", minIdO, medIdO, T, T), numDocs).scoreDocs;
  assertEquals("up to med", 1 + medId - minId, result.length);

  // unbounded id
  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", null, null, T, T), numDocs).scoreDocs;
  assertEquals("find all", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", minIdO, null, T, F), numDocs).scoreDocs;
  assertEquals("min and up", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", null, maxIdO, F, T), numDocs).scoreDocs;
  assertEquals("max and down", numDocs, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", minIdO, null, F, F), numDocs).scoreDocs;
  assertEquals("not min, but up", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", null, maxIdO, F, F), numDocs).scoreDocs;
  assertEquals("not max, but down", numDocs - 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", medIdO, maxIdO, T, F), numDocs).scoreDocs;
  assertEquals("med and up, not max", maxId - medId, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", minIdO, medIdO, F, T), numDocs).scoreDocs;
  assertEquals("not min, up to med", medId - minId, result.length);

  // very small sets
  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", minIdO, minIdO, F, F), numDocs).scoreDocs;
  assertEquals("min,min,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", medIdO, medIdO, F, F), numDocs).scoreDocs;
  assertEquals("med,med,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", maxIdO, maxIdO, F, F), numDocs).scoreDocs;
  assertEquals("max,max,F,F", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", minIdO, minIdO, T, T), numDocs).scoreDocs;
  assertEquals("min,min,T,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", null, minIdO, F, T), numDocs).scoreDocs;
  assertEquals("nul,min,F,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", maxIdO, maxIdO, T, T), numDocs).scoreDocs;
  assertEquals("max,max,T,T", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", maxIdO, null, T, F), numDocs).scoreDocs;
  assertEquals("max,nul,T,F", 1, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", medIdO, medIdO, T, T), numDocs).scoreDocs;
  assertEquals("med,med,T,T", 1, result.length);

  // special cases
  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", Integer.valueOf(Integer.MAX_VALUE), null, F, F), numDocs).scoreDocs;
  assertEquals("overflow special case", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", null, Integer.valueOf(Integer.MIN_VALUE), F, F), numDocs).scoreDocs;
  assertEquals("overflow special case", 0, result.length);

  result = search.search(q, FieldCacheRangeFilter.newIntRange("id", maxIdO, minIdO, T, T), numDocs).scoreDocs;
  assertEquals("inverse range", 0, result.length);
}