Example #1
  public void testDoubleFieldMinMax() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    int numDocs = atLeast(100);
    double minValue = Double.POSITIVE_INFINITY;
    double maxValue = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      double num = random().nextDouble();
      minValue = Math.min(num, minValue);
      maxValue = Math.max(num, maxValue);
      doc.add(new LegacyDoubleField("field", num, Field.Store.NO));
      w.addDocument(doc);
    }

    IndexReader r = w.getReader();

    Terms terms = MultiFields.getTerms(r, "field");

    assertEquals(
        minValue, NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMinLong(terms)), 0.0);
    assertEquals(
        maxValue, NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMaxLong(terms)), 0.0);

    r.close();
    w.close();
    dir.close();
  }
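
Example #1 works because LegacyDoubleField indexes each double with the same order-preserving sortable-long encoding before prefix-coding its terms, so the smallest and largest terms of the field correspond to the smallest and largest doubles, and NumericUtils.sortableLongToDouble inverts that encoding. The following round-trip sketch is only an illustration of that mapping; the class name and main harness are not part of the original test.

import org.apache.lucene.util.NumericUtils;

public class SortableDoubleDemo {
  public static void main(String[] args) {
    double d = 0.375;
    // order-preserving encoding: comparing the longs orders the same as comparing the doubles
    long sortable = NumericUtils.doubleToSortableLong(d);
    // exact inverse, as used in the assertions of Example #1
    double back = NumericUtils.sortableLongToDouble(sortable);
    System.out.println(d + " -> " + sortable + " -> " + back);
  }
}
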
Example #2
  public void testLongFieldMinMax() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    int numDocs = atLeast(100);
    long minValue = Long.MAX_VALUE;
    long maxValue = Long.MIN_VALUE;
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      long num = random().nextLong();
      minValue = Math.min(num, minValue);
      maxValue = Math.max(num, maxValue);
      doc.add(new LegacyLongField("field", num, Field.Store.NO));
      w.addDocument(doc);
    }

    IndexReader r = w.getReader();

    Terms terms = MultiFields.getTerms(r, "field");
    assertEquals(Long.valueOf(minValue), LegacyNumericUtils.getMinLong(terms));
    assertEquals(Long.valueOf(maxValue), LegacyNumericUtils.getMaxLong(terms));

    r.close();
    w.close();
    dir.close();
  }
Example #3
 @Override
 public BytesRef indexedValueForSearch(Object value) {
   BytesRefBuilder bytesRef = new BytesRefBuilder();
   LegacyNumericUtils.intToPrefixCoded(
       parseValue(value), 0, bytesRef); // 0 because of exact match
   return bytesRef.get();
 }
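
The "0 because of exact match" comment is the key detail here: a shift of 0 prefix-codes the value at full precision, so the resulting term denotes exactly one numeric value. Below is a minimal, illustrative round-trip sketch using the int variants of the same calls; the class name and main harness are assumptions, and the package of LegacyNumericUtils varies by Lucene version (org.apache.lucene.util in the 6.x core, later in the backward-codecs module).

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LegacyNumericUtils;

public class PrefixCodedRoundTrip {
  public static void main(String[] args) {
    int value = 42;
    BytesRefBuilder builder = new BytesRefBuilder();
    // shift 0: encode all 32 bits, i.e. the full-precision term used for exact matches
    LegacyNumericUtils.intToPrefixCoded(value, 0, builder);
    BytesRef term = builder.get();
    // decoding the full-precision term recovers the original value
    int decoded = LegacyNumericUtils.prefixCodedToInt(term);
    System.out.println(value + " -> " + term + " -> " + decoded);
  }
}
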
Example #4
 void dump(PrintStream out) {
   out.println(field + ":");
   final BytesRef ref = new BytesRef();
   for (int i = 0; i < terms.size(); i++) {
     terms.get(ords[i], ref);
     out.print(ref + " " + ref.utf8ToString() + " ");
     try {
       // first assume the term is a prefix-coded long
       out.print(Long.toHexString(LegacyNumericUtils.prefixCodedToLong(ref)) + "L");
     } catch (Exception e) {
       try {
         // not a long: fall back to decoding it as a prefix-coded int
         out.print(Integer.toHexString(LegacyNumericUtils.prefixCodedToInt(ref)) + "i");
       } catch (Exception ee) {
         // not a numeric term at all: keep only the raw/utf8 dump printed above
       }
     }
     out.println(" score=" + scores[ords[i]]);
     out.println("");
   }
 }
Example #5
 @Override
 public FieldStats.Long stats(IndexReader reader) throws IOException {
   int maxDoc = reader.maxDoc();
   Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name());
   if (terms == null) {
     return null;
   }
   long minValue = LegacyNumericUtils.getMinInt(terms);
   long maxValue = LegacyNumericUtils.getMaxInt(terms);
   return new FieldStats.Long(
       maxDoc,
       terms.getDocCount(),
       terms.getSumDocFreq(),
       terms.getSumTotalTermFreq(),
       isSearchable(),
       isAggregatable(),
       minValue,
       maxValue);
 }
Example #6
 /**
  * Encode as a BytesRef using a reusable object. This allows us to lazily create the BytesRef
  * (which is quite expensive), only when we need it.
  */
 @Override
 protected void fillBytesRef(BytesRefBuilder result) {
   assert result != null;
   LegacyNumericUtils.longToPrefixCoded(start, shift, result);
 }
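
The javadoc above describes a reuse pattern rather than a particular value: the caller passes in a single BytesRefBuilder and longToPrefixCoded fills it in place, so the comparatively expensive bytes are only materialized when a term is actually needed. The standalone sketch below illustrates that pattern outside the token stream; the class name and the precision step of 16 are arbitrary choices, not code from the original class.

import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LegacyNumericUtils;

public class ReusablePrefixEncoding {
  public static void main(String[] args) {
    long value = 1234567890123L;
    BytesRefBuilder reusable = new BytesRefBuilder(); // allocated once, reused for every term
    for (int shift = 0; shift < 64; shift += 16) {    // 16 is an arbitrary precision step
      // each call overwrites the builder with the prefix-coded term for this precision level
      LegacyNumericUtils.longToPrefixCoded(value, shift, reusable);
      System.out.println("shift=" + shift + " term=" + reusable.get());
    }
  }
}
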
Example #7
 public void testEmptyIntFieldMinMax() throws Exception {
   assertNull(LegacyNumericUtils.getMinInt(EMPTY_TERMS));
   assertNull(LegacyNumericUtils.getMaxInt(EMPTY_TERMS));
 }
Example #8
  private void testRandomTrieAndClassicRangeQuery(int precisionStep) throws Exception {
    String field = "field" + precisionStep;
    int totalTermCountT = 0, totalTermCountC = 0, termCountT, termCountC;
    int num = TestUtil.nextInt(random(), 10, 20);
    for (int i = 0; i < num; i++) {
      long lower = (long) (random().nextDouble() * noDocs * distance) + startOffset;
      long upper = (long) (random().nextDouble() * noDocs * distance) + startOffset;
      if (lower > upper) {
        long a = lower;
        lower = upper;
        upper = a;
      }
      final BytesRef lowerBytes, upperBytes;
      BytesRefBuilder b = new BytesRefBuilder();
      LegacyNumericUtils.longToPrefixCoded(lower, 0, b);
      lowerBytes = b.toBytesRef();
      LegacyNumericUtils.longToPrefixCoded(upper, 0, b);
      upperBytes = b.toBytesRef();

      // test inclusive range
      LegacyNumericRangeQuery<Long> tq =
          LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true);
      TermRangeQuery cq = new TermRangeQuery(field, lowerBytes, upperBytes, true, true);
      TopDocs tTopDocs = searcher.search(tq, 1);
      TopDocs cTopDocs = searcher.search(cq, 1);
      assertEquals(
          "Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal",
          cTopDocs.totalHits,
          tTopDocs.totalHits);
      totalTermCountT += termCountT = countTerms(tq);
      totalTermCountC += termCountC = countTerms(cq);
      checkTermCounts(precisionStep, termCountT, termCountC);
      // test exclusive range
      tq = LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, false);
      cq = new TermRangeQuery(field, lowerBytes, upperBytes, false, false);
      tTopDocs = searcher.search(tq, 1);
      cTopDocs = searcher.search(cq, 1);
      assertEquals(
          "Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal",
          cTopDocs.totalHits,
          tTopDocs.totalHits);
      totalTermCountT += termCountT = countTerms(tq);
      totalTermCountC += termCountC = countTerms(cq);
      checkTermCounts(precisionStep, termCountT, termCountC);
      // test left exclusive range
      tq = LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, true);
      cq = new TermRangeQuery(field, lowerBytes, upperBytes, false, true);
      tTopDocs = searcher.search(tq, 1);
      cTopDocs = searcher.search(cq, 1);
      assertEquals(
          "Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal",
          cTopDocs.totalHits,
          tTopDocs.totalHits);
      totalTermCountT += termCountT = countTerms(tq);
      totalTermCountC += termCountC = countTerms(cq);
      checkTermCounts(precisionStep, termCountT, termCountC);
      // test right exclusive range
      tq = LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, false);
      cq = new TermRangeQuery(field, lowerBytes, upperBytes, true, false);
      tTopDocs = searcher.search(tq, 1);
      cTopDocs = searcher.search(cq, 1);
      assertEquals(
          "Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal",
          cTopDocs.totalHits,
          tTopDocs.totalHits);
      totalTermCountT += termCountT = countTerms(tq);
      totalTermCountC += termCountC = countTerms(cq);
      checkTermCounts(precisionStep, termCountT, termCountC);
    }

    checkTermCounts(precisionStep, totalTermCountT, totalTermCountC);
    if (VERBOSE && precisionStep != Integer.MAX_VALUE) {
      System.out.println("Average number of terms during random search on '" + field + "':");
      System.out.println(" Numeric query: " + (((double) totalTermCountT) / (num * 4)));
      System.out.println(" Classical query: " + (((double) totalTermCountC) / (num * 4)));
    }
  }