public GeoDistanceRangeFilter(
      double lat,
      double lon,
      Double lowerVal,
      Double upperVal,
      boolean includeLower,
      boolean includeUpper,
      GeoDistance geoDistance,
      String fieldName,
      GeoPointFieldMapper mapper,
      FieldDataCache fieldDataCache,
      String optimizeBbox) {
    this.lat = lat;
    this.lon = lon;
    this.geoDistance = geoDistance;
    this.fieldName = fieldName;
    this.fieldDataCache = fieldDataCache;

    this.fixedSourceDistance = geoDistance.fixedSourceDistance(lat, lon, DistanceUnit.MILES);

    if (lowerVal != null) {
      double f = lowerVal.doubleValue();
      long i = NumericUtils.doubleToSortableLong(f);
      inclusiveLowerPoint = NumericUtils.sortableLongToDouble(includeLower ? i : (i + 1L));
    } else {
      inclusiveLowerPoint = Double.NEGATIVE_INFINITY;
    }
    if (upperVal != null) {
      double f = upperVal.doubleValue();
      long i = NumericUtils.doubleToSortableLong(f);
      inclusiveUpperPoint = NumericUtils.sortableLongToDouble(includeUpper ? i : (i - 1L));
    } else {
      inclusiveUpperPoint = Double.POSITIVE_INFINITY;
      // we disable the bounding box optimization in this case, since the upper point is unbounded;
      // a bounding box built up to that point would effectively include everything
      // TODO we could build a bounding box up to "from" and negate it
      optimizeBbox = null;
    }

    if (optimizeBbox != null && !"none".equals(optimizeBbox)) {
      distanceBoundingCheck =
          GeoDistance.distanceBoundingCheck(lat, lon, inclusiveUpperPoint, DistanceUnit.MILES);
      if ("memory".equals(optimizeBbox)) {
        boundingBoxFilter = null;
      } else if ("indexed".equals(optimizeBbox)) {
        boundingBoxFilter =
            IndexedGeoBoundingBoxFilter.create(
                distanceBoundingCheck.topLeft(), distanceBoundingCheck.bottomRight(), mapper);
        distanceBoundingCheck =
            GeoDistance.ALWAYS_INSTANCE; // fine, we do the bounding box check using the filter
      } else {
        throw new ElasticSearchIllegalArgumentException(
            "type [" + optimizeBbox + "] for bounding box optimization not supported");
      }
    } else {
      distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE;
      boundingBoxFilter = null;
    }
  }
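The exclusive-to-inclusive bound handling above relies on doubleToSortableLong being order-preserving, so adding 1 to the sortable form yields the next representable double. A minimal illustrative sketch (not part of the original filter; positive finite values assumed):

  // "greater than x" thus becomes "greater than or equal to nextUp(x)"
  double x = 1.5;
  long sortable = NumericUtils.doubleToSortableLong(x);
  double nextInclusive = NumericUtils.sortableLongToDouble(sortable + 1L);
  assert nextInclusive == Math.nextUp(x);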
Example #2
  /** We fake a double test using the long-to-double conversion of NumericUtils. */
  private void testDoubleRange(int precisionStep) throws Exception {
    final String field = "ascfield" + precisionStep;
    final long lower = -1000L, upper = +2000L;

    Query tq =
        NumericRangeQuery.newDoubleRange(
            field,
            precisionStep,
            NumericUtils.sortableLongToDouble(lower),
            NumericUtils.sortableLongToDouble(upper),
            true,
            true);
    TopDocs tTopDocs = searcher.search(tq, 1);
    assertEquals(
        "Returned count of range query must be equal to inclusive range length",
        upper - lower + 1,
        tTopDocs.totalHits);

    Filter tf =
        NumericRangeFilter.newDoubleRange(
            field,
            precisionStep,
            NumericUtils.sortableLongToDouble(lower),
            NumericUtils.sortableLongToDouble(upper),
            true,
            true);
    tTopDocs = searcher.search(new MatchAllDocsQuery(), tf, 1);
    assertEquals(
        "Returned count of range filter must be equal to inclusive range length",
        upper - lower + 1,
        tTopDocs.totalHits);
  }
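The "fake double" trick works because the two conversions are exact inverses: the double range with endpoints sortableLongToDouble(lower) and sortableLongToDouble(upper) is converted back to the sortable-long range [lower, upper] inside the query. A quick illustrative check (not part of the original test):

  long lower = -1000L, upper = +2000L;
  assert NumericUtils.doubleToSortableLong(NumericUtils.sortableLongToDouble(lower)) == lower;
  assert NumericUtils.doubleToSortableLong(NumericUtils.sortableLongToDouble(upper)) == upper;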
Example #3
  public void testDoubleFieldMinMax() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    int numDocs = atLeast(100);
    double minValue = Double.POSITIVE_INFINITY;
    double maxValue = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      double num = random().nextDouble();
      minValue = Math.min(num, minValue);
      maxValue = Math.max(num, maxValue);
      doc.add(new LegacyDoubleField("field", num, Field.Store.NO));
      w.addDocument(doc);
    }

    IndexReader r = w.getReader();

    Terms terms = MultiFields.getTerms(r, "field");

    assertEquals(
        minValue, NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMinLong(terms)), 0.0);
    assertEquals(
        maxValue, NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMaxLong(terms)), 0.0);

    r.close();
    w.close();
    dir.close();
  }
Example #4
  private void addPoint(IndexWriter writer, String name, double lat, double lng)
      throws IOException {

    Document doc = new Document();

    doc.add(new Field("name", name, Field.Store.YES, Field.Index.TOKENIZED));

    // convert the lat / lng values to Lucene fields
    doc.add(
        new Field(
            latField,
            NumericUtils.doubleToPrefixCoded(lat),
            Field.Store.YES,
            Field.Index.UN_TOKENIZED));
    doc.add(
        new Field(
            lngField,
            NumericUtils.doubleToPrefixCoded(lng),
            Field.Store.YES,
            Field.Index.UN_TOKENIZED));

    // add a default meta field to make searching all documents easy
    doc.add(new Field("metafile", "doc", Field.Store.YES, Field.Index.TOKENIZED));
    writer.addDocument(doc);
  }
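Because the coordinates are stored as prefix-coded terms, a point indexed this way can be looked up again with a plain TermQuery using the same encoding. A hedged sketch (latField as declared elsewhere in this class):

  // exact match on the encoded latitude term
  Query byLatitude = new TermQuery(new Term(latField, NumericUtils.doubleToPrefixCoded(lat)));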
Example #5
 @Override
 public BytesRef indexedValueForSearch(Object value) {
   long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value));
   BytesRefBuilder bytesRef = new BytesRefBuilder();
   NumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match
   return bytesRef.get();
 }
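Decoding such an exact-match term back into the original double goes through the matching inverse calls. An illustrative sketch (someDoubleValue is a hypothetical input):

  BytesRef term = indexedValueForSearch(someDoubleValue);
  double original = NumericUtils.sortableLongToDouble(NumericUtils.prefixCodedToLong(term));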
Example #6
 @Override
 public BytesRef indexedValueForSearch(Object value) {
   int intValue = NumericUtils.floatToSortableInt(parseValue(value));
   BytesRefBuilder bytesRef = new BytesRefBuilder();
   NumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match
   return bytesRef.get();
 }
 @Override
 public void fillBytesRef() {
   assert valueSize == 64 || valueSize == 32;
   if (valueSize == 64) {
     NumericUtils.longToPrefixCoded(value, shift, bytes);
   } else {
     NumericUtils.intToPrefixCoded((int) value, shift, bytes);
   }
 }
Example #8
 private static byte[][] pack(Double value) {
   if (value == null) {
     // OK: open ended range
     return new byte[1][];
   }
   byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_LONG]};
   NumericUtils.longToBytesDirect(NumericUtils.doubleToSortableLong(value), result[0], 0);
   return result;
 }
Example #9
 private static byte[][] pack(Float value) {
   if (value == null) {
     // OK: open ended range
     return new byte[1][];
   }
   byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_INT]};
   NumericUtils.intToBytesDirect(NumericUtils.floatToSortableInt(value), result[0], 0);
   return result;
 }
 @Override
 public BytesRef getBytesRef() {
   assert valueSize == 64 || valueSize == 32;
   if (valueSize == 64) {
     NumericUtils.longToPrefixCoded(value, shift, bytes);
   } else {
     NumericUtils.intToPrefixCoded((int) value, shift, bytes);
   }
   return bytes.get();
 }
Example #11
 @Override
 public FieldStats stats(Terms terms, int maxDoc) throws IOException {
   float minValue = NumericUtils.sortableIntToFloat(NumericUtils.getMinInt(terms));
   float maxValue = NumericUtils.sortableIntToFloat(NumericUtils.getMaxInt(terms));
   return new FieldStats.Float(
       maxDoc,
       terms.getDocCount(),
       terms.getSumDocFreq(),
       terms.getSumTotalTermFreq(),
       minValue,
       maxValue);
 }
 @Override
 public FieldStats stats(Terms terms, int maxDoc) throws IOException {
   long minValue = NumericUtils.getMinInt(terms);
   long maxValue = NumericUtils.getMaxInt(terms);
   return new FieldStats.Long(
       maxDoc,
       terms.getDocCount(),
       terms.getSumDocFreq(),
       terms.getSumTotalTermFreq(),
       minValue,
       maxValue);
 }
 private void assertLegacyQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query)
     throws IOException {
   assertThat(query, instanceOf(GeoDistanceRangeQuery.class));
   GeoDistanceRangeQuery geoQuery = (GeoDistanceRangeQuery) query;
   assertThat(geoQuery.fieldName(), equalTo(queryBuilder.fieldName()));
   if (queryBuilder.point() != null) {
     GeoPoint expectedPoint = new GeoPoint(queryBuilder.point());
     if (GeoValidationMethod.isCoerce(queryBuilder.getValidationMethod())) {
       GeoUtils.normalizePoint(expectedPoint, true, true);
     }
     assertThat(geoQuery.lat(), equalTo(expectedPoint.lat()));
     assertThat(geoQuery.lon(), equalTo(expectedPoint.lon()));
   }
   assertThat(geoQuery.geoDistance(), equalTo(queryBuilder.geoDistance()));
   if (queryBuilder.from() != null && queryBuilder.from() instanceof Number) {
     double fromValue = ((Number) queryBuilder.from()).doubleValue();
     if (queryBuilder.unit() != null) {
       fromValue = queryBuilder.unit().toMeters(fromValue);
     }
     if (queryBuilder.geoDistance() != null) {
       fromValue = queryBuilder.geoDistance().normalize(fromValue, DistanceUnit.DEFAULT);
     }
     double fromSlop = Math.abs(fromValue) / 1000;
     if (queryBuilder.includeLower() == false) {
       fromSlop =
           NumericUtils.sortableLongToDouble(
                   (NumericUtils.doubleToSortableLong(Math.abs(fromValue)) + 1L))
               / 1000.0;
     }
     assertThat(geoQuery.minInclusiveDistance(), closeTo(fromValue, fromSlop));
   }
   if (queryBuilder.to() != null && queryBuilder.to() instanceof Number) {
     double toValue = ((Number) queryBuilder.to()).doubleValue();
     if (queryBuilder.unit() != null) {
       toValue = queryBuilder.unit().toMeters(toValue);
     }
     if (queryBuilder.geoDistance() != null) {
       toValue = queryBuilder.geoDistance().normalize(toValue, DistanceUnit.DEFAULT);
     }
     double toSlop = Math.abs(toValue) / 1000;
     if (queryBuilder.includeUpper() == false) {
       toSlop =
           NumericUtils.sortableLongToDouble(
                   (NumericUtils.doubleToSortableLong(Math.abs(toValue)) - 1L))
               / 1000.0;
     }
     assertThat(geoQuery.maxInclusiveDistance(), closeTo(toValue, toSlop));
   }
 }
 public void testIntStream() throws Exception {
   final NumericTokenStream stream = new NumericTokenStream().setIntValue(ivalue);
    // use getAttribute to test if the attributes really exist; if not, an IAE will be thrown
   final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class);
   final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class);
   final NumericTokenStream.NumericTermAttribute numericAtt =
       stream.getAttribute(NumericTokenStream.NumericTermAttribute.class);
   final BytesRef bytes = bytesAtt.getBytesRef();
   stream.reset();
   assertEquals(32, numericAtt.getValueSize());
   for (int shift = 0; shift < 32; shift += NumericUtils.PRECISION_STEP_DEFAULT) {
     assertTrue("New token is available", stream.incrementToken());
     assertEquals("Shift value wrong", shift, numericAtt.getShift());
     final int hash = bytesAtt.fillBytesRef();
     assertEquals("Hash incorrect", bytes.hashCode(), hash);
     assertEquals(
         "Term is incorrectly encoded",
         ivalue & ~((1 << shift) - 1),
         NumericUtils.prefixCodedToInt(bytes));
     assertEquals(
         "Term raw value is incorrectly encoded",
         ((long) ivalue) & ~((1L << shift) - 1L),
         numericAtt.getRawValue());
     assertEquals(
         "Type incorrect",
         (shift == 0)
             ? NumericTokenStream.TOKEN_TYPE_FULL_PREC
             : NumericTokenStream.TOKEN_TYPE_LOWER_PREC,
         typeAtt.type());
   }
   assertFalse("More tokens available", stream.incrementToken());
   stream.end();
   stream.close();
 }
Example #15
 private static byte[][] pack(Integer value) {
   if (value == null) {
     // OK: open ended range
     return new byte[1][];
   }
   byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_INT]};
   NumericUtils.intToBytes(value, result[0], 0);
   return result;
 }
  /**
   * Converts a list of long values to their BytesRef representation, as produced by {@link
   * org.apache.lucene.analysis.NumericTokenStream}
   */
 private BytesRef[] toBytesRefs(long[] values) {
   BytesRef[] bytesRefs = new BytesRef[values.length];
   for (int i = 0; i < values.length; i++) {
     BytesRefBuilder b = new BytesRefBuilder();
     NumericUtils.longToPrefixCoded(values[i], 0, b);
     bytesRefs[i] = b.toBytesRef();
   }
   return bytesRefs;
 }
  /**
   * Test that we are able to index 100 documents, that job management works during that time,
   * and that at least one of the submissions exists in the index afterwards.
   */
  @Test
  public void testCommit() throws CorruptIndexException, IOException, InterruptedException {

    // Send the first 100 submissions to the current transaction.
    Iterator<Submission> itr = subRepo.findAllSubmissions();

    long lastSubId = 0;
    for (int i = 0; i < 100; i++) {
      Submission sub = itr.next();
      assertNotNull(sub);
      indexer.updated(sub);

      lastSubId = sub.getId();
    }

    // Assert no job is running.
    assertTrue(lastSubId > 0);
    assertFalse(indexer.isJobRunning());
    assertEquals("None", indexer.getCurrentJobLabel());
    assertEquals(-1L, indexer.getCurrentJobProgress());
    assertEquals(-1L, indexer.getCurrentJobTotal());
    assertTrue(indexer.isUpdated(lastSubId));

    // Commit the 100 jobs.
    indexer.commit(false);

    // Check that a job is running
    assertTrue(indexer.isJobRunning());
    assertEquals("Update Index", indexer.getCurrentJobLabel());
    assertTrue(indexer.getCurrentJobProgress() >= 0 && indexer.getCurrentJobProgress() <= 100);
    assertEquals(100L, indexer.getCurrentJobTotal());

    // Wait for the current job to complete
    for (int i = 0; i < 1000; i++) {
      Thread.sleep(10);
      if (!indexer.isJobRunning()) break;
    }
    assertFalse(indexer.isJobRunning());

    // Check that we can find the documents we indexed.
    IndexReader reader = IndexReader.open(indexer.index);
    IndexSearcher searcher = new IndexSearcher(reader);
    Query query = new TermQuery(new Term("subId", NumericUtils.longToPrefixCoded(lastSubId)));

    TopDocs topDocs = searcher.search(query, 1);
    assertNotNull(topDocs);
    assertNotNull(topDocs.scoreDocs);
    assertTrue(topDocs.scoreDocs.length > 0);
    Document doc = searcher.doc(topDocs.scoreDocs[0].doc);
    assertNotNull(doc);
    assertEquals(String.valueOf(lastSubId), doc.get("subId"));
    searcher.close();
  }
Example #18
  public static void displayPointRanges(Scanner in) {
    double[][] point = getPoints(in, 1);
    long hash, hashUpper;
    double lon, lat, lonUpper, latUpper;

    for (int i = 63; i >= 45; i -= GeoPointField.PRECISION_STEP) {
      BytesRefBuilder brb = new BytesRefBuilder();
      NumericUtils.longToPrefixCoded(
          GeoUtils.mortonHash(point[0][LON_INDEX], point[0][LAT_INDEX]), i, brb);
      BytesRef br = brb.get();
      hash = NumericUtils.prefixCodedToLong(br);
      hashUpper = hash | ((1L << i) - 1);
      lon = GeoUtils.mortonUnhashLon(hash);
      lat = GeoUtils.mortonUnhashLat(hash);
      lonUpper = GeoUtils.mortonUnhashLon(hashUpper);
      latUpper = GeoUtils.mortonUnhashLat(hashUpper);
      System.out.println(
          i + ": " + br + " " + hash + " (" + lon + "," + lat + ")" + " : " + "(" + lonUpper + ","
              + latUpper + ")");
    }
  }
Example #19
  public LongFilter convertToDoubleFilter() {
    if (isPartitionBased()) {
      return new PartitionedLongFilter();
    }

    int numValids = includeValues == null ? 0 : includeValues.size();
    int numInvalids = excludeValues == null ? 0 : excludeValues.size();
    SetBackedLongFilter result = new SetBackedLongFilter(numValids, numInvalids);
    if (includeValues != null) {
      for (BytesRef val : includeValues) {
        double dval = Double.parseDouble(val.utf8ToString());
        result.addAccept(NumericUtils.doubleToSortableLong(dval));
      }
    }
    if (excludeValues != null) {
      for (BytesRef val : excludeValues) {
        double dval = Double.parseDouble(val.utf8ToString());
        result.addReject(NumericUtils.doubleToSortableLong(dval));
      }
    }
    return result;
  }
  // test using a sparse index (with deleted docs).
  @Test
  public void testSparseIndex() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer =
        new IndexWriter(
            dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));

    for (int d = -20; d <= 20; d++) {
      Document doc = new Document();
      doc.add(new IntField("id_int", d, Field.Store.NO));
      doc.add(newStringField("body", "body", Field.Store.NO));
      writer.addDocument(doc);
    }

    writer.forceMerge(1);
    BytesRef term0 = new BytesRef();
    NumericUtils.intToPrefixCoded(0, 0, term0);
    writer.deleteDocuments(new Term("id_int", term0));
    writer.close();

    IndexReader reader = DirectoryReader.open(dir);
    IndexSearcher search = newSearcher(reader);
    assertTrue(reader.hasDeletions());

    ScoreDoc[] result;
    Query q = new TermQuery(new Term("body", "body"));

    result =
        search.search(q, FieldCacheRangeFilter.newIntRange("id_int", -20, 20, T, T), 100).scoreDocs;
    assertEquals("find all", 40, result.length);

    result =
        search.search(q, FieldCacheRangeFilter.newIntRange("id_int", 0, 20, T, T), 100).scoreDocs;
    assertEquals("find all", 20, result.length);

    result =
        search.search(q, FieldCacheRangeFilter.newIntRange("id_int", -20, 0, T, T), 100).scoreDocs;
    assertEquals("find all", 20, result.length);

    result =
        search.search(q, FieldCacheRangeFilter.newIntRange("id_int", 10, 20, T, T), 100).scoreDocs;
    assertEquals("find all", 11, result.length);

    result =
        search.search(q, FieldCacheRangeFilter.newIntRange("id_int", -20, -10, T, T), 100)
            .scoreDocs;
    assertEquals("find all", 11, result.length);
    reader.close();
    dir.close();
  }
Example #21
 private String convertNumber(Number number) {
   if (Integer.class.isInstance(number)) {
     return NumericUtils.intToPrefixCoded(number.intValue());
   } else if (Double.class.isInstance(number)) {
     return NumericUtils.doubleToPrefixCoded(number.doubleValue());
   } else if (Long.class.isInstance(number)) {
     return NumericUtils.longToPrefixCoded(number.longValue());
   } else if (Float.class.isInstance(number)) {
     return NumericUtils.floatToPrefixCoded(number.floatValue());
   } else if (Byte.class.isInstance(number)) {
     return NumericUtils.intToPrefixCoded(number.intValue());
   } else if (Short.class.isInstance(number)) {
     return NumericUtils.intToPrefixCoded(number.intValue());
   } else if (BigDecimal.class.isInstance(number)) {
     return NumericUtils.doubleToPrefixCoded(number.doubleValue());
   } else if (BigInteger.class.isInstance(number)) {
     return NumericUtils.longToPrefixCoded(number.longValue());
   } else {
     throw new IllegalArgumentException("Unsupported numeric type " + number.getClass().getName());
   }
 }
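A hedged usage sketch (field name hypothetical): the returned prefix-coded string is the term text for an exact match against a numeric field indexed with precision step 0.

  Query exactPrice = new TermQuery(new Term("price", convertNumber(42)));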
  /** Test that the index can be rebuilt. */
  @Test
  public void testRebuild() throws CorruptIndexException, IOException, InterruptedException {

    // Assert no job is running
    assertFalse(indexer.isJobRunning());
    assertEquals("None", indexer.getCurrentJobLabel());
    assertEquals(-1L, indexer.getCurrentJobProgress());
    assertEquals(-1L, indexer.getCurrentJobTotal());

    // Kickoff rebuilding the index
    indexer.rebuild(false);

    // Check that a job is running
    assertTrue(indexer.isJobRunning());
    assertEquals("Rebuild Index", indexer.getCurrentJobLabel());
    assertTrue(indexer.getCurrentJobProgress() >= 0);
    assertTrue(indexer.getCurrentJobTotal() >= 100);

    // Wait for the current job to complete
    for (int i = 0; i < 1000; i++) {
      Thread.sleep(10);
      if (!indexer.isJobRunning()) break;
    }
    assertFalse(indexer.isJobRunning());

    // Check that the first submission is present in the index.
    Submission sub = subRepo.findAllSubmissions().next();
    IndexReader reader = IndexReader.open(indexer.index);
    IndexSearcher searcher = new IndexSearcher(reader);
    Query query = new TermQuery(new Term("subId", NumericUtils.longToPrefixCoded(sub.getId())));

    TopDocs topDocs = searcher.search(query, 1);
    assertNotNull(topDocs);
    assertNotNull(topDocs.scoreDocs);
    assertTrue(topDocs.scoreDocs.length > 0);
    Document doc = searcher.doc(topDocs.scoreDocs[0].doc);
    assertNotNull(doc);
    assertEquals(String.valueOf(sub.getId()), doc.get("subId"));

    searcher.close();
  }
Example #23
 @Override
 public String indexedToReadable(String indexedForm) {
   switch (type) {
     case INTEGER:
       return Integer.toString(NumericUtils.prefixCodedToInt(indexedForm));
     case FLOAT:
       return Float.toString(
           NumericUtils.sortableIntToFloat(NumericUtils.prefixCodedToInt(indexedForm)));
     case LONG:
       return Long.toString(NumericUtils.prefixCodedToLong(indexedForm));
     case DOUBLE:
       return Double.toString(
           NumericUtils.sortableLongToDouble(NumericUtils.prefixCodedToLong(indexedForm)));
     case DATE:
       return dateField.toExternal(new Date(NumericUtils.prefixCodedToLong(indexedForm)));
     default:
       throw new SolrException(
           SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + type);
   }
 }
Example #24
 @Override
 public String readableToIndexed(String val) {
   switch (type) {
     case INTEGER:
       return NumericUtils.intToPrefixCoded(Integer.parseInt(val));
     case FLOAT:
       return NumericUtils.intToPrefixCoded(
           NumericUtils.floatToSortableInt(Float.parseFloat(val)));
     case LONG:
       return NumericUtils.longToPrefixCoded(Long.parseLong(val));
     case DOUBLE:
       return NumericUtils.longToPrefixCoded(
           NumericUtils.doubleToSortableLong(Double.parseDouble(val)));
     case DATE:
       return NumericUtils.longToPrefixCoded(dateField.parseMath(null, val).getTime());
     default:
       throw new SolrException(
           SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + type);
   }
 }
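readableToIndexed is the inverse of the indexedToReadable method shown above, assuming both live on the same trie field type. A small illustrative round trip (assuming type == DOUBLE):

  String indexed = readableToIndexed("3.14");
  assert "3.14".equals(indexedToReadable(indexed));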
Example #25
  /**
   * Returns a number of random albums, using ID3 tag.
   *
   * @param count Number of albums to return.
   * @param musicFolders Only return albums from these folders.
   * @return List of random albums.
   */
  public List<Album> getRandomAlbumsId3(int count, List<MusicFolder> musicFolders) {
    List<Album> result = new ArrayList<Album>();

    IndexReader reader = null;
    try {
      reader = createIndexReader(ALBUM_ID3);
      Searcher searcher = new IndexSearcher(reader);

      List<SpanTermQuery> musicFolderQueries = new ArrayList<SpanTermQuery>();
      for (MusicFolder musicFolder : musicFolders) {
        musicFolderQueries.add(
            new SpanTermQuery(
                new Term(FIELD_FOLDER_ID, NumericUtils.intToPrefixCoded(musicFolder.getId()))));
      }
      Query query =
          new SpanOrQuery(musicFolderQueries.toArray(new SpanQuery[musicFolderQueries.size()]));
      TopDocs topDocs = searcher.search(query, null, Integer.MAX_VALUE);
      List<ScoreDoc> scoreDocs = Lists.newArrayList(topDocs.scoreDocs);
      Random random = new Random(System.currentTimeMillis());

      while (!scoreDocs.isEmpty() && result.size() < count) {
        int index = random.nextInt(scoreDocs.size());
        Document doc = searcher.doc(scoreDocs.remove(index).doc);
        int id = Integer.valueOf(doc.get(FIELD_ID));
        try {
          addIfNotNull(albumDao.getAlbum(id), result);
        } catch (Exception x) {
          LOG.warn("Failed to get album file " + id, x);
        }
      }

    } catch (Throwable x) {
      LOG.error("Failed to search for random albums.", x);
    } finally {
      FileUtil.closeQuietly(reader);
    }
    return result;
  }
Example #26
 @Override
 public BytesRef indexedValueForSearch(Object value) {
   BytesRefBuilder bytesRef = new BytesRefBuilder();
   NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
   return bytesRef.get();
 }
 /**
  * Initializes the token stream with the supplied <code>float</code> value.
  *
  * @param value the value, for which this TokenStream should enumerate tokens.
  * @return this instance, because of this you can use it the following way: <code>
  *     new Field(name, new NumericTokenStream(precisionStep).setFloatValue(value))</code>
  */
 public NumericTokenStream setFloatValue(final float value) {
   numericAtt.init(
       NumericUtils.floatToSortableInt(value), valSize = 32, precisionStep, -precisionStep);
   return this;
 }
 /**
  * Initializes the token stream with the supplied <code>double</code> value.
  *
  * @param value the value, for which this TokenStream should enumerate tokens.
  * @return this instance, because of this you can use it the following way: <code>
  *     new Field(name, new NumericTokenStream(precisionStep).setDoubleValue(value))</code>
  */
 public NumericTokenStream setDoubleValue(final double value) {
   numericAtt.init(
       NumericUtils.doubleToSortableLong(value), valSize = 64, precisionStep, -precisionStep);
   return this;
 }
Example #29
 /** {@inheritDoc} */
 @Override
 public Field sortedField(String name, Float value) {
   int sortable = NumericUtils.floatToSortableInt(value);
   return new NumericDocValuesField(name, sortable);
 }
 @Override
 public BytesRef indexedValueForSearch(Object value) {
   BytesRef bytesRef = new BytesRef();
   NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
   return bytesRef;
 }