Example #1
 public static long count(IndexSearcher searcher, Query query) throws IOException {
   TotalHitCountCollector countCollector = new TotalHitCountCollector();
   // we don't need scores, so wrap it in a constant score query
   if (!(query instanceof ConstantScoreQuery)) {
     query = new ConstantScoreQuery(query);
   }
   searcher.search(query, countCollector);
   return countCollector.getTotalHits();
 }
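A minimal usage sketch for the count helper above (not part of the original example). It assumes a Lucene 5.x-style API (no Version arguments to StandardAnalyzer/IndexWriterConfig) and that the count(IndexSearcher, Query) method from Example #1 is declared in the same class; the field name and document below are made up for illustration.

 import java.io.IOException;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;

 public class CountExample {
   public static void main(String[] args) throws IOException {
     Directory dir = new RAMDirectory();
     // Index a single document so the count below has something to match.
     try (IndexWriter writer =
         new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
       Document doc = new Document();
       doc.add(new StringField("type", "parent", Field.Store.NO));
       writer.addDocument(doc);
     }
     try (DirectoryReader reader = DirectoryReader.open(dir)) {
       IndexSearcher searcher = new IndexSearcher(reader);
       // count() (assumed to be the helper from Example #1, declared in this class) wraps the
       // query in a ConstantScoreQuery and collects only the total hit count.
       long hits = count(searcher, new TermQuery(new Term("type", "parent")));
       System.out.println(hits); // 1
     }
   }
 }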
Example #2
    @Override
    public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext)
        throws IOException {
      final String field;
      final String term;
      if (isParentHit(hitContext.hit())) {
        field = ParentFieldMapper.NAME;
        term = Uid.createUid(hitContext.hit().type(), hitContext.hit().id());
      } else if (isChildHit(hitContext.hit())) {
        field = UidFieldMapper.NAME;
        SearchHitField parentField = hitContext.hit().field(ParentFieldMapper.NAME);
        if (parentField != null) {
          term = parentField.getValue();
        } else {
          SingleFieldsVisitor fieldsVisitor = new SingleFieldsVisitor(ParentFieldMapper.NAME);
          hitContext.reader().document(hitContext.docId(), fieldsVisitor);
          if (fieldsVisitor.fields().isEmpty()) {
            return Lucene.EMPTY_TOP_DOCS;
          }
          term = (String) fieldsVisitor.fields().get(ParentFieldMapper.NAME).get(0);
        }
      } else {
        return Lucene.EMPTY_TOP_DOCS;
      }
      // Only include docs that have the current hit as parent
      Filter filter = new TermFilter(new Term(field, term));
      // Only include docs that have this inner hits type
      Filter typeFilter = documentMapper.typeFilter();

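      // size() == 0 means the inner hits request only needs the total number of matching
      // children, so a TotalHitCountCollector is enough and no score docs are collected.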
      if (size() == 0) {
        TotalHitCountCollector collector = new TotalHitCountCollector();
        context
            .searcher()
            .search(
                new XFilteredQuery(query, new AndFilter(Arrays.asList(filter, typeFilter))),
                collector);
        return new TopDocs(collector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0);
      } else {
        int topN = from() + size();
        TopDocsCollector topDocsCollector;
        if (sort() != null) {
          topDocsCollector =
              TopFieldCollector.create(sort(), topN, true, trackScores(), trackScores(), false);
        } else {
          topDocsCollector = TopScoreDocCollector.create(topN, false);
        }
        context
            .searcher()
            .search(
                new XFilteredQuery(query, new AndFilter(Arrays.asList(filter, typeFilter))),
                topDocsCollector);
        return topDocsCollector.topDocs(from(), size());
      }
    }
Example #3
  @Test
  public void testNestedChildrenFilter() throws Exception {
    int numParentDocs = scaledRandomIntBetween(0, 32);
    int maxChildDocsPerParent = scaledRandomIntBetween(8, 16);

    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    for (int i = 0; i < numParentDocs; i++) {
      int numChildDocs = scaledRandomIntBetween(0, maxChildDocsPerParent);
      List<Document> docs = new ArrayList<>(numChildDocs + 1);
      for (int j = 0; j < numChildDocs; j++) {
        Document childDoc = new Document();
        childDoc.add(new StringField("type", "child", Field.Store.NO));
        docs.add(childDoc);
      }

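      // The parent doc is added last in the block, following Lucene's nested/block-join
      // convention (children first, parent last within addDocuments).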
      Document parentDoc = new Document();
      parentDoc.add(new StringField("type", "parent", Field.Store.NO));
      parentDoc.add(new IntField("num_child_docs", numChildDocs, Field.Store.YES));
      docs.add(parentDoc);
      writer.addDocuments(docs);
    }

    IndexReader reader = writer.getReader();
    writer.close();

    IndexSearcher searcher = new IndexSearcher(reader);
    FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
    BitDocIdSetFilter parentFilter =
        new BitDocIdSetCachingWrapperFilter(
            new QueryWrapperFilter(new TermQuery(new Term("type", "parent"))));
    Filter childFilter = new QueryWrapperFilter(new TermQuery(new Term("type", "child")));
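    // Walk every parent doc in every segment and verify that NestedChildrenFilter matches
    // exactly the number of child docs that were indexed in that parent's block.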
    int checkedParents = 0;
    for (LeafReaderContext leaf : reader.leaves()) {
      DocIdSetIterator parents = parentFilter.getDocIdSet(leaf).iterator();
      for (int parentDoc = parents.nextDoc();
          parentDoc != DocIdSetIterator.NO_MORE_DOCS;
          parentDoc = parents.nextDoc()) {
        int expectedChildDocs =
            leaf.reader().document(parentDoc).getField("num_child_docs").numericValue().intValue();
        hitContext.reset(null, leaf, parentDoc, searcher);
        NestedChildrenFilter nestedChildrenFilter =
            new NestedChildrenFilter(parentFilter, childFilter, hitContext);
        TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
        searcher.search(new ConstantScoreQuery(nestedChildrenFilter), totalHitCountCollector);
        assertThat(totalHitCountCollector.getTotalHits(), equalTo(expectedChildDocs));
        checkedParents++;
      }
    }
    assertThat(checkedParents, equalTo(numParentDocs));
    reader.close();
    dir.close();
  }
Example #4
    @Override
    public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext)
        throws IOException {
      Filter rawParentFilter;
      if (parentObjectMapper == null) {
        rawParentFilter = NonNestedDocsFilter.INSTANCE;
      } else {
        rawParentFilter = parentObjectMapper.nestedTypeFilter();
      }
      FixedBitSetFilter parentFilter =
          context.fixedBitSetFilterCache().getFixedBitSetFilter(rawParentFilter);
      Filter childFilter = context.filterCache().cache(childObjectMapper.nestedTypeFilter());
      Query q =
          new XFilteredQuery(
              query, new NestedChildrenFilter(parentFilter, childFilter, hitContext));
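      // q restricts the original inner-hit query to the nested (child) docs that belong to the
      // parent doc of the current hit.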

      if (size() == 0) {
        TotalHitCountCollector collector = new TotalHitCountCollector();
        context.searcher().search(q, collector);
        return new TopDocs(collector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0);
      } else {
        int topN = from() + size();
        TopDocsCollector topDocsCollector;
        if (sort() != null) {
          try {
            topDocsCollector =
                TopFieldCollector.create(sort(), topN, true, trackScores(), trackScores(), true);
          } catch (IOException e) {
            throw ExceptionsHelper.convertToElastic(e);
          }
        } else {
          topDocsCollector = TopScoreDocCollector.create(topN, true);
        }
        context.searcher().search(q, topDocsCollector);
        return topDocsCollector.topDocs(from(), size());
      }
    }
Example #5
  public void evaluate(
      final EvolutionState state,
      final Individual ind,
      final int subpopulation,
      final int threadnum) {

    if (ind.evaluated) return;

    GAFit fitness = (GAFit) ind.fitness;

    IntegerVectorIndividual intVectorIndividual = (IntegerVectorIndividual) ind;

    query = new BooleanQuery(true);
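    // The genome is read as (clause-type, word-index) pairs: genome[i] selects the clause
    // occurrence (SHOULD/MUST/MUST_NOT), genome[i + 1] picks the term from the positive or
    // negative word list.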
    for (int i = 0; i < (intVectorIndividual.genome.length - 1); i = i + 2) {

      if (intVectorIndividual.genome[i] < 0
          || intVectorIndividual.genome[i] >= wordArrayPos.length
          || intVectorIndividual.genome[i] >= wordArrayNeg.length
          || intVectorIndividual.genome[i + 1] < 0
          || intVectorIndividual.genome[i + 1] >= wordArrayPos.length
          || intVectorIndividual.genome[i + 1] >= wordArrayNeg.length) continue;

      int wordInd = intVectorIndividual.genome[i + 1];

      switch (intVectorIndividual.genome[i]) {
        case 0:
          query.add(
              new TermQuery(new Term(IndexInfoStaticG.FIELD_CONTENTS, wordArrayPos[wordInd])),
              BooleanClause.Occur.SHOULD);
          break;
        case 1:
          query.add(
              new TermQuery(new Term(IndexInfoStaticG.FIELD_CONTENTS, wordArrayPos[wordInd])),
              BooleanClause.Occur.MUST);
          break;
        case 2:
          query.add(
              new TermQuery(new Term(IndexInfoStaticG.FIELD_CONTENTS, wordArrayNeg[wordInd])),
              BooleanClause.Occur.MUST_NOT);
          break;
        default:
          query.add(
              new TermQuery(new Term(IndexInfoStaticG.FIELD_CONTENTS, wordArrayPos[wordInd])),
              BooleanClause.Occur.SHOULD);
      }
    }

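    // Count how many docs the evolved query matches in the positive (category) and negative
    // ("others") training sets, then score the individual by its F1 on the training data.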
    try {
      TotalHitCountCollector collector = new TotalHitCountCollector();
      searcher.search(query, IndexInfoStaticG.catTrainF, collector);
      final int positiveMatch = collector.getTotalHits();

      collector = new TotalHitCountCollector();
      searcher.search(query, IndexInfoStaticG.othersTrainF, collector);
      final int negativeMatch = collector.getTotalHits();

      F1train =
          ClassifyQuery.f1(positiveMatch, negativeMatch, IndexInfoStaticG.totalTrainDocsInCat);

      fitness.setTrainValues(positiveMatch, negativeMatch);
      fitness.setF1Train(F1train);
      fitness.setQuery(query);

    } catch (IOException e) {

      e.printStackTrace();
    }

    float rawfitness = F1train;

    ((SimpleFitness) intVectorIndividual.fitness).setFitness(state, rawfitness, false);

    ind.evaluated = true;
  }
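ClassifyQuery.f1 itself is not shown in this example. As a hedged illustration only, the sketch below assumes positiveMatch counts hits in the target category's training set (true positives), negativeMatch counts hits in the "others" set (false positives), and totalTrainDocsInCat is the number of training docs in the category, which gives the standard F1 score; the real method may differ.

   // Hypothetical reconstruction for illustration; not the original ClassifyQuery.f1.
   // truePositives = positiveMatch, falsePositives = negativeMatch,
   // totalDocsInCategory = IndexInfoStaticG.totalTrainDocsInCat under the assumptions above.
   static float f1(int truePositives, int falsePositives, int totalDocsInCategory) {
     if (truePositives == 0 || totalDocsInCategory == 0) {
       return 0f; // no true positives (or an empty category) yields an F1 of 0
     }
     float precision = truePositives / (float) (truePositives + falsePositives);
     float recall = truePositives / (float) totalDocsInCategory;
     return 2 * precision * recall / (precision + recall);
   }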