Example #1
  public Map<String, Long> countByField(IndexField indexField, FilterBuilder filter) {
    Map<String, Long> counts = new HashMap<>();

    SearchRequestBuilder request =
        client
            .prepareSearch(this.getIndexName())
            .setTypes(this.getIndexType())
            .setQuery(QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), filter))
            .setSize(0)
            .addAggregation(
                AggregationBuilders.terms(indexField.field())
                    .field(indexField.field())
                    .order(Terms.Order.count(false))
                    .size(Integer.MAX_VALUE)
                    .minDocCount(0));

    SearchResponse response = request.get();

    Terms values = response.getAggregations().get(indexField.field());

    for (Terms.Bucket value : values.getBuckets()) {
      counts.put(value.getKey(), value.getDocCount());
    }
    return counts;
  }
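A minimal call-site sketch for the helper above (the enclosing ruleIndex instance and the IndexField constant are hypothetical placeholders; FilterBuilders.matchAllFilter() stands in for any FilterBuilder):

  // Hypothetical usage: count documents per value of one indexed field, with no extra filtering.
  Map<String, Long> counts =
      ruleIndex.countByField(RuleNormalizer.RuleField.SEVERITY, FilterBuilders.matchAllFilter());
  for (Map.Entry<String, Long> entry : counts.entrySet()) {
    System.out.println(entry.getKey() + " -> " + entry.getValue());
  }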
  public void run() {
    Client client = null;
    TransportClient transportClient = null;
    try {
      transportClient = new TransportClient();
      client =
          transportClient.addTransportAddress(new InetSocketTransportAddress("192.168.1.40", 9300));

      SearchResponse response =
          client
              .prepareSearch("tms-allflat")
              .setTypes("personal")
              .setQuery(QueryBuilders.matchAllQuery())
              .addAggregation(
                  AggregationBuilders.terms("aggs1")
                      .field("skill_1")
                      .size(20)
                      .order(Terms.Order.count(false)))
              .execute()
              .actionGet();

      Terms terms = response.getAggregations().get("aggs1");
      terms
          .getBuckets()
          .stream()
          .forEach(s -> System.out.println(s.getKeyAsText() + "(" + s.getDocCount() + ")"));

    } finally {
      // addTransportAddress returns the same TransportClient instance, so a single
      // null-safe close is enough and avoids an NPE if construction failed.
      if (transportClient != null) {
        transportClient.close();
      }
    }
  }
  @Test
  public void givenAnalyzedQuery_whenMakeAggregationOnTermCount_thenEachTokenCountsSeparately() {
    TermsBuilder aggregation = AggregationBuilders.terms("top_tags").field("title");
    SearchResponse response =
        client
            .prepareSearch("blog")
            .setTypes("article")
            .addAggregation(aggregation)
            .execute()
            .actionGet();

    Map<String, Aggregation> results = response.getAggregations().asMap();
    StringTerms topTags = (StringTerms) results.get("top_tags");

    List<String> keys = topTags.getBuckets().stream().map(b -> b.getKey()).collect(toList());
    Collections.sort(keys);
    assertEquals(
        asList(
            "about",
            "article",
            "data",
            "elasticsearch",
            "engines",
            "search",
            "second",
            "spring",
            "tutorial"),
        keys);
  }
Example #4
 private AggregationBuilder<?> termsAgg(MethodField field) throws SqlParseException {
   String aggName = gettAggNameFromParamsOrAlias(field);
   TermsBuilder terms = AggregationBuilders.terms(aggName);
   String value = null;
   for (KVValue kv : field.getParams()) {
     value = kv.value.toString();
     switch (kv.key.toLowerCase()) {
       case "field":
         terms.field(value);
         break;
       case "size":
         terms.size(Integer.parseInt(value));
         break;
       case "shard_size":
         terms.shardSize(Integer.parseInt(value));
         break;
       case "min_doc_count":
         terms.minDocCount(Integer.parseInt(value));
         break;
       case "alias":
       case "nested":
       case "reverse_nested":
         break;
       default:
         throw new SqlParseException("terms aggregation error or undefined field " + kv.toString());
     }
   }
   return terms;
 }
 /*
  * Compute the average employee age for each interest.
  * Aggregations also support nested (hierarchical) roll-ups.
  */
 @Test
 public void subAggregationTest() {
   Client client = ClientTemplate.getClient();
   SearchResponse response =
       client
           .prepareSearch("fccs")
           .setTypes("employee")
           .addAggregation(
               AggregationBuilders.terms("by_interests")
                   .field("interests")
                   .subAggregation(AggregationBuilders.avg("avg_age").field("age")))
           .get();
   Terms terms = response.getAggregations().get("by_interests");
   List<Bucket> buckets = terms.getBuckets();
   for (Bucket bucket : buckets) {
     Avg avg = bucket.getAggregations().get("avg_age");
     System.out.println(
         "key:"
             + bucket.getKey()
             + " doc_count:"
             + bucket.getDocCount()
             + " avg_age:"
             + avg.getValue());
   }
   client.close();
 }
 public void testDisabledAggsDynamicScripts() {
   // dynamic scripts don't need to be enabled for an indexed script to be indexed and later on
   // executed
   assertAcked(
       client()
           .admin()
           .cluster()
           .preparePutStoredScript()
           .setScriptLang(GroovyScriptEngineService.NAME)
           .setId("script1")
           .setSource(new BytesArray("{\"script\":\"2\"}")));
   client().prepareIndex("test", "scriptTest", "1").setSource("{\"theField\":\"foo\"}").get();
   refresh();
   SearchResponse searchResponse =
       client()
           .prepareSearch("test")
           .setSource(
               new SearchSourceBuilder()
                   .aggregation(
                       AggregationBuilders.terms("test")
                           .script(new Script("script1", ScriptType.STORED, null, null))))
           .get();
   assertHitCount(searchResponse, 1);
   assertThat(searchResponse.getAggregations().get("test"), notNullValue());
 }
  void assertBasicAggregationWorks(String indexName) {
    // histogram on a long
    SearchResponse searchRsp =
        client()
            .prepareSearch(indexName)
            .addAggregation(AggregationBuilders.histogram("histo").field("long_sort").interval(10))
            .get();
    ElasticsearchAssertions.assertSearchResponse(searchRsp);
    Histogram histo = searchRsp.getAggregations().get("histo");
    assertNotNull(histo);
    long totalCount = 0;
    for (Histogram.Bucket bucket : histo.getBuckets()) {
      totalCount += bucket.getDocCount();
    }
    assertEquals(totalCount, searchRsp.getHits().getTotalHits());

    // terms on a boolean
    searchRsp =
        client()
            .prepareSearch(indexName)
            .addAggregation(AggregationBuilders.terms("bool_terms").field("bool"))
            .get();
    Terms terms = searchRsp.getAggregations().get("bool_terms");
    totalCount = 0;
    for (Terms.Bucket bucket : terms.getBuckets()) {
      totalCount += bucket.getDocCount();
    }
    assertEquals(totalCount, searchRsp.getHits().getTotalHits());
  }
  @Test
  public void testDynamicTemplateCopyTo() throws Exception {
    assertAcked(
        client()
            .admin()
            .indices()
            .prepareCreate("test-idx")
            .addMapping("doc", createDynamicTemplateMapping()));

    int recordCount = between(1, 200);

    for (int i = 0; i < recordCount * 2; i++) {
      client()
          .prepareIndex("test-idx", "doc", Integer.toString(i))
          .setSource("test_field", "test " + i, "even", i % 2 == 0)
          .get();
    }
    client().admin().indices().prepareRefresh("test-idx").execute().actionGet();

    SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());

    SearchResponse response =
        client()
            .prepareSearch("test-idx")
            .setQuery(QueryBuilders.termQuery("even", true))
            .addAggregation(
                AggregationBuilders.terms("test")
                    .field("test_field")
                    .size(recordCount * 2)
                    .collectMode(aggCollectionMode))
            .addAggregation(
                AggregationBuilders.terms("test_raw")
                    .field("test_field_raw")
                    .size(recordCount * 2)
                    .collectMode(aggCollectionMode))
            .execute()
            .actionGet();

    assertThat(response.getHits().totalHits(), equalTo((long) recordCount));

    assertThat(
        ((Terms) response.getAggregations().get("test")).getBuckets().size(),
        equalTo(recordCount + 1));
    assertThat(
        ((Terms) response.getAggregations().get("test_raw")).getBuckets().size(),
        equalTo(recordCount));
  }
Example #9
  public Map<QualityProfileKey, Multimap<String, FacetValue>> getStatsByProfileKey(
      List<QualityProfileKey> keys) {

    String[] stringKeys = new String[keys.size()];
    for (int i = 0; i < keys.size(); i++) {
      stringKeys[i] = keys.get(i).toString();
    }

    SearchResponse response =
        getClient()
            .prepareSearch(this.getIndexName())
            .setQuery(
                QueryBuilders.filteredQuery(
                    QueryBuilders.matchAllQuery(),
                    FilterBuilders.termsFilter(
                        ActiveRuleNormalizer.ActiveRuleField.PROFILE_KEY.field(), stringKeys)))
            .addAggregation(
                AggregationBuilders.terms(ActiveRuleNormalizer.ActiveRuleField.PROFILE_KEY.field())
                    .field(ActiveRuleNormalizer.ActiveRuleField.PROFILE_KEY.field())
                    .subAggregation(
                        AggregationBuilders.terms(
                                ActiveRuleNormalizer.ActiveRuleField.INHERITANCE.field())
                            .field(ActiveRuleNormalizer.ActiveRuleField.INHERITANCE.field()))
                    .subAggregation(
                        AggregationBuilders.terms(
                                ActiveRuleNormalizer.ActiveRuleField.SEVERITY.field())
                            .field(ActiveRuleNormalizer.ActiveRuleField.SEVERITY.field())))
            .setSize(0)
            .setTypes(this.getIndexType())
            .get();

    Map<QualityProfileKey, Multimap<String, FacetValue>> stats =
        new HashMap<QualityProfileKey, Multimap<String, FacetValue>>();
    Aggregation aggregation =
        response.getAggregations().get(ActiveRuleNormalizer.ActiveRuleField.PROFILE_KEY.field());
    for (Terms.Bucket value : ((Terms) aggregation).getBuckets()) {
      stats.put(
          QualityProfileKey.parse(value.getKey()),
          this.processAggregations(value.getAggregations()));
    }

    return stats;
  }
 /*
  * What do all employees with the last name "Smith" have most in common (interests)?
  * The aggregation only covers the documents that match the query.
  */
 @Test
 public void queryAggregationTest() {
   Client client = ClientTemplate.getClient();
   SearchResponse response =
       client
           .prepareSearch("fccs")
           .setTypes("employee")
           .setQuery(QueryBuilders.matchQuery("lastName", "Smith"))
           .addAggregation(AggregationBuilders.terms("by_interests").field("interests"))
           .get();
   Terms terms = response.getAggregations().get("by_interests");
   for (Terms.Bucket bucket : terms.getBuckets()) {
     System.out.println("key:" + bucket.getKey() + " doc_count:" + bucket.getDocCount());
   }
   client.close();
 }
Example #11
  /**
   * Builds the aggregation used for a GROUP BY field.
   *
   * @param field the field (plain or method field) to group by
   * @return the aggregation builder for this group
   * @throws SqlParseException if the field parameters cannot be parsed
   */
  public AggregationBuilder<?> makeGroupAgg(Field field) throws SqlParseException {
    if (field instanceof MethodField) {

      MethodField methodField = (MethodField) field;
      if (methodField.getName().equals("filter")) {
        Map<String, Object> paramsAsMap = methodField.getParamsAsMap();
        Where where = (Where) paramsAsMap.get("where");
        return AggregationBuilders.filter(paramsAsMap.get("alias").toString())
            .filter(FilterMaker.explan(where));
      }
      return makeRangeGroup(methodField);
    } else {
      TermsBuilder termsBuilder = AggregationBuilders.terms(field.getName()).field(field.getName());
      groupMap.put(field.getName(), new KVValue("KEY", termsBuilder));
      return termsBuilder;
    }
  }
  @Test
  public void
      givenNotAnalyzedQuery_whenMakeAggregationOnTermCount_thenEachTermCountsIndividually() {
    TermsBuilder aggregation =
        AggregationBuilders.terms("top_tags")
            .field("tags")
            .order(Terms.Order.aggregation("_count", false));
    SearchResponse response =
        client
            .prepareSearch("blog")
            .setTypes("article")
            .addAggregation(aggregation)
            .execute()
            .actionGet();

    Map<String, Aggregation> results = response.getAggregations().asMap();
    StringTerms topTags = (StringTerms) results.get("top_tags");

    List<String> keys = topTags.getBuckets().stream().map(b -> b.getKey()).collect(toList());
    assertEquals(asList("elasticsearch", "spring data", "search engines", "tutorial"), keys);
  }
  @Override
  public void execute(Client esClient, String index, boolean verbose) {
    FilterBuilder fb =
        new OrFilterBuilder(
            new TermFilterBuilder("eventName", "TopBrowser"),
            new TermFilterBuilder("data.playerEvent", "str-start"));

    AbstractAggregationBuilder aggregations =
        AggregationBuilders.terms("by_browser")
            .field("data.browser")
            .size(50)
            .subAggregation(AggregationBuilders.sum("sum").field("counter"))
            .order(Terms.Order.aggregation("sum", false));

    if (index.equals("")) {
      this.searchResponse =
          esClient
              .prepareSearch()
              .setQuery(
                  org.elasticsearch.index.query.QueryBuilders.filteredQuery(
                      new MatchAllQueryBuilder(), fb))
              .setSize(50)
              .setPostFilter(fb)
              .addAggregation(aggregations)
              .get();
    } else {
      this.searchResponse =
          esClient
              .prepareSearch(index)
              .setQuery(
                  org.elasticsearch.index.query.QueryBuilders.filteredQuery(
                      new MatchAllQueryBuilder(), fb))
              .setSize(50)
              .setPostFilter(fb)
              .addAggregation(aggregations)
              .get();
    }
    fileLog.info(String.valueOf(this.searchResponse.getTookInMillis()));
    if (verbose) printResult();
  }
  public void testStringSpecialValueVariable() throws Exception {
    // i.e. expression script for term aggregations, which is not allowed
    createIndex("test");
    ensureGreen("test");
    indexRandom(
        true,
        client().prepareIndex("test", "doc", "1").setSource("text", "hello"),
        client().prepareIndex("test", "doc", "2").setSource("text", "goodbye"),
        client().prepareIndex("test", "doc", "3").setSource("text", "hello"));

    SearchRequestBuilder req = client().prepareSearch().setIndices("test");
    req.setQuery(QueryBuilders.matchAllQuery())
        .addAggregation(
            AggregationBuilders.terms("term_agg")
                .field("text")
                .script(
                    new Script(
                        "_value", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null)));

    String message;
    try {
      // shards that don't have docs with the "text" field will not fail,
      // so we may or may not get a total failure
      SearchResponse rsp = req.get();
      assertThat(
          rsp.getShardFailures().length,
          greaterThan(0)); // at least the shards containing the docs should have failed
      message = rsp.getShardFailures()[0].reason();
    } catch (SearchPhaseExecutionException e) {
      message = e.toString();
    }
    assertThat(
        message + "should have contained ExpressionScriptExecutionException",
        message.contains("ExpressionScriptExecutionException"),
        equalTo(true));
    assertThat(
        message + "should have contained text variable error",
        message.contains("text variable"),
        equalTo(true));
  }
Example #15
 @Override
 public Map<String, Integer> countTermsByField(SearchCriteria searchCriteria, String field) {
   ElasticSearchCriteria criteria = (ElasticSearchCriteria) searchCriteria;
   if (criteria == null) return null;
   SearchRequestBuilder srb = criteria2builder(criteria);
   srb.setFrom(0);
   srb.setSize(0);
   TermsBuilder tb = AggregationBuilders.terms(field);
   tb.field(field);
   srb.addAggregation(tb);
   try {
     SearchResponse response = srb.execute().get();
     StringTerms aggr = response.getAggregations().get(field);
     Map<String, Integer> result = new LinkedHashMap<>();
     for (Terms.Bucket bucket : aggr.getBuckets()) {
       result.put(bucket.getKey(), (int) bucket.getDocCount());
     }
     return result;
   } catch (Exception e) {
     logger.error(e.getMessage(), e);
   }
   return Collections.emptyMap();
 }
Example #16
  public static void parseOutputAggregation(
      AdvancedQueryPojo.QueryOutputPojo.AggregationOutputPojo aggregation,
      AliasLookupTable aliasLookup,
      boolean geoLowAccuracy,
      String[] entTypeFilterStrings,
      String[] assocVerbFilterStrings,
      SearchRequestBuilder searchSettings,
      BoolFilterBuilder parentFilterObj,
      String[] communityIdStrs) {
    // 1.] Go through aggregation list

    // 1.1] Apply "simple specifications" if necessary

    // Geo

    if ((null != aggregation)
        && (null != aggregation.geoNumReturn)
        && (aggregation.geoNumReturn > 0)) {
      CrossVersionFacetBuilder.TermsFacetBuilder fb =
          CrossVersionFacetBuilders.termsFacet("geo")
              .field(DocumentPojo.locs_)
              .size(aggregation.geoNumReturn);
      // Gross raw handling for facets
      if (null != parentFilterObj) {
        fb = fb.facetFilter(parentFilterObj);
      }
      searchSettings.addFacet(fb);
    } // (TESTED)

    // Temporal

    if ((null != aggregation) && (null != aggregation.timesInterval)) {
      if (aggregation.timesInterval.contains("m")) {
        aggregation.timesInterval = "month";
      }
      CrossVersionFacetBuilder.DateHistogramFacetBuilder fb =
          CrossVersionFacetBuilders.dateHistogramFacet("time")
              .field(DocumentPojo.publishedDate_)
              .interval(aggregation.timesInterval);
      // Gross raw handling for facets
      if (null != parentFilterObj) {
        fb = fb.facetFilter(parentFilterObj);
      }
      searchSettings.addFacet(fb);

      // TODO (INF-2688): if using certain types of moments then don't want this?
    } // (TESTED)

    // Temporal Moments

    if ((null != aggregation) && (null != aggregation.moments)) {
      if (null == aggregation.moments.timesInterval) {
        if (null != aggregation.timesInterval) {
          aggregation.moments.timesInterval = aggregation.timesInterval;
        } else {
          aggregation.moments.timesInterval = "m";
        }
      }
      if (aggregation.moments.timesInterval.contains("m")) {
        aggregation.moments.timesInterval = "month";
      }

      // TODO (INF-2688): Other cross filter type things
      if (!geoLowAccuracy
          && (null != aggregation.moments.geoNumReturn)
          && (aggregation.moments.geoNumReturn > 0)) {
        DateHistogramBuilder timeAgg =
            AggregationBuilders.dateHistogram("moments")
                .field(DocumentPojo.publishedDate_)
                .interval(new Interval(aggregation.moments.timesInterval));
        TermsBuilder geoAgg =
            AggregationBuilders.terms("geo")
                .field(DocumentPojo.locs_)
                .size(aggregation.moments.geoNumReturn);
        timeAgg.subAggregation(geoAgg);
        searchSettings.addAggregation(timeAgg);
      }

      // TODO (CORE-89)
      if (null != aggregation.moments.associationsNumReturn
          && aggregation.moments.associationsNumReturn >= 0) {
        // TODO need to check if indexes mapping use doc.associations.assoc_index == docValue
        // fail out or don't include those communities if they don't
        if (validateAssociationMapping(communityIdStrs)) {
          DateHistogramBuilder assocTimeAgg =
              AggregationBuilders.dateHistogram("moments.assoc")
                  .field(DocumentPojo.publishedDate_)
                  .interval(new Interval(aggregation.moments.timesInterval));
          TermsBuilder assocAgg =
              AggregationBuilders.terms("assoc")
                  .field(AssociationPojo.assoc_index_)
                  .size(aggregation.moments.associationsNumReturn);
          NestedBuilder nested =
              AggregationBuilders.nested("moments.assoc.nested")
                  .path(DocumentPojo.associations_)
                  .subAggregation(assocAgg);
          assocTimeAgg.subAggregation(nested);
          searchSettings.addAggregation(assocTimeAgg);
        }
      }

      if (null != aggregation.moments.entityList) {
        for (String entIndex : aggregation.moments.entityList) {

          CrossVersionFacetBuilder.DateHistogramFacetBuilder fb =
              CrossVersionFacetBuilders.dateHistogramFacet("moments." + entIndex)
                  .field(DocumentPojo.publishedDate_)
                  .interval(aggregation.moments.timesInterval);

          EntityFeaturePojo alias = null;
          if (null != aliasLookup) {
            alias = aliasLookup.getAliases(entIndex);
          }
          if (null == alias) { // no alias
            fb =
                fb.facetFilter(
                    FilterBuilders.nestedFilter(
                        DocumentPojo.entities_,
                        FilterBuilders.termFilter(EntityPojo.index_, entIndex)));
          } // TESTED
          else {
            QueryFilterBuilder qfb = null;
            if ((null != alias.getSemanticLinks()) && !alias.getSemanticLinks().isEmpty()) {
              BoolQueryBuilder qb = QueryBuilders.boolQuery();
              for (String textAlias : alias.getSemanticLinks()) {
                qb =
                    qb.should(
                        CrossVersionQueryBuilders.matchPhraseQuery(
                            DocumentPojo.fullText_, textAlias));
              }
              qfb = FilterBuilders.queryFilter(qb);
            } // TESTED
            if (!alias.getAlias().isEmpty()) {
              NestedFilterBuilder nfb =
                  FilterBuilders.nestedFilter(
                      DocumentPojo.entities_,
                      FilterBuilders.termsFilter(EntityPojo.index_, entIndex, alias.getAlias()));
              if (null == qfb) {
                fb = fb.facetFilter(nfb);
              } // TESTED
              else {
                BoolFilterBuilder bfb = FilterBuilders.boolFilter().should(nfb).should(qfb);
                fb = fb.facetFilter(bfb);
              } // TESTED
            } else if (null != qfb) {
              fb = fb.facetFilter(qfb);
            } // TESTED
          } // TESTED

          // Gross raw handling for facets
          if (null != parentFilterObj) {
            fb = fb.facetFilter(parentFilterObj);
          }
          searchSettings.addFacet(fb);
        }
      } // (end list over entities)
    } // TESTED

    // Entities - due to problems with significance, handled on a document by document basis, see
    // Significance helper class

    // Associations (Events/Facts)

    // Association verb category filter
    StringBuilder verbCatRegex = null;
    StringBuilder entTypeRegex = null;

    if (((null != aggregation)
            && (null != aggregation.eventsNumReturn)
            && (aggregation.eventsNumReturn > 0))
        || ((null != aggregation)
            && (null != aggregation.factsNumReturn)
            && (aggregation.factsNumReturn > 0))) {
      if (null != entTypeFilterStrings) {
        boolean bNegative = false;
        if ('-' != entTypeFilterStrings[0].charAt(0)) { // positive filtering
          entTypeRegex = new StringBuilder("(?:");
        } else {
          bNegative = true;
          entTypeRegex = new StringBuilder("(?!");
          // (this is a lookahead but will be fine because of the .*/ in front of it)
        }
        for (String entType : entTypeFilterStrings) {
          if (bNegative && ('-' == entType.charAt(0))) {
            entType = entType.substring(1);
          }
          entType = entType.replace("|", "%7C");
          entTypeRegex.append(".*?/").append(Pattern.quote(entType.toLowerCase())).append('|');
          // (can't match greedily because of the 2nd instance of entity type)
        }
        entTypeRegex.setLength(entTypeRegex.length() - 1); // (remove trailing |)
        entTypeRegex.append(")");
        if (bNegative) {
          entTypeRegex.append("[^|]*"); // (now the actual verb, if a -ve lookahead)				
        }
      } // TESTED

      if (null != assocVerbFilterStrings) {
        boolean bNegative = false;
        if ('-' != assocVerbFilterStrings[0].charAt(0)) { // positive filtering
          verbCatRegex = new StringBuilder("\\|(?:");
        } else {
          bNegative = true;
          verbCatRegex = new StringBuilder("\\|(?!");
          // (this is a lookahead but will be fine because of the "^[^|]*\\" in front of it)

          // eg say I have -VERB then subject|VERB|object will match because if the
        }
        for (String assocVerbFilterString : assocVerbFilterStrings) {
          if (bNegative && ('-' == assocVerbFilterString.charAt(0))) {
            assocVerbFilterString = assocVerbFilterString.substring(1);
          }
          assocVerbFilterString = assocVerbFilterString.replace("|", "%7C");
          verbCatRegex.append(Pattern.quote(assocVerbFilterString)).append('|');
        }
        verbCatRegex.setLength(verbCatRegex.length() - 1); // (remove trailing |)
        verbCatRegex.append(")");
        if (bNegative) {
          verbCatRegex.append("[^|]*"); // (now the actual verb, if a -ve lookahead)
        }
      } // TESTED
    }
    // TESTED (all combinations of 1/2 people, 1/2 verbs)

    if ((null != aggregation)
        && (null != aggregation.eventsNumReturn)
        && (aggregation.eventsNumReturn > 0)) {
      StringBuffer regex = new StringBuffer("^Event\\|");
      if (null != entTypeRegex) {
        regex.append(entTypeRegex);
      } else {
        regex.append("[^|]*");
      }
      if (null != verbCatRegex) {
        regex.append(verbCatRegex);
      } else if (null != entTypeRegex) {
        regex.append("\\|[^|]*");
      } else {
        regex.append(".*");
      }
      if (null != entTypeRegex) {
        regex.append("\\|").append(entTypeRegex);
        regex.append(".*");
      } else {
        regex.append("\\|.*");
      }
      // DEBUG
      // System.out.println("REGEX==" + regex.toString());
      // TESTED (all combinations of 1/2 people, 1/2 verbs)
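      // Illustration only (hypothetical inputs, not part of the original code): with a positive
      // entity-type filter of {"Person"} and a positive verb filter of {"quotation"}, the code
      // above would build roughly:
      //   ^Event\|(?:.*?/\Qperson\E)\|(?:\Qquotation\E)\|(?:.*?/\Qperson\E).*
      // (Pattern.quote wraps each literal in \Q...\E.) The Fact branch below builds the same
      // regex except for its ^Fact\| prefix.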

      CrossVersionFacetBuilder.TermsFacetBuilder fb =
          CrossVersionFacetBuilders.termsFacet("events")
              .field(AssociationPojo.assoc_index_)
              .size(aggregation.eventsNumReturn)
              .nested(DocumentPojo.associations_);
      fb.regex(regex.toString());

      // Gross raw handling for facets
      if (null != parentFilterObj) {
        fb = fb.facetFilter(parentFilterObj);
      }
      searchSettings.addFacet(fb);
    }
    if ((null != aggregation)
        && (null != aggregation.factsNumReturn)
        && (aggregation.factsNumReturn > 0)) {
      StringBuffer regex = new StringBuffer("^Fact\\|");
      if (null != entTypeRegex) {
        regex.append(entTypeRegex);
      } else {
        regex.append("[^|]*");
      }
      if (null != verbCatRegex) {
        regex.append(verbCatRegex);
      } else if (null != entTypeRegex) {
        regex.append("\\|[^|]*");
      } else {
        regex.append(".*");
      }
      if (null != entTypeRegex) {
        regex.append("\\|").append(entTypeRegex);
        regex.append(".*");
      } else {
        regex.append("\\|.*");
      }
      // DEBUG
      // System.out.println("REGEX==" + regex.toString());
      // TESTED (all combinations of 1/2 people, 1/2 verbs)

      CrossVersionFacetBuilder.TermsFacetBuilder fb =
          CrossVersionFacetBuilders.termsFacet("facts")
              .field(AssociationPojo.assoc_index_)
              .size(aggregation.factsNumReturn)
              .nested(DocumentPojo.associations_);
      fb.regex(regex.toString());

      // Gross raw handling for facets
      if (null != parentFilterObj) {
        fb = fb.facetFilter(parentFilterObj);
      }
      searchSettings.addFacet(fb);
    }

    // Source management/monitoring

    if ((null != aggregation)
        && (null != aggregation.sourceMetadata)
        && (aggregation.sourceMetadata > 0)) {
      CrossVersionFacetBuilder.TermsFacetBuilder fb =
          CrossVersionFacetBuilders.termsFacet("sourceTags")
              .field(DocumentPojo.tags_)
              .size(aggregation.sourceMetadata)
              .facetFilter(parentFilterObj);
      CrossVersionFacetBuilder.TermsFacetBuilder fb1 =
          CrossVersionFacetBuilders.termsFacet("sourceTypes")
              .field(DocumentPojo.mediaType_)
              .size(aggregation.sourceMetadata)
              .facetFilter(parentFilterObj);
      // Gross raw handling for facets
      if (null != parentFilterObj) {
        fb = fb.facetFilter(parentFilterObj);
        fb1 = fb1.facetFilter(parentFilterObj);
      }
      searchSettings.addFacet(fb);
      searchSettings.addFacet(fb1);
    }

    if ((null != aggregation) && (null != aggregation.sources) && (aggregation.sources > 0)) {
      CrossVersionFacetBuilder.TermsFacetBuilder fb =
          CrossVersionFacetBuilders.termsFacet("sourceKeys")
              .field(DocumentPojo.sourceKey_)
              .size(aggregation.sources);
      // Gross raw handling for facets
      if (null != parentFilterObj) {
        fb = fb.facetFilter(parentFilterObj);
      }
      searchSettings.addFacet(fb);
    }
  } // TESTED
Example #17
  @Test
  // Just test the integration with facets and aggregations, not the facet and aggregation
  // functionality!
  public void testFacetsAndAggregations() throws Exception {
    client().admin().indices().prepareCreate("test").execute().actionGet();
    ensureGreen();

    int numQueries = atLeast(250);
    int numUniqueQueries = between(1, numQueries / 2);
    String[] values = new String[numUniqueQueries];
    for (int i = 0; i < values.length; i++) {
      values[i] = "value" + i;
    }
    int[] expectedCount = new int[numUniqueQueries];

    logger.info("--> registering {} queries", numQueries);
    for (int i = 0; i < numQueries; i++) {
      String value = values[i % numUniqueQueries];
      expectedCount[i % numUniqueQueries]++;
      QueryBuilder queryBuilder = matchQuery("field1", value);
      client()
          .prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i))
          .setSource(
              jsonBuilder()
                  .startObject()
                  .field("query", queryBuilder)
                  .field("field2", "b")
                  .endObject())
          .execute()
          .actionGet();
    }
    client().admin().indices().prepareRefresh("test").execute().actionGet();

    for (int i = 0; i < numQueries; i++) {
      String value = values[i % numUniqueQueries];
      PercolateRequestBuilder percolateRequestBuilder =
          client()
              .preparePercolate()
              .setIndices("test")
              .setDocumentType("type")
              .setPercolateDoc(
                  docBuilder()
                      .setDoc(jsonBuilder().startObject().field("field1", value).endObject()));

      boolean useAggs = randomBoolean();
      if (useAggs) {
        percolateRequestBuilder.addAggregation(AggregationBuilders.terms("a").field("field2"));
      } else {
        percolateRequestBuilder.addFacet(FacetBuilders.termsFacet("a").field("field2"));
      }

      if (randomBoolean()) {
        percolateRequestBuilder.setPercolateQuery(matchAllQuery());
      }
      if (randomBoolean()) {
        percolateRequestBuilder.setScore(true);
      } else {
        percolateRequestBuilder.setSortByScore(true).setSize(numQueries);
      }

      boolean countOnly = randomBoolean();
      if (countOnly) {
        percolateRequestBuilder.setOnlyCount(countOnly);
      }

      PercolateResponse response = percolateRequestBuilder.execute().actionGet();
      assertMatchCount(response, expectedCount[i % numUniqueQueries]);
      if (!countOnly) {
        assertThat(response.getMatches(), arrayWithSize(expectedCount[i % numUniqueQueries]));
      }

      if (useAggs) {
        List<Aggregation> aggregations = response.getAggregations().asList();
        assertThat(aggregations.size(), equalTo(1));
        assertThat(aggregations.get(0).getName(), equalTo("a"));
        List<Terms.Bucket> buckets =
            new ArrayList<Terms.Bucket>(((Terms) aggregations.get(0)).buckets());
        assertThat(buckets.size(), equalTo(1));
        assertThat(buckets.get(0).getKey().string(), equalTo("b"));
        assertThat(buckets.get(0).getDocCount(), equalTo((long) expectedCount[i % values.length]));
      } else {
        assertThat(response.getFacets().facets().size(), equalTo(1));
        assertThat(response.getFacets().facets().get(0).getName(), equalTo("a"));
        assertThat(
            ((TermsFacet) response.getFacets().facets().get(0)).getEntries().size(), equalTo(1));
        assertThat(
            ((TermsFacet) response.getFacets().facets().get(0)).getEntries().get(0).getCount(),
            equalTo(expectedCount[i % values.length]));
        assertThat(
            ((TermsFacet) response.getFacets().facets().get(0))
                .getEntries()
                .get(0)
                .getTerm()
                .string(),
            equalTo("b"));
      }
    }
  }
 public void testAllOpsDisabledIndexedScripts() throws IOException {
   if (randomBoolean()) {
     client()
         .preparePutIndexedScript(
             ExpressionScriptEngineService.NAME, "script1", "{\"script\":\"2\"}")
         .get();
   } else {
     client()
         .prepareIndex(ScriptService.SCRIPT_INDEX, ExpressionScriptEngineService.NAME, "script1")
         .setSource("{\"script\":\"2\"}")
         .get();
   }
   client().prepareIndex("test", "scriptTest", "1").setSource("{\"theField\":\"foo\"}").get();
   try {
     client()
         .prepareUpdate("test", "scriptTest", "1")
         .setScript(
             new Script(
                 "script1",
                 ScriptService.ScriptType.INDEXED,
                 ExpressionScriptEngineService.NAME,
                 null))
         .get();
     fail("update script should have been rejected");
   } catch (Exception e) {
     assertThat(e.getMessage(), containsString("failed to execute script"));
     assertThat(
         e.getCause().getMessage(),
         containsString(
             "scripts of type [indexed], operation [update] and lang [expression] are disabled"));
   }
   try {
     client()
         .prepareSearch()
         .setSource(
             new SearchSourceBuilder()
                 .scriptField(
                     "test1", new Script("script1", ScriptType.INDEXED, "expression", null)))
         .setIndices("test")
         .setTypes("scriptTest")
         .get();
     fail("search script should have been rejected");
   } catch (Exception e) {
     assertThat(
         e.toString(),
         containsString(
             "scripts of type [indexed], operation [search] and lang [expression] are disabled"));
   }
   try {
     client()
         .prepareSearch("test")
         .setSource(
             new SearchSourceBuilder()
                 .aggregation(
                     AggregationBuilders.terms("test")
                         .script(new Script("script1", ScriptType.INDEXED, "expression", null))))
          .get();
      fail("aggs script should have been rejected");
   } catch (Exception e) {
     assertThat(
         e.toString(),
         containsString(
             "scripts of type [indexed], operation [aggs] and lang [expression] are disabled"));
   }
 }