Example 1
@Override
  public Facet reduce(List<Facet> facets) {
    if (facets.size() == 1) {
      return facets.get(0);
    }
    InternalDoubleTermsFacet first = (InternalDoubleTermsFacet) facets.get(0);
    TDoubleIntHashMap aggregated = CacheRecycler.popDoubleIntMap();
    long missing = 0;
    long total = 0;
    for (Facet facet : facets) {
      InternalDoubleTermsFacet mFacet = (InternalDoubleTermsFacet) facet;
      missing += mFacet.getMissingCount();
      total += mFacet.getTotalCount();
      for (DoubleEntry entry : mFacet.entries) {
        aggregated.adjustOrPutValue(entry.term, entry.getCount(), entry.getCount());
      }
    }

    BoundedTreeSet<DoubleEntry> ordered =
        new BoundedTreeSet<DoubleEntry>(first.comparatorType.comparator(), first.requiredSize);
    for (TDoubleIntIterator it = aggregated.iterator(); it.hasNext(); ) {
      it.advance();
      ordered.add(new DoubleEntry(it.key(), it.value()));
    }
    first.entries = ordered;
    first.missing = missing;
    first.total = total;

    CacheRecycler.pushDoubleIntMap(aggregated);

    return first;
  }
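This first example shows the recycler idiom this page collects: pop a pooled Trove map, aggregate into it, copy the results out (here into the BoundedTreeSet), and push the map back before returning. A minimal standalone sketch of that borrow/use/return cycle, assuming the static CacheRecycler API used above; the try/finally is a defensive addition, not part of the original:

TDoubleIntHashMap counts = CacheRecycler.popDoubleIntMap(); // borrow an empty map from the pool
try {
  counts.adjustOrPutValue(1.5, 3, 3); // add 3 to the count for key 1.5, or insert 3 if absent
  counts.adjustOrPutValue(1.5, 2, 2); // counts.get(1.5) is now 5
  // copy anything you still need out of the map here; it must not be used after the push
} finally {
  CacheRecycler.pushDoubleIntMap(counts); // return the map so later requests can reuse it
}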
Example 2
 @Override
 public void contextClear() {
   if (uidToScore != null) {
     CacheRecycler.pushObjectFloatMap(uidToScore);
   }
   uidToScore = null;
 }
Example 3
 @Override
 public void contextRewrite(SearchContext searchContext) throws Exception {
   searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
   uidToScore = CacheRecycler.popObjectFloatMap();
   ParentUidCollector collector = new ParentUidCollector(uidToScore, searchContext, parentType);
   Query parentQuery;
   if (rewrittenParentQuery == null) {
     parentQuery = rewrittenParentQuery = searchContext.searcher().rewrite(originalParentQuery);
   } else {
     parentQuery = rewrittenParentQuery;
   }
   searchContext.searcher().search(parentQuery, collector);
 }
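Examples 2 and 3 are two halves of one lifecycle: contextRewrite borrows the uidToScore map from the recycler before running the parent query, and contextClear returns it, null-guarded because clear can run even when the pop never happened. A condensed sketch of that pairing; ScoreHolder is hypothetical and the map's key type is simplified, but the pop/push calls match the snippets above:

class ScoreHolder {
  private TObjectFloatHashMap<Object> uidToScore; // key type simplified for the sketch

  void setUp() {
    uidToScore = CacheRecycler.popObjectFloatMap(); // borrow on query setup
  }

  void tearDown() {
    if (uidToScore != null) { // setUp may have failed before the pop
      CacheRecycler.pushObjectFloatMap(uidToScore); // return on context clear
    }
    uidToScore = null; // drop the reference either way
  }
}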
  @Override
  public Facet reduce(List<Facet> facets) {
    if (facets.size() == 1) {
      return facets.get(0);
    }

    InternalLongTermsFacet first = null;

    TLongIntHashMap aggregated = CacheRecycler.popLongIntMap();
    long missing = 0;
    long total = 0;
    for (Facet facet : facets) {
      TermsFacet termsFacet = (TermsFacet) facet;
      // termsFacet may be an InternalStringTermsFacet, for shards where the field was unmapped
      if (first == null && termsFacet instanceof InternalLongTermsFacet) {
        first = (InternalLongTermsFacet) termsFacet;
      }
      missing += termsFacet.getMissingCount();
      total += termsFacet.getTotalCount();
      for (Entry entry : termsFacet.getEntries()) {
        aggregated.adjustOrPutValue(((LongEntry) entry).term, entry.getCount(), entry.getCount());
      }
    }

    BoundedTreeSet<LongEntry> ordered =
        new BoundedTreeSet<LongEntry>(first.comparatorType.comparator(), first.requiredSize);
    for (TLongIntIterator it = aggregated.iterator(); it.hasNext(); ) {
      it.advance();
      ordered.add(new LongEntry(it.key(), it.value()));
    }
    first.entries = ordered;
    first.missing = missing;
    first.total = total;

    CacheRecycler.pushLongIntMap(aggregated);

    return first;
  }
 @Override
 public Facet facet() {
   TIntIntHashMap facets = aggregator.facets();
   if (facets.isEmpty()) {
     CacheRecycler.pushIntIntMap(facets);
     return new InternalIntTermsFacet(
         facetName,
         comparatorType,
         size,
         ImmutableList.<InternalIntTermsFacet.IntEntry>of(),
         aggregator.missing());
   } else {
     if (size < EntryPriorityQueue.LIMIT) {
       EntryPriorityQueue ordered = new EntryPriorityQueue(size, comparatorType.comparator());
       for (TIntIntIterator it = facets.iterator(); it.hasNext(); ) {
         it.advance();
         ordered.insertWithOverflow(new InternalIntTermsFacet.IntEntry(it.key(), it.value()));
       }
       InternalIntTermsFacet.IntEntry[] list = new InternalIntTermsFacet.IntEntry[ordered.size()];
       for (int i = ordered.size() - 1; i >= 0; i--) {
         list[i] = (InternalIntTermsFacet.IntEntry) ordered.pop();
       }
       CacheRecycler.pushIntIntMap(facets);
       return new InternalIntTermsFacet(
           facetName, comparatorType, size, Arrays.asList(list), aggregator.missing());
     } else {
       BoundedTreeSet<InternalIntTermsFacet.IntEntry> ordered =
           new BoundedTreeSet<InternalIntTermsFacet.IntEntry>(comparatorType.comparator(), size);
       for (TIntIntIterator it = facets.iterator(); it.hasNext(); ) {
         it.advance();
         ordered.add(new InternalIntTermsFacet.IntEntry(it.key(), it.value()));
       }
       CacheRecycler.pushIntIntMap(facets);
       return new InternalIntTermsFacet(
           facetName, comparatorType, size, ordered, aggregator.missing());
     }
   }
 }
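This facet() picks between two top-N structures: below EntryPriorityQueue.LIMIT it uses a bounded min-heap whose insertWithOverflow evicts the weakest entry, then pops the heap back-to-front into a sorted array; at or above the limit it falls back to a self-trimming BoundedTreeSet. Note that every branch pushes the recycled facets map back before constructing the result. A minimal sketch of the heap half, subclassing org.apache.lucene.util.PriorityQueue directly (Lucene 4.x-era API; plain Integer values stand in for IntEntry):

org.apache.lucene.util.PriorityQueue<Integer> topN =
    new org.apache.lucene.util.PriorityQueue<Integer>(3) {
      @Override
      protected boolean lessThan(Integer a, Integer b) {
        return a < b; // min-heap: the weakest entry sits on top and is evicted first
      }
    };
for (int v : new int[] {5, 1, 9, 7, 3}) {
  topN.insertWithOverflow(v); // keeps only the 3 largest values
}
Integer[] sorted = new Integer[topN.size()];
for (int i = topN.size() - 1; i >= 0; i--) {
  sorted[i] = topN.pop(); // pop returns the weakest first; back-filling puts the strongest first
}
// sorted is now {9, 7, 5}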
  public ValueScriptHistogramFacetExecutor(
      IndexNumericFieldData indexFieldData,
      String scriptLang,
      String valueScript,
      Map<String, Object> params,
      long interval,
      HistogramFacet.ComparatorType comparatorType,
      SearchContext context) {
    this.comparatorType = comparatorType;
    this.indexFieldData = indexFieldData;
    this.interval = interval;
    this.valueScript =
        context.scriptService().search(context.lookup(), scriptLang, valueScript, params);

    this.entries = CacheRecycler.popLongObjectMap();
  }
  public static class DateHistogramProc implements LongValues.ValueInDocProc {

    private final TimeZoneRounding tzRounding;

    protected final SearchScript valueScript;

    final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries =
        CacheRecycler.popLongObjectMap();

    public DateHistogramProc(TimeZoneRounding tzRounding, SearchScript valueScript) {
      this.tzRounding = tzRounding;
      this.valueScript = valueScript;
    }

    @Override
    public void onMissing(int docId) {}

    @Override
    public void onValue(int docId, long value) {
      valueScript.setNextDocId(docId);
      long time = tzRounding.calc(value);
      double scriptValue = valueScript.runAsDouble();

      InternalFullDateHistogramFacet.FullEntry entry = entries.get(time);
      if (entry == null) {
        entry =
            new InternalFullDateHistogramFacet.FullEntry(
                time, 1, scriptValue, scriptValue, 1, scriptValue);
        entries.put(time, entry);
      } else {
        entry.count++;
        entry.totalCount++;
        entry.total += scriptValue;
        if (scriptValue < entry.min) {
          entry.min = scriptValue;
        }
        if (scriptValue > entry.max) {
          entry.max = scriptValue;
        }
      }
    }
  }
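DateHistogramProc pops its entries map in a field initializer; the matching push has to happen elsewhere, once the facet has been built from the accumulated FullEntry objects, and is not visible in this snippet. The accumulation itself is a plain get-or-create per time bucket. The same idiom in isolation, with a hypothetical Bucket standing in for FullEntry:

class Bucket {
  long count = 0;
  double total = 0;
  double min = Double.POSITIVE_INFINITY;
  double max = Double.NEGATIVE_INFINITY;
}

static void accumulate(java.util.Map<Long, Bucket> entries, long bucketKey, double value) {
  Bucket bucket = entries.get(bucketKey);
  if (bucket == null) { // first value seen for this time bucket
    bucket = new Bucket();
    entries.put(bucketKey, bucket);
  }
  bucket.count++;
  bucket.total += value;
  bucket.min = Math.min(bucket.min, value); // running min
  bucket.max = Math.max(bucket.max, value); // running max
}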
 public ReaderAggregator(BytesValues.WithOrdinals values) {
   this.values = values;
   this.counts = CacheRecycler.popIntArray(values.ordinals().getNumOrds()); // pooled; pushed back in facet()
 }
  @Override
  public Facet facet() {
    if (current != null) {
      missing += current.counts[0];
      total += current.total - current.counts[0];
      // only keep this reader's aggregator if it saw real values (ordinal 0 counts missing docs)
      if (current.values.ordinals().getNumOrds() > 1) {
        aggregators.add(current);
      }
    }

    AggregatorPriorityQueue queue = new AggregatorPriorityQueue(aggregators.size());

    for (ReaderAggregator aggregator : aggregators) {
      if (aggregator.nextPosition()) {
        queue.add(aggregator);
      }
    }

    // YUCK, we repeat the same logic, but once with an optimized priority queue for smaller sizes
    if (size < EntryPriorityQueue.LIMIT) {
      // optimized path: a bounded priority queue capped at the requested size
      EntryPriorityQueue ordered = new EntryPriorityQueue(size, comparatorType.comparator());

      while (queue.size() > 0) {
        ReaderAggregator agg = queue.top();
        // we need to makeSafe it, since we end up pushing it... (can we get around this?)
        BytesRef value = agg.values.makeSafe(agg.current);
        int count = 0;
        do {
          count += agg.counts[agg.position];
          if (agg.nextPosition()) {
            agg = queue.updateTop();
          } else {
            // we are done with this reader
            queue.pop();
            agg = queue.top();
          }
        } while (agg != null && value.equals(agg.current));

        if (count > minCount) {
          if (excluded != null && excluded.contains(value)) {
            continue;
          }
          // LUCENE 4 UPGRADE: use Lucene's RegexCapabilities
          if (matcher != null && !matcher.reset(value.utf8ToString()).matches()) {
            continue;
          }
          InternalStringTermsFacet.TermEntry entry =
              new InternalStringTermsFacet.TermEntry(value, count);
          ordered.insertWithOverflow(entry);
        }
      }
      InternalStringTermsFacet.TermEntry[] list =
          new InternalStringTermsFacet.TermEntry[ordered.size()];
      for (int i = ordered.size() - 1; i >= 0; i--) {
        list[i] = (InternalStringTermsFacet.TermEntry) ordered.pop();
      }

      for (ReaderAggregator aggregator : aggregators) {
        CacheRecycler.pushIntArray(aggregator.counts);
      }

      return new InternalStringTermsFacet(
          facetName, comparatorType, size, Arrays.asList(list), missing, total);
    }

    BoundedTreeSet<InternalStringTermsFacet.TermEntry> ordered =
        new BoundedTreeSet<InternalStringTermsFacet.TermEntry>(comparatorType.comparator(), size);

    while (queue.size() > 0) {
      ReaderAggregator agg = queue.top();
      // we need to makeSafe it, since we end up pushing it... (can we work around that?)
      BytesRef value = agg.values.makeSafe(agg.current);
      int count = 0;
      do {
        count += agg.counts[agg.position];
        if (agg.nextPosition()) {
          agg = queue.updateTop();
        } else {
          // we are done with this reader
          queue.pop();
          agg = queue.top();
        }
      } while (agg != null && value.equals(agg.current));

      if (count > minCount) {
        if (excluded != null && excluded.contains(value)) {
          continue;
        }
        // LUCENE 4 UPGRADE: use Lucene's RegexCapabilities
        if (matcher != null && !matcher.reset(value.utf8ToString()).matches()) {
          continue;
        }
        InternalStringTermsFacet.TermEntry entry =
            new InternalStringTermsFacet.TermEntry(value, count);
        ordered.add(entry);
      }
    }

    for (ReaderAggregator aggregator : aggregators) {
      CacheRecycler.pushIntArray(aggregator.counts);
    }

    return new InternalStringTermsFacet(facetName, comparatorType, size, ordered, missing, total);
  }
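The queue-driven loop above is a k-way merge: each ReaderAggregator is a sorted cursor over one segment's terms, and the AggregatorPriorityQueue always exposes the cursor with the smallest current term, so equal terms from different segments are summed before the merge advances. The same shape in a generic sketch; Cursor is a hypothetical stand-in for ReaderAggregator, with long terms and java.util.PriorityQueue replacing the BytesRef values and Lucene queue used above:

class Cursor {
  final long[] terms; // sorted ascending within one segment
  final int[] counts;
  int pos = 0;

  Cursor(long[] terms, int[] counts) {
    this.terms = terms;
    this.counts = counts;
  }

  boolean exhausted() {
    return pos >= terms.length;
  }

  long current() {
    return terms[pos];
  }
}

static void merge(Cursor[] cursors) {
  java.util.PriorityQueue<Cursor> queue =
      new java.util.PriorityQueue<Cursor>(
          Math.max(1, cursors.length),
          new java.util.Comparator<Cursor>() {
            @Override
            public int compare(Cursor a, Cursor b) {
              return Long.compare(a.current(), b.current());
            }
          });
  for (Cursor cursor : cursors) {
    if (!cursor.exhausted()) {
      queue.add(cursor);
    }
  }
  while (!queue.isEmpty()) {
    long term = queue.peek().current();
    int count = 0;
    // drain every cursor whose head equals the smallest term, summing its counts
    while (!queue.isEmpty() && queue.peek().current() == term) {
      Cursor cursor = queue.poll();
      count += cursor.counts[cursor.pos++];
      if (!cursor.exhausted()) {
        queue.add(cursor); // re-enter the queue positioned at the next term
      }
    }
    // (term, count) is now the merged count for this term across all segments
  }
}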
 public ReaderAggregator(ShortFieldData fieldData) {
   this.values = fieldData.values();
   this.counts = CacheRecycler.popIntArray(fieldData.values().length); // pooled; pushed back in facet()
 }
  @Override
  public Facet facet() {
    if (current != null) {
      missing += current.counts[0];
      total += current.total - current.counts[0];
      // only keep this reader's aggregator if it saw real values (values[0] is the missing-value slot)
      if (current.values.length > 1) {
        aggregators.add(current);
      }
    }

    AggregatorPriorityQueue queue = new AggregatorPriorityQueue(aggregators.size());

    for (ReaderAggregator aggregator : aggregators) {
      if (aggregator.nextPosition()) {
        queue.add(aggregator);
      }
    }

    // YUCK, we repeat the same logic, but once with an optimized priority queue for smaller sizes
    if (size < EntryPriorityQueue.LIMIT) {
      // optimized path: a bounded priority queue capped at the requested size
      EntryPriorityQueue ordered = new EntryPriorityQueue(size, comparatorType.comparator());

      while (queue.size() > 0) {
        ReaderAggregator agg = queue.top();
        short value = agg.current;
        int count = 0;
        do {
          count += agg.counts[agg.position];
          if (agg.nextPosition()) {
            agg = queue.updateTop();
          } else {
            // we are done with this reader
            queue.pop();
            agg = queue.top();
          }
        } while (agg != null && value == agg.current);

        if (count > minCount) {
          if (excluded == null || !excluded.contains(value)) {
            InternalShortTermsFacet.ShortEntry entry =
                new InternalShortTermsFacet.ShortEntry(value, count);
            ordered.insertWithOverflow(entry);
          }
        }
      }
      InternalShortTermsFacet.ShortEntry[] list =
          new InternalShortTermsFacet.ShortEntry[ordered.size()];
      for (int i = ordered.size() - 1; i >= 0; i--) {
        list[i] = (InternalShortTermsFacet.ShortEntry) ordered.pop();
      }

      for (ReaderAggregator aggregator : aggregators) {
        CacheRecycler.pushIntArray(aggregator.counts);
      }

      return new InternalShortTermsFacet(
          facetName, comparatorType, size, Arrays.asList(list), missing, total);
    }

    BoundedTreeSet<InternalShortTermsFacet.ShortEntry> ordered =
        new BoundedTreeSet<InternalShortTermsFacet.ShortEntry>(comparatorType.comparator(), size);

    while (queue.size() > 0) {
      ReaderAggregator agg = queue.top();
      short value = agg.current;
      int count = 0;
      do {
        count += agg.counts[agg.position];
        if (agg.nextPosition()) {
          agg = queue.updateTop();
        } else {
          // we are done with this reader
          queue.pop();
          agg = queue.top();
        }
      } while (agg != null && value == agg.current);

      if (count > minCount) {
        if (excluded == null || !excluded.contains(value)) {
          InternalShortTermsFacet.ShortEntry entry =
              new InternalShortTermsFacet.ShortEntry(value, count);
          ordered.add(entry);
        }
      }
    }

    for (ReaderAggregator aggregator : aggregators) {
      CacheRecycler.pushIntArray(aggregator.counts);
    }

    return new InternalShortTermsFacet(facetName, comparatorType, size, ordered, missing, total);
  }
  public TermsIntFacetCollector(
      String facetName,
      String fieldName,
      int size,
      TermsFacet.ComparatorType comparatorType,
      boolean allTerms,
      SearchContext context,
      ImmutableSet<String> excluded,
      String scriptLang,
      String script,
      Map<String, Object> params) {
    super(facetName);
    this.fieldDataCache = context.fieldDataCache();
    this.size = size;
    this.comparatorType = comparatorType;
    this.numberOfShards = context.numberOfShards();

    MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
    if (smartMappers == null || !smartMappers.hasMapper()) {
      throw new ElasticSearchIllegalArgumentException(
          "Field ["
              + fieldName
              + "] doesn't have a type, can't run terms int facet collector on it");
    } else {
      // add a type filter if there is an exact doc mapper associated with it
      if (smartMappers.hasDocMapper()) {
        setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
      }

      if (smartMappers.mapper().fieldDataType() != FieldDataType.DefaultTypes.INT) {
        throw new ElasticSearchIllegalArgumentException(
            "Field ["
                + fieldName
                + "] is not of int type, can't run terms int facet collector on it");
      }

      this.indexFieldName = smartMappers.mapper().names().indexName();
      this.fieldDataType = smartMappers.mapper().fieldDataType();
    }

    if (script != null) {
      this.script = context.scriptService().search(context.lookup(), scriptLang, script, params);
    } else {
      this.script = null;
    }

    if (this.script == null && excluded.isEmpty()) {
      aggregator = new StaticAggregatorValueProc(CacheRecycler.popIntIntMap());
    } else {
      aggregator = new AggregatorValueProc(CacheRecycler.popIntIntMap(), excluded, this.script);
    }

    if (allTerms) {
      try {
        for (IndexReader reader : context.searcher().subReaders()) {
          IntFieldData fieldData =
              (IntFieldData) fieldDataCache.cache(fieldDataType, reader, indexFieldName);
          fieldData.forEachValue(aggregator);
        }
      } catch (Exception e) {
        throw new FacetPhaseExecutionException(facetName, "failed to load all terms", e);
      }
    }
  }
Example 13
  public void close() {
    if (lifecycle.started()) {
      stop();
    }
    if (!lifecycle.moveToClosed()) {
      return;
    }

    ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
    logger.info("{{}}[{}]: closing ...", Version.full(), JvmInfo.jvmInfo().pid());

    StopWatch stopWatch = new StopWatch("node_close");
    stopWatch.start("http");
    if (settings.getAsBoolean("http.enabled", true)) {
      injector.getInstance(HttpServer.class).close();
    }
    stopWatch.stop().start("client");
    injector.getInstance(Client.class).close();
    stopWatch.stop().start("indices_cluster");
    injector.getInstance(IndicesClusterStateService.class).close();
    stopWatch.stop().start("indices");
    injector.getInstance(IndexingMemoryBufferController.class).close();
    injector.getInstance(IndicesService.class).close();
    stopWatch.stop().start("routing");
    injector.getInstance(RoutingService.class).close();
    stopWatch.stop().start("cluster");
    injector.getInstance(ClusterService.class).close();
    stopWatch.stop().start("discovery");
    injector.getInstance(DiscoveryService.class).close();
    stopWatch.stop().start("monitor");
    injector.getInstance(MonitorService.class).close();
    stopWatch.stop().start("gateway");
    injector.getInstance(GatewayService.class).close();
    stopWatch.stop().start("search");
    injector.getInstance(SearchService.class).close();
    stopWatch.stop().start("indexers");
    injector.getInstance(RiversManager.class).close();
    stopWatch.stop().start("rest");
    injector.getInstance(RestController.class).close();
    stopWatch.stop().start("transport");
    injector.getInstance(TransportService.class).close();

    for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) {
      stopWatch.stop().start("plugin(" + plugin.getName() + ")");
      injector.getInstance(plugin).close();
    }

    stopWatch.stop().start("node_cache");
    injector.getInstance(NodeCache.class).close();

    stopWatch.stop().start("script");
    injector.getInstance(ScriptService.class).close();

    stopWatch.stop().start("thread_pool");
    injector.getInstance(ThreadPool.class).shutdown();
    try {
      injector.getInstance(ThreadPool.class).awaitTermination(10, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
      // ignore
    }
    stopWatch.stop().start("thread_pool_force_shutdown");
    try {
      injector.getInstance(ThreadPool.class).shutdownNow();
    } catch (Exception e) {
      // ignore
    }
    stopWatch.stop();

    CacheRecycler.clear();
    CachedStreams.clear();
    ThreadLocals.clearReferencesThreadLocals();

    if (logger.isTraceEnabled()) {
      logger.trace("Close times for each service:\n{}", stopWatch.prettyPrint());
    }

    injector.getInstance(NodeEnvironment.class).close();
    Injectors.close(injector);

    logger.info("{{}}[{}]: closed", Version.full(), JvmInfo.jvmInfo().pid());
  }
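The recycler-relevant line in this last example is CacheRecycler.clear(): once every service has shut down, all pooled maps and arrays are dropped so the JVM can reclaim them. The timing around it uses a chained StopWatch, where each stop().start("...") closes one task and opens the next, and prettyPrint() renders the per-service breakdown. A minimal sketch of that chaining, assuming the StopWatch API as used above; httpServer and client are hypothetical handles:

StopWatch stopWatch = new StopWatch("node_close");
stopWatch.start("http"); // open the first task
httpServer.close();
stopWatch.stop().start("client"); // close "http", immediately open "client"
client.close();
stopWatch.stop(); // close the final task
System.out.println(stopWatch.prettyPrint()); // per-task timing table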