// Builds the final string terms facet by merging per-segment ordinal counts.
// Each ReaderAggregator holds one segment's term ordinals plus a parallel count
// array; aggregators are merged via a priority queue ordered by current term so
// equal terms across segments are summed in a single pass (k-way merge).
// Returns an InternalStringTermsFacet containing the top `size` entries.
@Override
public Facet facet() {
  if (current != null) {
    // Ordinal 0 is the "no value" bucket: it contributes to the missing count
    // and is subtracted from the total value count.
    missing += current.counts[0];
    total += current.total - current.counts[0];
    // if we have values for this one (more than just ordinal 0), add it
    if (current.values.ordinals().getNumOrds() > 1) {
      aggregators.add(current);
    }
  }
  // Seed the merge queue with every aggregator that has at least one term.
  AggregatorPriorityQueue queue = new AggregatorPriorityQueue(aggregators.size());
  for (ReaderAggregator aggregator : aggregators) {
    if (aggregator.nextPosition()) {
      queue.add(aggregator);
    }
  }
  // YACK, we repeat the same merge logic twice: once with a bounded
  // EntryPriorityQueue (cheaper) for small sizes, once with a BoundedTreeSet.
  if (size < EntryPriorityQueue.LIMIT) {
    // optimize to use priority queue sized to the requested facet size
    EntryPriorityQueue ordered = new EntryPriorityQueue(size, comparatorType.comparator());
    while (queue.size() > 0) {
      ReaderAggregator agg = queue.top();
      BytesRef value =
          agg.values.makeSafe(
              agg.current); // we need to makeSafe it, since we end up pushing it... (can we get
      // around this?)
      int count = 0;
      // Drain this term from every segment that currently sits on it, summing
      // the counts. updateTop() re-sorts the queue after the top advances.
      do {
        count += agg.counts[agg.position];
        if (agg.nextPosition()) {
          agg = queue.updateTop();
        } else {
          // we are done with this reader
          queue.pop();
          agg = queue.top(); // may be null when the queue empties — loop guard handles it
        }
      } while (agg != null && value.equals(agg.current));
      if (count > minCount) {
        // Apply the exclusion set and the regex include filter before queueing.
        if (excluded != null && excluded.contains(value)) {
          continue;
        }
        // LUCENE 4 UPGRADE: use Lucene's RegexCapabilities
        if (matcher != null && !matcher.reset(value.utf8ToString()).matches()) {
          continue;
        }
        InternalStringTermsFacet.TermEntry entry =
            new InternalStringTermsFacet.TermEntry(value, count);
        ordered.insertWithOverflow(entry);
      }
    }
    // Pop the heap into an array; popping yields ascending order, so fill the
    // array back-to-front to end up with the facet's display order.
    InternalStringTermsFacet.TermEntry[] list =
        new InternalStringTermsFacet.TermEntry[ordered.size()];
    for (int i = ordered.size() - 1; i >= 0; i--) {
      list[i] = (InternalStringTermsFacet.TermEntry) ordered.pop();
    }
    // Return the recycled count arrays to the cache before building the result.
    for (ReaderAggregator aggregator : aggregators) {
      CacheRecycler.pushIntArray(aggregator.counts);
    }
    return new InternalStringTermsFacet(
        facetName, comparatorType, size, Arrays.asList(list), missing, total);
  }
  // Large `size`: same merge, but collect into a BoundedTreeSet instead of the
  // fixed-capacity priority queue.
  BoundedTreeSet<InternalStringTermsFacet.TermEntry> ordered =
      new BoundedTreeSet<InternalStringTermsFacet.TermEntry>(comparatorType.comparator(), size);
  while (queue.size() > 0) {
    ReaderAggregator agg = queue.top();
    BytesRef value =
        agg.values.makeSafe(
            agg.current); // we need to makeSafe it, since we end up pushing it... (can we work
    // around that?)
    int count = 0;
    do {
      count += agg.counts[agg.position];
      if (agg.nextPosition()) {
        agg = queue.updateTop();
      } else {
        // we are done with this reader
        queue.pop();
        agg = queue.top();
      }
    } while (agg != null && value.equals(agg.current));
    if (count > minCount) {
      if (excluded != null && excluded.contains(value)) {
        continue;
      }
      // LUCENE 4 UPGRADE: use Lucene's RegexCapabilities
      if (matcher != null && !matcher.reset(value.utf8ToString()).matches()) {
        continue;
      }
      InternalStringTermsFacet.TermEntry entry =
          new InternalStringTermsFacet.TermEntry(value, count);
      ordered.add(entry);
    }
  }
  for (ReaderAggregator aggregator : aggregators) {
    CacheRecycler.pushIntArray(aggregator.counts);
  }
  return new InternalStringTermsFacet(facetName, comparatorType, size, ordered, missing, total);
}
// Builds the final short terms facet by merging per-segment value counts.
// Mirrors the string variant: aggregators are k-way merged through a priority
// queue ordered by current short value, summing counts for equal values across
// segments. Returns an InternalShortTermsFacet with the top `size` entries.
// NOTE(review): unlike the string variant there is no regex matcher here, only
// the exclusion set — presumably regex filtering is meaningless for numerics.
@Override
public Facet facet() {
  if (current != null) {
    // Slot 0 is the "no value" bucket: counts toward missing, subtracted from total.
    missing += current.counts[0];
    total += current.total - current.counts[0];
    // if we have values for this one (more than just the missing slot), add it
    if (current.values.length > 1) {
      aggregators.add(current);
    }
  }
  // Seed the merge queue with every aggregator that has at least one value.
  AggregatorPriorityQueue queue = new AggregatorPriorityQueue(aggregators.size());
  for (ReaderAggregator aggregator : aggregators) {
    if (aggregator.nextPosition()) {
      queue.add(aggregator);
    }
  }
  // YACK, we repeat the same merge logic twice: once with a bounded
  // EntryPriorityQueue (cheaper) for small sizes, once with a BoundedTreeSet.
  if (size < EntryPriorityQueue.LIMIT) {
    // optimize to use priority queue sized to the requested facet size
    EntryPriorityQueue ordered = new EntryPriorityQueue(size, comparatorType.comparator());
    while (queue.size() > 0) {
      ReaderAggregator agg = queue.top();
      short value = agg.current; // primitive copy — no makeSafe needed, unlike BytesRef
      int count = 0;
      // Drain this value from every segment currently positioned on it,
      // summing counts; updateTop() re-sorts after the top advances.
      do {
        count += agg.counts[agg.position];
        if (agg.nextPosition()) {
          agg = queue.updateTop();
        } else {
          // we are done with this reader
          queue.pop();
          agg = queue.top(); // may be null when the queue empties — loop guard handles it
        }
      } while (agg != null && value == agg.current);
      if (count > minCount) {
        if (excluded == null || !excluded.contains(value)) {
          InternalShortTermsFacet.ShortEntry entry =
              new InternalShortTermsFacet.ShortEntry(value, count);
          ordered.insertWithOverflow(entry);
        }
      }
    }
    // Pop the heap into an array back-to-front so the array ends up in the
    // facet's display order (popping yields ascending order).
    InternalShortTermsFacet.ShortEntry[] list =
        new InternalShortTermsFacet.ShortEntry[ordered.size()];
    for (int i = ordered.size() - 1; i >= 0; i--) {
      list[i] = (InternalShortTermsFacet.ShortEntry) ordered.pop();
    }
    // Return the recycled count arrays to the cache before building the result.
    for (ReaderAggregator aggregator : aggregators) {
      CacheRecycler.pushIntArray(aggregator.counts);
    }
    return new InternalShortTermsFacet(
        facetName, comparatorType, size, Arrays.asList(list), missing, total);
  }
  // Large `size`: same merge, but collect into a BoundedTreeSet instead of the
  // fixed-capacity priority queue.
  BoundedTreeSet<InternalShortTermsFacet.ShortEntry> ordered =
      new BoundedTreeSet<InternalShortTermsFacet.ShortEntry>(comparatorType.comparator(), size);
  while (queue.size() > 0) {
    ReaderAggregator agg = queue.top();
    short value = agg.current;
    int count = 0;
    do {
      count += agg.counts[agg.position];
      if (agg.nextPosition()) {
        agg = queue.updateTop();
      } else {
        // we are done with this reader
        queue.pop();
        agg = queue.top();
      }
    } while (agg != null && value == agg.current);
    if (count > minCount) {
      if (excluded == null || !excluded.contains(value)) {
        InternalShortTermsFacet.ShortEntry entry =
            new InternalShortTermsFacet.ShortEntry(value, count);
        ordered.add(entry);
      }
    }
  }
  for (ReaderAggregator aggregator : aggregators) {
    CacheRecycler.pushIntArray(aggregator.counts);
  }
  return new InternalShortTermsFacet(facetName, comparatorType, size, ordered, missing, total);
}