@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
    values = indexFieldData.load(context).getBytesValues(parentType);
    if (values != null) {
        ordinals = values.ordinals();
        final long maxOrd = ordinals.getMaxOrd();
        if (parentIdsIndex == null) {
            parentIdsIndex = bigArrays.newLongArray(BigArrays.overSize(maxOrd), false);
        } else if (parentIdsIndex.size() < maxOrd) {
            parentIdsIndex = bigArrays.grow(parentIdsIndex, maxOrd);
        }
        // -1 marks ordinals that have not been resolved to a parent id in this segment yet
        parentIdsIndex.fill(0, maxOrd, -1L);
    }
}
public PercolateContext(
        PercolateShardRequest request,
        SearchShardTarget searchShardTarget,
        IndexShard indexShard,
        IndexService indexService,
        PageCacheRecycler pageCacheRecycler,
        BigArrays bigArrays,
        ScriptService scriptService,
        Query aliasFilter,
        ParseFieldMatcher parseFieldMatcher) {
    super(parseFieldMatcher, request);
    this.indexShard = indexShard;
    this.indexService = indexService;
    this.fieldDataService = indexService.fieldData();
    this.searchShardTarget = searchShardTarget;
    this.percolateQueryRegistry = indexShard.percolateRegistry();
    this.types = new String[] { request.documentType() };
    this.pageCacheRecycler = pageCacheRecycler;
    this.bigArrays = bigArrays.withCircuitBreaking();
    this.querySearchResult = new QuerySearchResult(0, searchShardTarget);
    this.engineSearcher = indexShard.acquireSearcher("percolate");
    this.searcher = new ContextIndexSearcher(this, engineSearcher);
    this.scriptService = scriptService;
    this.numberOfShards = request.getNumberOfShards();
    this.aliasFilter = aliasFilter;
    this.startTime = request.getStartTime();
}
public DefaultSearchContext(
        long id,
        ShardSearchRequest request,
        SearchShardTarget shardTarget,
        Engine.Searcher engineSearcher,
        IndexService indexService,
        IndexShard indexShard,
        ScriptService scriptService,
        PageCacheRecycler pageCacheRecycler,
        BigArrays bigArrays,
        Counter timeEstimateCounter) {
    this.id = id;
    this.request = request;
    this.searchType = request.searchType();
    this.shardTarget = shardTarget;
    this.engineSearcher = engineSearcher;
    this.scriptService = scriptService;
    this.pageCacheRecycler = pageCacheRecycler;
    // SearchContexts use a BigArrays that can circuit break
    this.bigArrays = bigArrays.withCircuitBreaking();
    this.dfsResult = new DfsSearchResult(id, shardTarget);
    this.queryResult = new QuerySearchResult(id, shardTarget);
    this.fetchResult = new FetchSearchResult(id, shardTarget);
    this.indexShard = indexShard;
    this.indexService = indexService;
    this.searcher = new ContextIndexSearcher(this, engineSearcher);
    // initialize the filtering alias based on the provided filters
    aliasFilter = indexService.aliasesService().aliasFilter(request.filteringAliases());
    this.timeEstimateCounter = timeEstimateCounter;
}
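// Why withCircuitBreaking() matters here (a minimal conceptual sketch, not the actual
// Elasticsearch implementation): the returned BigArrays charges every allocation
// against a request circuit breaker, so an oversized search fails with an exception
// instead of exhausting heap. The budget-tracking class below is a hypothetical
// stand-in for that behavior.
final class BreakingAllocatorSketch {
    private final long limitBytes;
    private long usedBytes;

    BreakingAllocatorSketch(long limitBytes) {
        this.limitBytes = limitBytes;
    }

    // Charge an allocation before handing out memory; throw once the budget is exceeded.
    long[] newLongArray(int size) {
        long cost = 8L * size;
        if (usedBytes + cost > limitBytes) {
            throw new IllegalStateException(
                    "circuit breaker tripped: " + (usedBytes + cost) + " > " + limitBytes + " bytes");
        }
        usedBytes += cost;
        return new long[size];
    }
}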
@Override
public void postCollect() {
    // merge the per-bucket bitsets so every visited ordinal is hashed exactly once
    final FixedBitSet allVisitedOrds = new FixedBitSet(maxOrd);
    for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
        final FixedBitSet bits = visitedOrds.get(bucket);
        if (bits != null) {
            allVisitedOrds.or(bits);
        }
    }

    final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash =
            new org.elasticsearch.common.hash.MurmurHash3.Hash128();
    try (LongArray hashes = bigArrays.newLongArray(maxOrd, false)) {
        // pass 1: hash each distinct value once
        for (int ord = allVisitedOrds.nextSetBit(0);
                ord != -1;
                ord = ord + 1 < maxOrd ? allVisitedOrds.nextSetBit(ord + 1) : -1) {
            final BytesRef value = values.lookupOrd(ord);
            org.elasticsearch.common.hash.MurmurHash3.hash128(
                    value.bytes, value.offset, value.length, 0, hash);
            hashes.set(ord, hash.h1);
        }
        // pass 2: replay each bucket's ordinals against the precomputed hashes
        for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
            final FixedBitSet bits = visitedOrds.get(bucket);
            if (bits != null) {
                for (int ord = bits.nextSetBit(0);
                        ord != -1;
                        ord = ord + 1 < maxOrd ? bits.nextSetBit(ord + 1) : -1) {
                    counts.collect(bucket, hashes.get(ord));
                }
            }
        }
    }
}
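// A hedged sketch of the hashing step above in isolation: each distinct value is hashed
// once with the 128-bit MurmurHash3 helper and only the lower 64 bits (h1) feed the
// HyperLogLog++ sketch. hashOf() and HashSketch are illustrative names, not part of the source.
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.hash.MurmurHash3;

final class HashSketch {
    private final MurmurHash3.Hash128 scratch = new MurmurHash3.Hash128();

    long hashOf(BytesRef value) {
        MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, scratch);
        return scratch.h1; // 64 bits are enough input for cardinality estimation
    }
}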
public DefaultSearchContext(
        long id,
        ShardSearchRequest request,
        SearchShardTarget shardTarget,
        Engine.Searcher engineSearcher,
        IndexService indexService,
        IndexShard indexShard,
        ScriptService scriptService,
        PageCacheRecycler pageCacheRecycler,
        BigArrays bigArrays,
        Counter timeEstimateCounter,
        ParseFieldMatcher parseFieldMatcher,
        TimeValue timeout) {
    super(parseFieldMatcher, request);
    this.id = id;
    this.request = request;
    this.searchType = request.searchType();
    this.shardTarget = shardTarget;
    this.engineSearcher = engineSearcher;
    this.scriptService = scriptService;
    this.pageCacheRecycler = pageCacheRecycler;
    // SearchContexts use a BigArrays that can circuit break
    this.bigArrays = bigArrays.withCircuitBreaking();
    this.dfsResult = new DfsSearchResult(id, shardTarget);
    this.queryResult = new QuerySearchResult(id, shardTarget);
    this.fetchResult = new FetchSearchResult(id, shardTarget);
    this.indexShard = indexShard;
    this.indexService = indexService;
    this.searcher = new ContextIndexSearcher(this, engineSearcher);
    this.timeEstimateCounter = timeEstimateCounter;
    this.timeoutInMillis = timeout.millis();
}
OrdinalsCollector(HyperLogLogPlusPlus counts, RandomAccessOrds values, BigArrays bigArrays) {
    // per-segment ordinals must fit in an int-indexed FixedBitSet
    Preconditions.checkArgument(values.getValueCount() <= Integer.MAX_VALUE);
    maxOrd = (int) values.getValueCount();
    this.bigArrays = bigArrays;
    this.counts = counts;
    this.values = values;
    visitedOrds = bigArrays.newObjectArray(1);
}
private ParentIdAndScoreCollector(
        ParentChildIndexFieldData indexFieldData,
        String parentType,
        SearchContext searchContext) {
    this.parentType = parentType;
    this.indexFieldData = indexFieldData;
    this.bigArrays = searchContext.bigArrays();
    this.parentIds = new BytesRefHash(512, bigArrays);
    this.scores = bigArrays.newFloatArray(512, false);
}
@Override
public void reset() {
    // shrink list of pages
    if (bytes.size() > BigArrays.PAGE_SIZE_IN_BYTES) {
        bytes = bigarrays.resize(bytes, BigArrays.PAGE_SIZE_IN_BYTES);
    }
    // go back to start
    count = 0;
}
@Override
public void collect(int doc, long bucketOrd) {
    visitedOrds = bigArrays.grow(visitedOrds, bucketOrd + 1);
    FixedBitSet bits = visitedOrds.get(bucketOrd);
    if (bits == null) {
        // allocate the per-bucket bitset lazily, on the bucket's first document
        bits = new FixedBitSet(maxOrd);
        visitedOrds.set(bucketOrd, bits);
    }
    values.setDocument(doc);
    final int valueCount = values.cardinality();
    for (int i = 0; i < valueCount; ++i) {
        bits.set((int) values.ordAt(i));
    }
}
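// Hedged sketch of the lazy per-bucket pattern above: grow the ObjectArray to cover
// the bucket ordinal, then materialize the per-bucket FixedBitSet only when a bucket
// sees its first document. PerBucketBits, bitsFor(), and the fields are illustrative names.
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.ObjectArray;

final class PerBucketBits {
    private final BigArrays bigArrays;
    private final int maxOrd;
    private ObjectArray<FixedBitSet> visitedOrds;

    PerBucketBits(BigArrays bigArrays, int maxOrd) {
        this.bigArrays = bigArrays;
        this.maxOrd = maxOrd;
        this.visitedOrds = bigArrays.newObjectArray(1);
    }

    FixedBitSet bitsFor(long bucketOrd) {
        visitedOrds = bigArrays.grow(visitedOrds, bucketOrd + 1); // no-op when already large enough
        FixedBitSet bits = visitedOrds.get(bucketOrd);
        if (bits == null) {
            bits = new FixedBitSet(maxOrd); // one bit per segment ordinal
            visitedOrds.set(bucketOrd, bits);
        }
        return bits;
    }
}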
public MinAggregator(
        String name,
        long estimatedBucketsCount,
        NumericValuesSource valuesSource,
        AggregationContext context,
        Aggregator parent) {
    super(name, estimatedBucketsCount, context, parent);
    this.valuesSource = valuesSource;
    if (valuesSource != null) {
        final long initialSize = estimatedBucketsCount < 2 ? 1 : estimatedBucketsCount;
        mins = BigArrays.newDoubleArray(initialSize, context.pageCacheRecycler(), false);
        // +Infinity is the identity element for min()
        mins.fill(0, mins.size(), Double.POSITIVE_INFINITY);
    }
}
public void testEquals() {
    int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5));
    ByteArray ba1 = bigarrays.newByteArray(length, false);
    ByteArray ba2 = bigarrays.newByteArray(length, false);

    // copy contents
    for (long i = 0; i < length; i++) {
        ba2.set(i, ba1.get(i));
    }

    // get refs & compare
    BytesReference pbr = new PagedBytesReference(bigarrays, ba1, length);
    BytesReference pbr2 = new PagedBytesReference(bigarrays, ba2, length);
    assertEquals(pbr, pbr2);
}
@Override
public void collect(int doc, long owningBucketOrdinal) throws IOException {
    assert valuesSource != null : "collect must only be called if #shouldCollect returns true";
    DoubleValues values = valuesSource.doubleValues();
    if (values == null || values.setDocument(doc) == 0) {
        return;
    }
    if (owningBucketOrdinal >= mins.size()) {
        long from = mins.size();
        mins = BigArrays.grow(mins, owningBucketOrdinal + 1);
        // initialize only the newly added tail; existing minimums must survive the resize
        mins.fill(from, mins.size(), Double.POSITIVE_INFINITY);
    }
    mins.set(owningBucketOrdinal, Math.min(values.nextValue(), mins.get(owningBucketOrdinal)));
}
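// Sketch of the resize-and-refill idiom above: when a new bucket ordinal forces the
// array to grow, only the freshly added tail is filled with the identity element for
// min (+Infinity), so minimums already recorded survive the resize. MinArraySketch and
// growMins() are illustrative names built on the same static BigArrays API as the aggregator.
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.DoubleArray;

final class MinArraySketch {
    static DoubleArray growMins(DoubleArray mins, long requiredOrd) {
        if (requiredOrd >= mins.size()) {
            long from = mins.size();
            mins = BigArrays.grow(mins, requiredOrd + 1);
            mins.fill(from, mins.size(), Double.POSITIVE_INFINITY); // identity for min()
        }
        return mins;
    }
}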
public void testSliceEquals() {
    int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5));
    ByteArray ba1 = bigarrays.newByteArray(length, false);
    BytesReference pbr = new PagedBytesReference(bigarrays, ba1, length);

    // test equality of slices
    int sliceFrom = randomIntBetween(0, pbr.length());
    int sliceLength = pbr.length() - sliceFrom; // the slice always runs to the end of the reference
    BytesReference slice1 = pbr.slice(sliceFrom, sliceLength);
    BytesReference slice2 = pbr.slice(sliceFrom, sliceLength);
    assertArrayEquals(slice1.toBytes(), slice2.toBytes());

    // test a slice with same offset but different length,
    // unless randomized testing gave us a 0-length slice
    if (sliceLength > 0) {
        BytesReference slice3 = pbr.slice(sliceFrom, sliceLength / 2);
        assertFalse(Arrays.equals(slice1.toBytes(), slice3.toBytes()));
    }
}
@Override
public void collect(int doc) throws IOException {
    if (values != null) {
        long ord = ordinals.getOrd(doc);
        long parentIdx = parentIdsIndex.get(ord);
        if (parentIdx < 0) {
            // first time this ordinal is seen in the current segment
            final BytesRef bytes = values.getValueByOrd(ord);
            final int hash = values.currentValueHash();
            parentIdx = parentIds.add(bytes, hash);
            if (parentIdx < 0) {
                // the parent id was already in the hash: recover its index and score it
                parentIdx = -parentIdx - 1;
                doScore(parentIdx);
            } else {
                // a brand new parent id: record its first score
                scores = bigArrays.grow(scores, parentIdx + 1);
                scores.set(parentIdx, scorer.score());
            }
            // cache the ord -> index mapping so later docs take the fast path
            parentIdsIndex.set(ord, parentIdx);
        } else {
            doScore(parentIdx);
        }
    }
}
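// The add() return-value convention relied on above, as a minimal sketch: BytesRefHash
// returns a fresh id (>= 0) when the key is new, and -1 - existingId when the key is
// already present. idOf() is an illustrative helper, not part of the source.
static long idOf(long addResult) {
    return addResult >= 0
            ? addResult         // newly inserted: addResult is the assigned id
            : -addResult - 1;   // already present: recover the existing id
}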
protected BytesStreamOutput(int expectedSize, BigArrays bigarrays) {
    this.bigarrays = bigarrays;
    this.bytes = bigarrays.newByteArray(expectedSize);
}
private void ensureCapacity(int offset) {
    // grow the page-backed buffer so that writes up to offset fit
    bytes = bigarrays.grow(bytes, offset);
}
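// A hedged usage sketch tying the BytesStreamOutput pieces above together, assuming
// a public no-arg constructor that delegates to the BigArrays-backed one: writes grow
// the page-backed buffer on demand, and reset() shrinks it back to one page so pooled
// streams do not pin large allocations.
import java.io.IOException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.util.BigArrays;

final class StreamDemo {
    static void demo() throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeBytes(new byte[4 * BigArrays.PAGE_SIZE_IN_BYTES]); // triggers ensureCapacity() growth
        out.reset(); // shrinks the backing ByteArray back to a single page
    }
}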