public void addDeletion(SearchRequestBuilder searchRequest) {
  searchRequest
      .addSort("_doc", SortOrder.ASC)
      .setScroll(TimeValue.timeValueMinutes(5))
      .setSize(100)
      // load only doc ids, not _source fields
      .setFetchSource(false);

  // This search is synchronous. An optimization would be to make it non-blocking,
  // but that would require tracking pending requests in close().
  // The same semaphore can't be reused because of a potential deadlock (it would
  // require acquiring two locks).
  SearchResponse searchResponse = searchRequest.get();
  while (true) {
    SearchHit[] hits = searchResponse.getHits().getHits();
    for (SearchHit hit : hits) {
      DeleteRequestBuilder deleteRequestBuilder =
          client.prepareDelete(hit.index(), hit.type(), hit.getId());
      SearchHitField routing = hit.field("_routing");
      if (routing != null) {
        deleteRequestBuilder.setRouting(routing.getValue());
      }
      add(deleteRequestBuilder.request());
    }
    String scrollId = searchResponse.getScrollId();
    searchResponse =
        client.prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMinutes(5)).get();
    if (hits.length == 0) {
      client.nativeClient().prepareClearScroll().addScrollId(scrollId).get();
      break;
    }
  }
}
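A minimal usage sketch of the scroll-and-delete pattern above. The surrounding bulk-indexer class, its start()/stop() lifecycle, the esClient helper and the index/field names are assumptions for illustration; only addDeletion(SearchRequestBuilder) comes from the method itself.

// Hypothetical caller (names other than addDeletion() are assumed): queue deletions
// for every document matching a term query, then flush them in bulk.
void deleteByProject(EsClient esClient, BulkIndexer bulk, String projectUuid) {
  bulk.start(); // assumed lifecycle method
  SearchRequestBuilder search =
      esClient
          .prepareSearch("issues") // assumed index name
          .setQuery(QueryBuilders.termQuery("projectUuid", projectUuid));
  bulk.addDeletion(search); // scrolls over matching doc ids and adds delete requests
  bulk.stop(); // assumed: sends the queued bulk requests
}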
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
  if (context.getFetchSubPhaseContext(CONTEXT_FACTORY).hitExecutionNeeded() == false) {
    return;
  }
  String field = context.getFetchSubPhaseContext(CONTEXT_FACTORY).getField();
  if (hitContext.hit().fieldsOrNull() == null) {
    hitContext.hit().fields(new HashMap<>());
  }
  SearchHitField hitField = hitContext.hit().fields().get(NAMES[0]);
  if (hitField == null) {
    hitField = new InternalSearchHitField(NAMES[0], new ArrayList<>(1));
    hitContext.hit().fields().put(NAMES[0], hitField);
  }
  TermVectorsResponse termVector =
      TermVectorsService.getTermVectors(
          context.indexShard(),
          new TermVectorsRequest(
              context.indexShard().shardId().getIndex().getName(),
              hitContext.hit().type(),
              hitContext.hit().id()));
  try {
    Map<String, Integer> tv = new HashMap<>();
    TermsEnum terms = termVector.getFields().terms(field).iterator();
    BytesRef term;
    while ((term = terms.next()) != null) {
      tv.put(term.utf8ToString(), terms.postings(null, PostingsEnum.ALL).freq());
    }
    hitField.values().add(tv);
  } catch (IOException e) {
    ESLoggerFactory.getLogger(FetchSubPhasePluginIT.class.getName())
        .info("Swallowed exception", e);
  }
}
private InternalSearchHit createSearchHit(
    SearchContext context,
    FieldsVisitor fieldsVisitor,
    int docId,
    int subDocId,
    List<String> extractFieldNames,
    LeafReaderContext subReaderContext) {
  loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId);
  fieldsVisitor.postProcess(context.mapperService());

  Map<String, SearchHitField> searchFields = null;
  if (!fieldsVisitor.fields().isEmpty()) {
    searchFields = new HashMap<>(fieldsVisitor.fields().size());
    for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) {
      searchFields.put(
          entry.getKey(), new InternalSearchHitField(entry.getKey(), entry.getValue()));
    }
  }

  DocumentMapper documentMapper =
      context.mapperService().documentMapper(fieldsVisitor.uid().type());
  Text typeText;
  if (documentMapper == null) {
    typeText = new StringAndBytesText(fieldsVisitor.uid().type());
  } else {
    typeText = documentMapper.typeText();
  }
  InternalSearchHit searchHit =
      new InternalSearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields);

  // go over and extract fields that are not mapped / stored
  SourceLookup sourceLookup = context.lookup().source();
  sourceLookup.setSegmentAndDocument(subReaderContext, subDocId);
  if (fieldsVisitor.source() != null) {
    sourceLookup.setSource(fieldsVisitor.source());
  }
  if (extractFieldNames != null) {
    for (String extractFieldName : extractFieldNames) {
      List<Object> values = context.lookup().source().extractRawValues(extractFieldName);
      if (!values.isEmpty()) {
        if (searchHit.fieldsOrNull() == null) {
          searchHit.fields(new HashMap<String, SearchHitField>(2));
        }
        SearchHitField hitField = searchHit.fields().get(extractFieldName);
        if (hitField == null) {
          hitField = new InternalSearchHitField(extractFieldName, new ArrayList<>(2));
          searchHit.fields().put(extractFieldName, hitField);
        }
        for (Object value : values) {
          hitField.values().add(value);
        }
      }
    }
  }
  return searchHit;
}
@Override
public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext)
    throws IOException {
  final String field;
  final String term;
  if (isParentHit(hitContext.hit())) {
    field = ParentFieldMapper.NAME;
    term = Uid.createUid(hitContext.hit().type(), hitContext.hit().id());
  } else if (isChildHit(hitContext.hit())) {
    field = UidFieldMapper.NAME;
    SearchHitField parentField = hitContext.hit().field(ParentFieldMapper.NAME);
    if (parentField != null) {
      term = parentField.getValue();
    } else {
      SingleFieldsVisitor fieldsVisitor = new SingleFieldsVisitor(ParentFieldMapper.NAME);
      hitContext.reader().document(hitContext.docId(), fieldsVisitor);
      if (fieldsVisitor.fields().isEmpty()) {
        return Lucene.EMPTY_TOP_DOCS;
      }
      term = (String) fieldsVisitor.fields().get(ParentFieldMapper.NAME).get(0);
    }
  } else {
    return Lucene.EMPTY_TOP_DOCS;
  }

  // Only include docs that have the current hit as parent
  Filter filter = new TermFilter(new Term(field, term));
  // Only include docs that have this inner hits type
  Filter typeFilter = documentMapper.typeFilter();

  if (size() == 0) {
    TotalHitCountCollector collector = new TotalHitCountCollector();
    context
        .searcher()
        .search(
            new XFilteredQuery(query, new AndFilter(Arrays.asList(filter, typeFilter))),
            collector);
    return new TopDocs(collector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0);
  } else {
    int topN = from() + size();
    TopDocsCollector topDocsCollector;
    if (sort() != null) {
      topDocsCollector =
          TopFieldCollector.create(sort(), topN, true, trackScores(), trackScores(), false);
    } else {
      topDocsCollector = TopScoreDocCollector.create(topN, false);
    }
    context
        .searcher()
        .search(
            new XFilteredQuery(query, new AndFilter(Arrays.asList(filter, typeFilter))),
            topDocsCollector);
    return topDocsCollector.topDocs(from(), size());
  }
}
private String buildJSONFromFields(Collection<SearchHitField> values) {
  JsonFactory nodeFactory = new JsonFactory();
  try {
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    JsonGenerator generator = nodeFactory.createGenerator(stream, JsonEncoding.UTF8);
    generator.writeStartObject();
    for (SearchHitField value : values) {
      if (value.getValues().size() > 1) {
        generator.writeArrayFieldStart(value.getName());
        for (Object val : value.getValues()) {
          generator.writeObject(val);
        }
        generator.writeEndArray();
      } else {
        generator.writeObjectField(value.getName(), value.getValue());
      }
    }
    generator.writeEndObject();
    generator.flush();
    return new String(stream.toByteArray(), Charset.forName("UTF-8"));
  } catch (IOException e) {
    return null;
  }
}
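A small usage sketch of buildJSONFromFields, assuming the InternalSearchHitField(name, values) constructor seen in the other methods above; the field names and values are invented for illustration. A single-valued field is written as a plain property, a multi-valued field as a JSON array.

// Illustrative only:
List<SearchHitField> fields = new ArrayList<>();
fields.add(new InternalSearchHitField("user", Arrays.<Object>asList("kimchy")));
fields.add(new InternalSearchHitField("tags", Arrays.<Object>asList("search", "lucene")));
String json = buildJSONFromFields(fields);
// expected shape: {"user":"kimchy","tags":["search","lucene"]}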
private InternalSearchHit createNestedSearchHit(
    SearchContext context,
    int nestedTopDocId,
    int nestedSubDocId,
    int rootSubDocId,
    List<String> extractFieldNames,
    boolean loadAllStored,
    Set<String> fieldNames,
    LeafReaderContext subReaderContext)
    throws IOException {
  // Also if highlighting is requested on nested documents we need to fetch the _source from the
  // root document, otherwise highlighting will attempt to fetch the _source from the nested doc,
  // which will fail, because the entire _source is only stored with the root document.
  final FieldsVisitor rootFieldsVisitor =
      new FieldsVisitor(
          context.sourceRequested() || extractFieldNames != null || context.highlight() != null);
  loadStoredFields(context, subReaderContext, rootFieldsVisitor, rootSubDocId);
  rootFieldsVisitor.postProcess(context.mapperService());

  Map<String, SearchHitField> searchFields =
      getSearchFields(context, nestedSubDocId, loadAllStored, fieldNames, subReaderContext);
  DocumentMapper documentMapper =
      context.mapperService().documentMapper(rootFieldsVisitor.uid().type());
  SourceLookup sourceLookup = context.lookup().source();
  sourceLookup.setSegmentAndDocument(subReaderContext, nestedSubDocId);

  ObjectMapper nestedObjectMapper =
      documentMapper.findNestedObjectMapper(nestedSubDocId, context, subReaderContext);
  assert nestedObjectMapper != null;
  InternalSearchHit.InternalNestedIdentity nestedIdentity =
      getInternalNestedIdentity(
          context, nestedSubDocId, subReaderContext, documentMapper, nestedObjectMapper);

  BytesReference source = rootFieldsVisitor.source();
  if (source != null) {
    Tuple<XContentType, Map<String, Object>> tuple = XContentHelper.convertToMap(source, true);
    Map<String, Object> sourceAsMap = tuple.v2();

    List<Map<String, Object>> nestedParsedSource;
    SearchHit.NestedIdentity nested = nestedIdentity;
    do {
      Object extractedValue =
          XContentMapValues.extractValue(nested.getField().string(), sourceAsMap);
      if (extractedValue == null) {
        // The nested objects may not exist in the _source, because it was filtered because of
        // _source filtering
        break;
      } else if (extractedValue instanceof List) {
        // nested field has an array value in the _source
        nestedParsedSource = (List<Map<String, Object>>) extractedValue;
      } else if (extractedValue instanceof Map) {
        // nested field has an object value in the _source. This just means the nested field has
        // just one inner object, which is valid, but uncommon.
        nestedParsedSource = ImmutableList.of((Map<String, Object>) extractedValue);
      } else {
        throw new IllegalStateException("extracted source isn't an object or an array");
      }
      sourceAsMap = nestedParsedSource.get(nested.getOffset());
      nested = nested.getChild();
    } while (nested != null);

    context.lookup().source().setSource(sourceAsMap);
    XContentType contentType = tuple.v1();
    BytesReference nestedSource = contentBuilder(contentType).map(sourceAsMap).bytes();
    context.lookup().source().setSource(nestedSource);
    context.lookup().source().setSourceContentType(contentType);
  }

  InternalSearchHit searchHit =
      new InternalSearchHit(
          nestedTopDocId,
          rootFieldsVisitor.uid().id(),
          documentMapper.typeText(),
          nestedIdentity,
          searchFields);
  if (extractFieldNames != null) {
    for (String extractFieldName : extractFieldNames) {
      List<Object> values = context.lookup().source().extractRawValues(extractFieldName);
      if (!values.isEmpty()) {
        if (searchHit.fieldsOrNull() == null) {
          searchHit.fields(new HashMap<String, SearchHitField>(2));
        }
        SearchHitField hitField = searchHit.fields().get(extractFieldName);
        if (hitField == null) {
          hitField = new InternalSearchHitField(extractFieldName, new ArrayList<>(2));
          searchHit.fields().put(extractFieldName, hitField);
        }
        for (Object value : values) {
          hitField.values().add(value);
        }
      }
    }
  }
  return searchHit;
}
public void writeTo(StreamOutput out, InternalSearchHits.StreamContext context)
    throws IOException {
  out.writeFloat(score);
  out.writeText(id);
  out.writeText(type);
  out.writeOptionalStreamable(nestedIdentity);
  out.writeLong(version);
  out.writeBytesReference(source);
  if (explanation == null) {
    out.writeBoolean(false);
  } else {
    out.writeBoolean(true);
    writeExplanation(out, explanation);
  }
  if (fields == null) {
    out.writeVInt(0);
  } else {
    out.writeVInt(fields.size());
    for (SearchHitField hitField : fields().values()) {
      hitField.writeTo(out);
    }
  }
  if (highlightFields == null) {
    out.writeVInt(0);
  } else {
    out.writeVInt(highlightFields.size());
    for (HighlightField highlightField : highlightFields.values()) {
      highlightField.writeTo(out);
    }
  }
  if (sortValues.length == 0) {
    out.writeVInt(0);
  } else {
    out.writeVInt(sortValues.length);
    for (Object sortValue : sortValues) {
      if (sortValue == null) {
        out.writeByte((byte) 0);
      } else {
        Class type = sortValue.getClass();
        if (type == String.class) {
          out.writeByte((byte) 1);
          out.writeString((String) sortValue);
        } else if (type == Integer.class) {
          out.writeByte((byte) 2);
          out.writeInt((Integer) sortValue);
        } else if (type == Long.class) {
          out.writeByte((byte) 3);
          out.writeLong((Long) sortValue);
        } else if (type == Float.class) {
          out.writeByte((byte) 4);
          out.writeFloat((Float) sortValue);
        } else if (type == Double.class) {
          out.writeByte((byte) 5);
          out.writeDouble((Double) sortValue);
        } else if (type == Byte.class) {
          out.writeByte((byte) 6);
          out.writeByte((Byte) sortValue);
        } else if (type == Short.class) {
          out.writeByte((byte) 7);
          out.writeShort((Short) sortValue);
        } else if (type == Boolean.class) {
          out.writeByte((byte) 8);
          out.writeBoolean((Boolean) sortValue);
        } else if (sortValue instanceof Text) {
          out.writeByte((byte) 9);
          out.writeText((Text) sortValue);
        } else {
          throw new IOException("Can't handle sort field value of type [" + type + "]");
        }
      }
    }
  }
  if (matchedQueries.length == 0) {
    out.writeVInt(0);
  } else {
    out.writeVInt(matchedQueries.length);
    for (String matchedFilter : matchedQueries) {
      out.writeString(matchedFilter);
    }
  }
  if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.STREAM) {
    if (shard == null) {
      out.writeBoolean(false);
    } else {
      out.writeBoolean(true);
      shard.writeTo(out);
    }
  } else if (context.streamShardTarget()
      == InternalSearchHits.StreamContext.ShardTargetType.LOOKUP) {
    if (shard == null) {
      out.writeVInt(0);
    } else {
      out.writeVInt(context.shardHandleLookup().get(shard));
    }
  }
  if (innerHits == null) {
    out.writeVInt(0);
  } else {
    out.writeVInt(innerHits.size());
    for (Map.Entry<String, InternalSearchHits> entry : innerHits.entrySet()) {
      out.writeString(entry.getKey());
      entry
          .getValue()
          .writeTo(
              out,
              InternalSearchHits.streamContext()
                  .streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.NO_STREAM));
    }
  }
}
public void readFrom(StreamInput in, InternalSearchHits.StreamContext context)
    throws IOException {
  score = in.readFloat();
  id = in.readText();
  type = in.readText();
  nestedIdentity = in.readOptionalStreamable(new InternalNestedIdentity());
  version = in.readLong();
  source = in.readBytesReference();
  if (source.length() == 0) {
    source = null;
  }
  if (in.readBoolean()) {
    explanation = readExplanation(in);
  }
  int size = in.readVInt();
  if (size == 0) {
    fields = ImmutableMap.of();
  } else if (size == 1) {
    SearchHitField hitField = readSearchHitField(in);
    fields = ImmutableMap.of(hitField.name(), hitField);
  } else if (size == 2) {
    SearchHitField hitField1 = readSearchHitField(in);
    SearchHitField hitField2 = readSearchHitField(in);
    fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2);
  } else if (size == 3) {
    SearchHitField hitField1 = readSearchHitField(in);
    SearchHitField hitField2 = readSearchHitField(in);
    SearchHitField hitField3 = readSearchHitField(in);
    fields =
        ImmutableMap.of(
            hitField1.name(), hitField1,
            hitField2.name(), hitField2,
            hitField3.name(), hitField3);
  } else if (size == 4) {
    SearchHitField hitField1 = readSearchHitField(in);
    SearchHitField hitField2 = readSearchHitField(in);
    SearchHitField hitField3 = readSearchHitField(in);
    SearchHitField hitField4 = readSearchHitField(in);
    fields =
        ImmutableMap.of(
            hitField1.name(), hitField1,
            hitField2.name(), hitField2,
            hitField3.name(), hitField3,
            hitField4.name(), hitField4);
  } else if (size == 5) {
    SearchHitField hitField1 = readSearchHitField(in);
    SearchHitField hitField2 = readSearchHitField(in);
    SearchHitField hitField3 = readSearchHitField(in);
    SearchHitField hitField4 = readSearchHitField(in);
    SearchHitField hitField5 = readSearchHitField(in);
    fields =
        ImmutableMap.of(
            hitField1.name(), hitField1,
            hitField2.name(), hitField2,
            hitField3.name(), hitField3,
            hitField4.name(), hitField4,
            hitField5.name(), hitField5);
  } else {
    ImmutableMap.Builder<String, SearchHitField> builder = ImmutableMap.builder();
    for (int i = 0; i < size; i++) {
      SearchHitField hitField = readSearchHitField(in);
      builder.put(hitField.name(), hitField);
    }
    fields = builder.build();
  }

  size = in.readVInt();
  if (size == 0) {
    highlightFields = ImmutableMap.of();
  } else if (size == 1) {
    HighlightField field = readHighlightField(in);
    highlightFields = ImmutableMap.of(field.name(), field);
  } else if (size == 2) {
    HighlightField field1 = readHighlightField(in);
    HighlightField field2 = readHighlightField(in);
    highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2);
  } else if (size == 3) {
    HighlightField field1 = readHighlightField(in);
    HighlightField field2 = readHighlightField(in);
    HighlightField field3 = readHighlightField(in);
    highlightFields =
        ImmutableMap.of(field1.name(), field1, field2.name(), field2, field3.name(), field3);
  } else if (size == 4) {
    HighlightField field1 = readHighlightField(in);
    HighlightField field2 = readHighlightField(in);
    HighlightField field3 = readHighlightField(in);
    HighlightField field4 = readHighlightField(in);
    highlightFields =
        ImmutableMap.of(
            field1.name(), field1,
            field2.name(), field2,
            field3.name(), field3,
            field4.name(), field4);
  } else {
    ImmutableMap.Builder<String, HighlightField> builder = ImmutableMap.builder();
    for (int i = 0; i < size; i++) {
      HighlightField field = readHighlightField(in);
      builder.put(field.name(), field);
    }
    highlightFields = builder.build();
  }

  size = in.readVInt();
  if (size > 0) {
    sortValues = new Object[size];
    for (int i = 0; i < sortValues.length; i++) {
      byte type = in.readByte();
      if (type == 0) {
        sortValues[i] = null;
      } else if (type == 1) {
        sortValues[i] = in.readString();
      } else if (type == 2) {
        sortValues[i] = in.readInt();
      } else if (type == 3) {
        sortValues[i] = in.readLong();
      } else if (type == 4) {
        sortValues[i] = in.readFloat();
      } else if (type == 5) {
        sortValues[i] = in.readDouble();
      } else if (type == 6) {
        sortValues[i] = in.readByte();
      } else if (type == 7) {
        sortValues[i] = in.readShort();
      } else if (type == 8) {
        sortValues[i] = in.readBoolean();
      } else if (type == 9) {
        sortValues[i] = in.readText();
      } else {
        throw new IOException("Can't match type [" + type + "]");
      }
    }
  }

  size = in.readVInt();
  if (size > 0) {
    matchedQueries = new String[size];
    for (int i = 0; i < size; i++) {
      matchedQueries[i] = in.readString();
    }
  }

  if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.STREAM) {
    if (in.readBoolean()) {
      shard = readSearchShardTarget(in);
    }
  } else if (context.streamShardTarget()
      == InternalSearchHits.StreamContext.ShardTargetType.LOOKUP) {
    int lookupId = in.readVInt();
    if (lookupId > 0) {
      shard = context.handleShardLookup().get(lookupId);
    }
  }

  size = in.readVInt();
  if (size > 0) {
    innerHits = new HashMap<>(size);
    for (int i = 0; i < size; i++) {
      String key = in.readString();
      InternalSearchHits value =
          InternalSearchHits.readSearchHits(
              in,
              InternalSearchHits.streamContext()
                  .streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.NO_STREAM));
      innerHits.put(key, value);
    }
  }
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
  List<SearchHitField> metaFields = Lists.newArrayList();
  List<SearchHitField> otherFields = Lists.newArrayList();
  if (fields != null && !fields.isEmpty()) {
    for (SearchHitField field : fields.values()) {
      if (field.values().isEmpty()) {
        continue;
      }
      if (field.isMetadataField()) {
        metaFields.add(field);
      } else {
        otherFields.add(field);
      }
    }
  }

  builder.startObject();
  // For inner_hit hits shard is null and that is ok, because the parent search hit has all this
  // information. Even if this was included in the inner_hit hits this would be the same, so
  // better leave it out.
  if (explanation() != null && shard != null) {
    builder.field("_shard", shard.shardId());
    builder.field("_node", shard.nodeIdText());
  }
  if (shard != null) {
    builder.field(Fields._INDEX, shard.indexText());
  }
  builder.field(Fields._TYPE, type);
  builder.field(Fields._ID, id);
  if (nestedIdentity != null) {
    nestedIdentity.toXContent(builder, params);
  }
  if (version != -1) {
    builder.field(Fields._VERSION, version);
  }
  if (Float.isNaN(score)) {
    builder.nullField(Fields._SCORE);
  } else {
    builder.field(Fields._SCORE, score);
  }
  for (SearchHitField field : metaFields) {
    builder.field(field.name(), field.value());
  }
  if (source != null) {
    XContentHelper.writeRawField("_source", source, builder, params);
  }
  if (!otherFields.isEmpty()) {
    builder.startObject(Fields.FIELDS);
    for (SearchHitField field : otherFields) {
      builder.startArray(field.name());
      for (Object value : field.getValues()) {
        builder.value(value);
      }
      builder.endArray();
    }
    builder.endObject();
  }
  if (highlightFields != null && !highlightFields.isEmpty()) {
    builder.startObject(Fields.HIGHLIGHT);
    for (HighlightField field : highlightFields.values()) {
      builder.field(field.name());
      if (field.fragments() == null) {
        builder.nullValue();
      } else {
        builder.startArray();
        for (Text fragment : field.fragments()) {
          builder.value(fragment);
        }
        builder.endArray();
      }
    }
    builder.endObject();
  }
  if (sortValues != null && sortValues.length > 0) {
    builder.startArray(Fields.SORT);
    for (Object sortValue : sortValues) {
      if (sortValue != null && sortValue.equals(MAX_TERM_AS_TEXT)) {
        // We don't display MAX_TERM in JSON responses in case some clients have UTF-8 parsers
        // that wouldn't accept a non-character in the response, even though this is valid UTF-8
        builder.nullValue();
      } else {
        builder.value(sortValue);
      }
    }
    builder.endArray();
  }
  if (matchedQueries.length > 0) {
    builder.startArray(Fields.MATCHED_QUERIES);
    for (String matchedFilter : matchedQueries) {
      builder.value(matchedFilter);
    }
    builder.endArray();
  }
  if (explanation() != null) {
    builder.field(Fields._EXPLANATION);
    buildExplanation(builder, explanation());
  }
  if (innerHits != null) {
    builder.startObject(Fields.INNER_HITS);
    for (Map.Entry<String, InternalSearchHits> entry : innerHits.entrySet()) {
      builder.startObject(entry.getKey());
      entry.getValue().toXContent(builder, params);
      builder.endObject();
    }
    builder.endObject();
  }
  builder.endObject();
  return builder;
}
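For orientation, a hedged sketch of the JSON one hit renders to through this method; the property names follow the Fields constants referenced above, the values are invented for illustration, and optional parts (_shard/_node, _explanation, inner_hits) only appear when present:

{
  "_index": "my-index",
  "_type": "my-type",
  "_id": "1",
  "_version": 2,
  "_score": 1.3862944,
  "_source": {"user": "kimchy"},
  "fields": {"tags": ["search", "lucene"]},
  "highlight": {"body": ["a <em>search</em> fragment"]},
  "sort": [1449846000000],
  "matched_queries": ["recent"]
}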
private <T> T fieldValue(String fieldName) {
  SearchHitField field = delegate.field(fieldName);
  return field == null ? null : field.value();
}
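A tiny, hypothetical caller of the generic accessor above (inside the same wrapper class, since the method is private); the field names are assumptions for illustration.

// The target type is inferred from the assignment; an absent field yields null rather
// than throwing, because fieldValue() checks delegate.field(fieldName) for null first.
String login = fieldValue("login");
Long lineCount = fieldValue("lines");
boolean hasLogin = login != null;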
public void readFrom(StreamInput in, InternalSearchHits.StreamContext context)
    throws IOException {
  score = in.readFloat();
  id = in.readText();
  type = in.readText();
  nestedIdentity = in.readOptionalStreamable(InternalNestedIdentity::new);
  version = in.readLong();
  source = in.readBytesReference();
  if (source.length() == 0) {
    source = null;
  }
  if (in.readBoolean()) {
    explanation = readExplanation(in);
  }
  int size = in.readVInt();
  if (size == 0) {
    fields = emptyMap();
  } else if (size == 1) {
    SearchHitField hitField = readSearchHitField(in);
    fields = singletonMap(hitField.name(), hitField);
  } else {
    Map<String, SearchHitField> fields = new HashMap<>();
    for (int i = 0; i < size; i++) {
      SearchHitField hitField = readSearchHitField(in);
      fields.put(hitField.name(), hitField);
    }
    this.fields = unmodifiableMap(fields);
  }

  size = in.readVInt();
  if (size == 0) {
    highlightFields = emptyMap();
  } else if (size == 1) {
    HighlightField field = readHighlightField(in);
    highlightFields = singletonMap(field.name(), field);
  } else {
    Map<String, HighlightField> highlightFields = new HashMap<>();
    for (int i = 0; i < size; i++) {
      HighlightField field = readHighlightField(in);
      highlightFields.put(field.name(), field);
    }
    this.highlightFields = unmodifiableMap(highlightFields);
  }

  size = in.readVInt();
  if (size > 0) {
    sortValues = new Object[size];
    for (int i = 0; i < sortValues.length; i++) {
      byte type = in.readByte();
      if (type == 0) {
        sortValues[i] = null;
      } else if (type == 1) {
        sortValues[i] = in.readString();
      } else if (type == 2) {
        sortValues[i] = in.readInt();
      } else if (type == 3) {
        sortValues[i] = in.readLong();
      } else if (type == 4) {
        sortValues[i] = in.readFloat();
      } else if (type == 5) {
        sortValues[i] = in.readDouble();
      } else if (type == 6) {
        sortValues[i] = in.readByte();
      } else if (type == 7) {
        sortValues[i] = in.readShort();
      } else if (type == 8) {
        sortValues[i] = in.readBoolean();
      } else {
        throw new IOException("Can't match type [" + type + "]");
      }
    }
  }

  size = in.readVInt();
  if (size > 0) {
    matchedQueries = new String[size];
    for (int i = 0; i < size; i++) {
      matchedQueries[i] = in.readString();
    }
  }

  if (context.streamShardTarget() == ShardTargetType.STREAM) {
    if (in.readBoolean()) {
      shard = new SearchShardTarget(in);
    }
  } else if (context.streamShardTarget() == ShardTargetType.LOOKUP) {
    int lookupId = in.readVInt();
    if (lookupId > 0) {
      shard = context.handleShardLookup().get(lookupId);
    }
  }

  size = in.readVInt();
  if (size > 0) {
    innerHits = new HashMap<>(size);
    for (int i = 0; i < size; i++) {
      String key = in.readString();
      // Inner hits never stream the shard target; restore the previous setting afterwards.
      ShardTargetType shardTarget = context.streamShardTarget();
      InternalSearchHits value =
          InternalSearchHits.readSearchHits(
              in, context.streamShardTarget(ShardTargetType.NO_STREAM));
      context.streamShardTarget(shardTarget);
      innerHits.put(key, value);
    }
  }
}