Query parsePercolatorDocument(String id, BytesReference source) {
    String type = null;
    BytesReference querySource = null;
    XContentParser parser = null;
    try {
        parser = XContentHelper.createParser(source);
        String currentFieldName = null;
        XContentParser.Token token = parser.nextToken(); // move to the START_OBJECT
        if (token != XContentParser.Token.START_OBJECT) {
            throw new ElasticsearchException("failed to parse query [" + id + "], not starting with OBJECT");
        }
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                if ("query".equals(currentFieldName)) {
                    if (type != null) {
                        // type is already known, parse the query directly from the stream
                        return parseQuery(type, null, parser);
                    } else {
                        // type not seen yet, buffer the query source and parse it at the end
                        XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
                        builder.copyCurrentStructure(parser);
                        querySource = builder.bytes();
                        builder.close();
                    }
                } else {
                    parser.skipChildren();
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                parser.skipChildren();
            } else if (token.isValue()) {
                if ("type".equals(currentFieldName)) {
                    type = parser.text();
                }
            }
        }
        return parseQuery(type, querySource, null);
    } catch (Exception e) {
        throw new PercolatorException(shardId().index(), "failed to parse query [" + id + "]", e);
    } finally {
        if (parser != null) {
            parser.close();
        }
    }
}
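/*
 * A minimal usage sketch (not from the original source): builds a percolator
 * document of the shape the parser above expects -- an optional "type" value
 * field plus a "query" object. XContentFactory.jsonBuilder() is the standard
 * Elasticsearch builder API; the field names and values here are hypothetical,
 * and the call only compiles inside the owning class.
 */
BytesReference source = XContentFactory.jsonBuilder()
        .startObject()
            .field("type", "my_type")          // optional; lets the query be parsed against one mapping type
            .startObject("query")
                .startObject("term")
                    .field("field1", "value1")
                .endObject()
            .endObject()
        .endObject()
        .bytes();
Query query = parsePercolatorDocument("1", source);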
public static IndexMetaData fromXContent(XContentParser parser) throws IOException {
    if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
        parser.nextToken();
    }
    Builder builder = new Builder(parser.currentName());

    String currentFieldName = null;
    XContentParser.Token token = parser.nextToken(); // move to the START_OBJECT of the index entry
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.START_OBJECT) {
            if ("settings".equals(currentFieldName)) {
                builder.settings(ImmutableSettings.settingsBuilder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())));
            } else if ("mappings".equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token == XContentParser.Token.START_OBJECT) {
                        String mappingType = currentFieldName;
                        Map<String, Object> mappingSource =
                                MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map();
                        builder.putMapping(new MappingMetaData(mappingType, mappingSource));
                    }
                }
            } else if ("aliases".equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    builder.putAlias(AliasMetaData.Builder.fromXContent(parser));
                }
            } else {
                // check if it's a custom index metadata section
                Custom.Factory<Custom> factory = lookupFactory(currentFieldName);
                if (factory == null) {
                    // TODO warn
                    parser.skipChildren();
                } else {
                    builder.putCustom(factory.type(), factory.fromXContent(parser));
                }
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            if ("mappings".equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
                        builder.putMapping(new MappingMetaData(new CompressedString(parser.binaryValue())));
                    } else {
                        Map<String, Object> mapping = parser.mapOrdered();
                        if (mapping.size() == 1) {
                            String mappingType = mapping.keySet().iterator().next();
                            builder.putMapping(new MappingMetaData(mappingType, mapping));
                        }
                    }
                }
            }
        } else if (token.isValue()) {
            if ("state".equals(currentFieldName)) {
                builder.state(State.fromString(parser.text()));
            } else if ("version".equals(currentFieldName)) {
                builder.version(parser.longValue());
            }
        }
    }
    return builder.build();
}
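/*
 * A hedged usage sketch (not from the original source): the input shape the
 * parser above accepts, keyed by index name. The field names come from the
 * code above, but the values are made up and the exact on-disk layout is
 * version-dependent.
 */
String json = "{\"my_index\":{"
        + "\"version\":3,"
        + "\"state\":\"open\","
        + "\"settings\":{\"index.number_of_shards\":\"5\"},"
        + "\"mappings\":{\"my_type\":{\"properties\":{}}},"
        + "\"aliases\":{}"
        + "}}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(json);
parser.nextToken(); // position on the outer START_OBJECT, as fromXContent expects
IndexMetaData indexMetaData = IndexMetaData.Builder.fromXContent(parser);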
private ParsedDocument parseRequest(IndexService documentIndexService, PercolateShardRequest request, PercolateContext context) throws ElasticsearchException {
    BytesReference source = request.source();
    if (source == null || source.length() == 0) {
        return null;
    }

    // TODO: combine all feature parse elements into one map
    Map<String, ? extends SearchParseElement> hlElements = highlightPhase.parseElements();
    Map<String, ? extends SearchParseElement> facetElements = facetPhase.parseElements();
    Map<String, ? extends SearchParseElement> aggregationElements = aggregationPhase.parseElements();

    ParsedDocument doc = null;
    XContentParser parser = null;

    // Some queries (e.g. the function_score query with decay functions) rely on a SearchContext being set.
    // We switch types because this context needs to be in the context of the percolate queries in the shard
    // and not the in-memory percolate doc.
    String[] previousTypes = context.types();
    context.types(new String[]{TYPE_NAME});
    SearchContext.setCurrent(context);
    try {
        parser = XContentFactory.xContent(source).createParser(source);
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
                // check for "doc" here while on the FIELD_NAME token, so that the next token is
                // the START_OBJECT where the actual document starts
                if ("doc".equals(currentFieldName)) {
                    if (doc != null) {
                        throw new ElasticsearchParseException("Either specify doc or get, not both");
                    }
                    MapperService mapperService = documentIndexService.mapperService();
                    DocumentMapper docMapper = mapperService.documentMapperWithAutoCreate(request.documentType());
                    doc = docMapper.parse(source(parser).type(request.documentType()).flyweight(true));
                    // document parsing exits the "doc" object, so we need to set the new current field
                    currentFieldName = parser.currentName();
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                SearchParseElement element = hlElements.get(currentFieldName);
                if (element == null) {
                    element = facetElements.get(currentFieldName);
                    if (element == null) {
                        element = aggregationElements.get(currentFieldName);
                    }
                }
                if ("query".equals(currentFieldName)) {
                    if (context.percolateQuery() != null) {
                        throw new ElasticsearchParseException("Either specify query or filter, not both");
                    }
                    context.percolateQuery(documentIndexService.queryParserService().parse(parser).query());
                } else if ("filter".equals(currentFieldName)) {
                    if (context.percolateQuery() != null) {
                        throw new ElasticsearchParseException("Either specify query or filter, not both");
                    }
                    Filter filter = documentIndexService.queryParserService().parseInnerFilter(parser).filter();
                    context.percolateQuery(new XConstantScoreQuery(filter));
                } else if ("sort".equals(currentFieldName)) {
                    parseSort(parser, context);
                } else if (element != null) {
                    element.parse(parser, context);
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                if ("sort".equals(currentFieldName)) {
                    parseSort(parser, context);
                }
            } else if (token == null) {
                break;
            } else if (token.isValue()) {
                if ("size".equals(currentFieldName)) {
                    context.size(parser.intValue());
                    if (context.size() < 0) {
                        throw new ElasticsearchParseException("size is set to [" + context.size() + "] and is expected to be higher or equal to 0");
                    }
                } else if ("sort".equals(currentFieldName)) {
                    parseSort(parser, context);
                } else if ("track_scores".equals(currentFieldName) || "trackScores".equals(currentFieldName)) {
                    context.trackScores(parser.booleanValue());
                }
            }
        }

        // We need to get the actual source from the request body for highlighting,
        // so parse the request body again and only get the doc source.
        if (context.highlight() != null) {
            parser.close();
            currentFieldName = null;
            parser = XContentFactory.xContent(source).createParser(source);
            token = parser.nextToken();
            assert token == XContentParser.Token.START_OBJECT;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if ("doc".equals(currentFieldName)) {
                        BytesStreamOutput bStream = new BytesStreamOutput();
                        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.SMILE, bStream);
                        builder.copyCurrentStructure(parser);
                        builder.close();
                        doc.setSource(bStream.bytes());
                        break;
                    } else {
                        parser.skipChildren();
                    }
                } else if (token == null) {
                    break;
                }
            }
        }
    } catch (Throwable e) {
        throw new ElasticsearchParseException("failed to parse request", e);
    } finally {
        context.types(previousTypes);
        SearchContext.removeCurrent();
        if (parser != null) {
            parser.close();
        }
    }

    return doc;
}
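/*
 * A hedged sketch (not from the original source) of a percolate request body
 * that exercises the branches above: a "doc" to percolate, a "query" to
 * pre-filter the registered percolator queries, plus "size" and "track_scores".
 * Field names come from the parser above; the values are hypothetical.
 */
XContentBuilder body = XContentFactory.jsonBuilder()
        .startObject()
            .startObject("doc")
                .field("message", "the quick brown fox")
            .endObject()
            .startObject("query")
                .startObject("term")
                    .field("message", "fox")
                .endObject()
            .endObject()
            .field("size", 10)
            .field("track_scores", true)
        .endObject();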
private ParsedDocument innerParseDocument(SourceToParse source) throws MapperParsingException {
    if (docMapper.type().equals(MapperService.DEFAULT_MAPPING)) {
        throw new IllegalArgumentException("It is forbidden to index into the default mapping [" + MapperService.DEFAULT_MAPPING + "]");
    }

    ParseContext.InternalParseContext context = cache.get();
    final Mapping mapping = docMapper.mapping();
    if (source.type() != null && !source.type().equals(docMapper.type())) {
        throw new MapperParsingException("Type mismatch, provided type [" + source.type() + "] but mapper is of type [" + docMapper.type() + "]");
    }
    source.type(docMapper.type());

    XContentParser parser = source.parser();
    try {
        if (parser == null) {
            parser = XContentHelper.createParser(source.source());
        }
        context.reset(parser, new ParseContext.Document(), source);

        // will result in START_OBJECT
        XContentParser.Token token = parser.nextToken();
        if (token != XContentParser.Token.START_OBJECT) {
            throw new MapperParsingException("Malformed content, must start with an object");
        }

        boolean emptyDoc = false;
        if (mapping.root.isEnabled()) {
            token = parser.nextToken();
            if (token == XContentParser.Token.END_OBJECT) {
                // empty doc, we can handle it...
                emptyDoc = true;
            } else if (token != XContentParser.Token.FIELD_NAME) {
                throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
            }
        }

        for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
            metadataMapper.preParse(context);
        }

        if (mapping.root.isEnabled() == false) {
            // entire type is disabled
            parser.skipChildren();
        } else if (emptyDoc == false) {
            Mapper update = parseObject(context, mapping.root, true);
            if (update != null) {
                context.addDynamicMappingsUpdate(update);
            }
        }

        for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
            metadataMapper.postParse(context);
        }

        // try to parse the next token; this should be null if the object ended properly,
        // but will throw a JSON exception if the extra tokens are not valid JSON (handled by the catch)
        if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1)
                && source.parser() == null && parser != null) {
            // only check for the end of tokens if we created the parser here
            token = parser.nextToken();
            if (token != null) {
                throw new IllegalArgumentException("Malformed content, found extra data after parsing: " + token);
            }
        }
    } catch (Throwable e) {
        // if it's already a mapper parsing exception, no need to wrap it...
        if (e instanceof MapperParsingException) {
            throw (MapperParsingException) e;
        }

        // Throw a more meaningful message if the document is empty.
        if (source.source() != null && source.source().length() == 0) {
            throw new MapperParsingException("failed to parse, document is empty");
        }

        throw new MapperParsingException("failed to parse", e);
    } finally {
        // only close the parser when it is not provided externally
        if (source.parser() == null && parser != null) {
            parser.close();
        }
    }

    // reverse the order of docs for nested docs support, parent should be last
    if (context.docs().size() > 1) {
        Collections.reverse(context.docs());
    }

    // apply doc boost
    if (context.docBoost() != 1.0f) {
        Set<String> encounteredFields = new HashSet<>();
        for (ParseContext.Document doc : context.docs()) {
            encounteredFields.clear();
            for (IndexableField field : doc) {
                if (field.fieldType().indexOptions() != IndexOptions.NONE && !field.fieldType().omitNorms()) {
                    if (!encounteredFields.contains(field.name())) {
                        ((Field) field).setBoost(context.docBoost() * field.boost());
                        encounteredFields.add(field.name());
                    }
                }
            }
        }
    }

    Mapper rootDynamicUpdate = context.dynamicMappingsUpdate();
    Mapping update = null;
    if (rootDynamicUpdate != null) {
        update = mapping.mappingUpdate(rootDynamicUpdate);
    }

    ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(),
            source.routing(), source.timestamp(), source.ttl(), context.docs(),
            context.source(), update).parent(source.parent());
    // reset the context to free up memory
    context.reset(null, null, null);
    return doc;
}
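/*
 * All of the methods in this section share the same pull-parsing pattern:
 * advance token by token, remember the last FIELD_NAME, dispatch on the token
 * type, and skipChildren() for anything unrecognized. Below is a minimal,
 * self-contained sketch of that pattern using Jackson (the library that
 * XContentParser wraps for JSON); the input and field names are hypothetical.
 */
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

public class PullParseSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"type\":\"my_type\",\"ignored\":{\"a\":1},\"query\":{\"term\":{\"f\":\"v\"}}}";
        try (JsonParser parser = new JsonFactory().createParser(json)) {
            String currentFieldName = null;
            JsonToken token = parser.nextToken(); // move to the START_OBJECT
            while ((token = parser.nextToken()) != JsonToken.END_OBJECT) {
                if (token == JsonToken.FIELD_NAME) {
                    currentFieldName = parser.getCurrentName();
                } else if (token == JsonToken.START_OBJECT || token == JsonToken.START_ARRAY) {
                    if ("query".equals(currentFieldName)) {
                        System.out.println("query object starts here; a real parser would descend");
                    }
                    parser.skipChildren(); // skip any substructure we do not model
                } else if (token.isScalarValue()) {
                    if ("type".equals(currentFieldName)) {
                        System.out.println("type = " + parser.getText());
                    }
                }
            }
        }
    }
}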
public static MetaData fromXContent(XContentParser parser) throws IOException {
    Builder builder = new Builder();

    // we might get here after the meta-data element, or on a fresh parser
    XContentParser.Token token = parser.currentToken();
    String currentFieldName = parser.currentName();
    if (!"meta-data".equals(currentFieldName)) {
        token = parser.nextToken();
        if (token == XContentParser.Token.START_OBJECT) {
            // move to the field name (meta-data)
            token = parser.nextToken();
            if (token != XContentParser.Token.FIELD_NAME) {
                throw new IllegalArgumentException("Expected a field name but got " + token);
            }
            // move to the next object
            token = parser.nextToken();
        }
        currentFieldName = parser.currentName();
    }

    if (!"meta-data".equals(parser.currentName())) {
        throw new IllegalArgumentException("Expected [meta-data] as a field name but got " + currentFieldName);
    }
    if (token != XContentParser.Token.START_OBJECT) {
        throw new IllegalArgumentException("Expected a START_OBJECT but got " + token);
    }

    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.START_OBJECT) {
            if ("settings".equals(currentFieldName)) {
                builder.persistentSettings(Settings.settingsBuilder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())).build());
            } else if ("indices".equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    builder.put(IndexMetaData.Builder.fromXContent(parser), false);
                }
            } else if ("templates".equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    builder.put(IndexTemplateMetaData.Builder.fromXContent(parser, parser.currentName()));
                }
            } else {
                // check if it's a custom metadata section
                Custom proto = lookupPrototype(currentFieldName);
                if (proto == null) {
                    // TODO warn
                    parser.skipChildren();
                } else {
                    Custom custom = proto.fromXContent(parser);
                    builder.putCustom(custom.type(), custom);
                }
            }
        } else if (token.isValue()) {
            if ("version".equals(currentFieldName)) {
                builder.version = parser.longValue();
            } else if ("cluster_uuid".equals(currentFieldName) || "uuid".equals(currentFieldName)) {
                builder.clusterUUID = parser.text();
            } else {
                throw new IllegalArgumentException("Unexpected field [" + currentFieldName + "]");
            }
        } else {
            throw new IllegalArgumentException("Unexpected token " + token);
        }
    }
    return builder.build();
}
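/*
 * A hedged sketch (not from the original source) of the envelope this parser
 * accepts: a single "meta-data" object wrapping settings, indices, templates
 * and top-level values. It works on a fresh parser; all values are made up,
 * and the Builder nesting is assumed to match the other fromXContent methods
 * in this section.
 */
String json = "{\"meta-data\":{"
        + "\"version\":12,"
        + "\"cluster_uuid\":\"_na_\","
        + "\"settings\":{\"cluster.name\":\"my-cluster\"},"
        + "\"indices\":{},"
        + "\"templates\":{}"
        + "}}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(json);
MetaData metaData = MetaData.Builder.fromXContent(parser);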
public static IndexMetaData fromXContent(XContentParser parser) throws IOException {
    if (parser.currentToken() == null) { // fresh parser? move to the first token
        parser.nextToken();
    }
    if (parser.currentToken() == XContentParser.Token.START_OBJECT) { // on a start object move to next token
        parser.nextToken();
    }
    if (parser.currentToken() != XContentParser.Token.FIELD_NAME) {
        throw new IllegalArgumentException("expected field name but got a " + parser.currentToken());
    }
    Builder builder = new Builder(parser.currentName());

    String currentFieldName = null;
    XContentParser.Token token = parser.nextToken();
    if (token != XContentParser.Token.START_OBJECT) {
        throw new IllegalArgumentException("expected object but got a " + token);
    }
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.START_OBJECT) {
            if ("settings".equals(currentFieldName)) {
                builder.settings(Settings.settingsBuilder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())));
            } else if ("mappings".equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token == XContentParser.Token.START_OBJECT) {
                        String mappingType = currentFieldName;
                        Map<String, Object> mappingSource =
                                MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map();
                        builder.putMapping(new MappingMetaData(mappingType, mappingSource));
                    } else {
                        throw new IllegalArgumentException("Unexpected token: " + token);
                    }
                }
            } else if ("aliases".equals(currentFieldName)) {
                while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                    builder.putAlias(AliasMetaData.Builder.fromXContent(parser));
                }
            } else if (KEY_ACTIVE_ALLOCATIONS.equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token == XContentParser.Token.START_ARRAY) {
                        String shardId = currentFieldName;
                        Set<String> allocationIds = new HashSet<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            if (token == XContentParser.Token.VALUE_STRING) {
                                allocationIds.add(parser.text());
                            }
                        }
                        builder.putActiveAllocationIds(Integer.valueOf(shardId), allocationIds);
                    } else {
                        throw new IllegalArgumentException("Unexpected token: " + token);
                    }
                }
            } else if ("warmers".equals(currentFieldName)) {
                // TODO: do this in 4.0:
                // throw new IllegalArgumentException("Warmers are not supported anymore - are you upgrading from 1.x?");
                // ignore: warmers have been removed in 3.0 and are simply ignored when upgrading from 2.x
                assert Version.CURRENT.major <= 3;
                parser.skipChildren();
            } else {
                // check if it's a custom index metadata section
                Custom proto = lookupPrototype(currentFieldName);
                if (proto == null) {
                    // TODO warn
                    parser.skipChildren();
                } else {
                    Custom custom = proto.fromXContent(parser);
                    builder.putCustom(custom.type(), custom);
                }
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            if ("mappings".equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
                        builder.putMapping(new MappingMetaData(new CompressedXContent(parser.binaryValue())));
                    } else {
                        Map<String, Object> mapping = parser.mapOrdered();
                        if (mapping.size() == 1) {
                            String mappingType = mapping.keySet().iterator().next();
                            builder.putMapping(new MappingMetaData(mappingType, mapping));
                        }
                    }
                }
            } else {
                throw new IllegalArgumentException("Unexpected field for an array " + currentFieldName);
            }
        } else if (token.isValue()) {
            if ("state".equals(currentFieldName)) {
                builder.state(State.fromString(parser.text()));
            } else if ("version".equals(currentFieldName)) {
                builder.version(parser.longValue());
            } else {
                throw new IllegalArgumentException("Unexpected field [" + currentFieldName + "]");
            }
        } else {
            throw new IllegalArgumentException("Unexpected token " + token);
        }
    }
    return builder.build();
}
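/*
 * The main addition in this newer variant over the 2.x parser above is the
 * active-allocations section, mapping each shard id to its set of active
 * allocation ids; unknown fields and tokens now fail hard instead of being
 * silently skipped. A hedged sketch of that fragment, assuming
 * KEY_ACTIVE_ALLOCATIONS is "active_allocations" (check the constant in your
 * version); the allocation id strings are made up.
 */
String activeAllocationsFragment =
        "\"active_allocations\":{\"0\":[\"QM-JYs9ZQYS2v8bDJnRyhg\"],\"1\":[\"dTS6Ai3nSHKV_fC0QHgPMA\"]}";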
public static Snapshot fromXContent(XContentParser parser) throws IOException {
    String name = null;
    Version version = Version.CURRENT;
    SnapshotState state = SnapshotState.IN_PROGRESS;
    String reason = null;
    List<String> indices = Collections.emptyList();
    long startTime = 0;
    long endTime = 0;
    int totalShard = 0;
    int successfulShards = 0;
    List<SnapshotShardFailure> shardFailures = NO_FAILURES;

    if (parser.currentToken() == null) { // fresh parser? move to the first token
        parser.nextToken();
    }
    if (parser.currentToken() == XContentParser.Token.START_OBJECT) { // on a start object move to next token
        parser.nextToken();
    }
    XContentParser.Token token;
    if ((token = parser.nextToken()) == XContentParser.Token.START_OBJECT) {
        String currentFieldName = parser.currentName();
        if ("snapshot".equals(currentFieldName)) {
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                    token = parser.nextToken();
                    if (token.isValue()) {
                        if ("name".equals(currentFieldName)) {
                            name = parser.text();
                        } else if ("state".equals(currentFieldName)) {
                            state = SnapshotState.valueOf(parser.text());
                        } else if ("reason".equals(currentFieldName)) {
                            reason = parser.text();
                        } else if ("start_time".equals(currentFieldName)) {
                            startTime = parser.longValue();
                        } else if ("end_time".equals(currentFieldName)) {
                            endTime = parser.longValue();
                        } else if ("total_shards".equals(currentFieldName)) {
                            totalShard = parser.intValue();
                        } else if ("successful_shards".equals(currentFieldName)) {
                            successfulShards = parser.intValue();
                        } else if ("version_id".equals(currentFieldName)) {
                            version = Version.fromId(parser.intValue());
                        }
                    } else if (token == XContentParser.Token.START_ARRAY) {
                        if ("indices".equals(currentFieldName)) {
                            ArrayList<String> indicesArray = new ArrayList<>();
                            while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                                indicesArray.add(parser.text());
                            }
                            indices = Collections.unmodifiableList(indicesArray);
                        } else if ("failures".equals(currentFieldName)) {
                            ArrayList<SnapshotShardFailure> shardFailureArrayList = new ArrayList<>();
                            while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                                shardFailureArrayList.add(SnapshotShardFailure.fromXContent(parser));
                            }
                            shardFailures = Collections.unmodifiableList(shardFailureArrayList);
                        } else {
                            // it was probably created by a newer version - ignore
                            parser.skipChildren();
                        }
                    } else if (token == XContentParser.Token.START_OBJECT) {
                        // it was probably created by a newer version - ignore
                        parser.skipChildren();
                    }
                }
            }
        }
    } else {
        throw new ElasticsearchParseException("unexpected token [" + token + "]");
    }
    return new Snapshot(name, indices, state, reason, version, startTime, endTime, totalShard, successfulShards, shardFailures);
}
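/*
 * A hedged sketch (not from the original source) of a snapshot blob this
 * parser accepts; all values are made up. Missing fields keep their defaults
 * (e.g. state stays IN_PROGRESS), and unknown objects/arrays are skipped so
 * blobs written by newer versions still load.
 */
String json = "{\"snapshot\":{"
        + "\"name\":\"snap-1\","
        + "\"version_id\":2000099,"
        + "\"state\":\"SUCCESS\","
        + "\"start_time\":1400000000000,"
        + "\"end_time\":1400000360000,"
        + "\"total_shards\":5,"
        + "\"successful_shards\":5,"
        + "\"indices\":[\"index-1\",\"index-2\"],"
        + "\"failures\":[]"
        + "}}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(json);
Snapshot snapshot = Snapshot.fromXContent(parser);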