/**
 * Builds the given metadata field mapper and registers it, keyed by the
 * mapper's concrete class — so registering a second mapper of the same
 * class replaces the first (presumably {@code rootMappers} is a Map; confirm).
 *
 * @param mapper builder for the metadata field mapper to register
 * @return this builder, for call chaining
 */
public Builder put(MetadataFieldMapper.Builder<?, ?> mapper) {
    MetadataFieldMapper built = mapper.build(builderContext);
    rootMappers.put(built.getClass(), built);
    return this;
}
/**
 * Parses the raw source of an indexing request into a {@link ParsedDocument}.
 * <p>
 * Pipeline: validate the type, obtain (or create) an XContent parser, run the
 * metadata mappers' pre-parse hooks, parse the document body (collecting any
 * dynamic mapping updates), run the post-parse hooks, then post-process the
 * resulting Lucene documents (nested-doc ordering, doc boost).
 *
 * @param source the source to parse; its type must match this mapper's type
 * @return the parsed document, carrying any dynamic mapping update
 * @throws MapperParsingException on malformed content or any parse failure
 *         (other throwables are wrapped; an empty document gets a dedicated message)
 */
private ParsedDocument innerParseDocument(SourceToParse source) throws MapperParsingException {
    // Indexing directly into the default mapping is never allowed.
    if (docMapper.type().equals(MapperService.DEFAULT_MAPPING)) {
        throw new IllegalArgumentException("It is forbidden to index into the default mapping [" + MapperService.DEFAULT_MAPPING + "]");
    }
    // NOTE(review): context comes from a cache (presumably thread-local, to
    // reuse parse state across requests — confirm against the cache's declaration).
    ParseContext.InternalParseContext context = cache.get();
    final Mapping mapping = docMapper.mapping();
    // The request's type, when present, must agree with this mapper's type.
    if (source.type() != null && !source.type().equals(docMapper.type())) {
        throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + docMapper.type() + "]");
    }
    source.type(docMapper.type());
    XContentParser parser = source.parser();
    try {
        // If the caller did not supply a parser, create one from the raw bytes;
        // the finally block below closes it only in that case.
        if (parser == null) {
            parser = XContentHelper.createParser(source.source());
        }
        context.reset(parser, new ParseContext.Document(), source);
        // will result in START_OBJECT
        XContentParser.Token token = parser.nextToken();
        if (token != XContentParser.Token.START_OBJECT) {
            throw new MapperParsingException("Malformed content, must start with an object");
        }
        boolean emptyDoc = false;
        if (mapping.root.isEnabled()) {
            token = parser.nextToken();
            if (token == XContentParser.Token.END_OBJECT) {
                // empty doc, we can handle it...
                emptyDoc = true;
            } else if (token != XContentParser.Token.FIELD_NAME) {
                throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
            }
        }
        // Metadata mappers run both before and after the body parse (pre/post hooks).
        for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
            metadataMapper.preParse(context);
        }
        if (mapping.root.isEnabled() == false) {
            // entire type is disabled: skip the body entirely, nothing is indexed
            parser.skipChildren();
        } else if (emptyDoc == false) {
            // Parse the body; a non-null return is a dynamic mapping update to record.
            Mapper update = parseObject(context, mapping.root, true);
            if (update != null) {
                context.addDynamicMappingsUpdate(update);
            }
        }
        for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
            metadataMapper.postParse(context);
        }
        // try to parse the next token, this should be null if the object is ended properly
        // but will throw a JSON exception if the extra tokens is not valid JSON (this will be
        // handled by the catch). Gated on index version so pre-2.0 indices keep the old
        // lenient behavior.
        if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1)
                && source.parser() == null && parser != null) {
            // only check for end of tokens if we created the parser here
            token = parser.nextToken();
            if (token != null) {
                throw new IllegalArgumentException("Malformed content, found extra data after parsing: " + token);
            }
        }
    } catch (Throwable e) {
        // if its already a mapper parsing exception, no need to wrap it...
        if (e instanceof MapperParsingException) {
            throw (MapperParsingException) e;
        }
        // Throw a more meaningful message if the document is empty.
        if (source.source() != null && source.source().length() == 0) {
            throw new MapperParsingException("failed to parse, document is empty");
        }
        throw new MapperParsingException("failed to parse", e);
    } finally {
        // only close the parser when its not provided externally
        if (source.parser() == null && parser != null) {
            parser.close();
        }
    }
    // reverse the order of docs for nested docs support, parent should be last
    if (context.docs().size() > 1) {
        Collections.reverse(context.docs());
    }
    // apply doc boost — at most once per field name per document, and only to
    // fields that are indexed with norms (norms carry the boost).
    if (context.docBoost() != 1.0f) {
        Set<String> encounteredFields = new HashSet<>();
        for (ParseContext.Document doc : context.docs()) {
            encounteredFields.clear();
            for (IndexableField field : doc) {
                if (field.fieldType().indexOptions() != IndexOptions.NONE && !field.fieldType().omitNorms()) {
                    if (!encounteredFields.contains(field.name())) {
                        ((Field) field).setBoost(context.docBoost() * field.boost());
                        encounteredFields.add(field.name());
                    }
                }
            }
        }
    }
    // Fold any root-level dynamic mapping update into a full Mapping update.
    Mapper rootDynamicUpdate = context.dynamicMappingsUpdate();
    Mapping update = null;
    if (rootDynamicUpdate != null) {
        update = mapping.mappingUpdate(rootDynamicUpdate);
    }
    ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(), context.source(), update).parent(source.parent());
    // reset the context to free up memory
    context.reset(null, null, null);
    return doc;
}
/**
 * Pre-parse hook: this metadata field is produced before the document body is
 * parsed, so the work is done here by delegating to the inherited parse logic.
 * NOTE(review): deliberately calls {@code super.parse(context)} rather than
 * {@code this.parse(context)} — presumably {@code parse} is overridden to be a
 * no-op during the body phase; confirm against the enclosing class.
 */
@Override
public void preParse(ParseContext context) throws IOException {
    super.parse(context);
}