  public void close() {
    cache.remove();
    rootObjectMapper.close();
    for (RootMapper rootMapper : rootMappersOrdered) {
      rootMapper.close();
    }
  }

  public ParsedDocument parse(SourceToParse source, @Nullable ParseListener listener)
      throws MapperParsingException {
    ParseContext context = cache.get();

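    // a type embedded in the source, if present, must match the type this mapper was built for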
    if (source.type() != null && !source.type().equals(this.type)) {
      throw new MapperParsingException(
          "Type mismatch, provided type ["
              + source.type()
              + "] but mapper is of type ["
              + this.type
              + "]");
    }
    source.type(this.type);

    XContentParser parser = source.parser();
    try {
      if (parser == null) {
        parser = XContentHelper.createParser(source.source());
      }
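      // bind the parser, a fresh Document, and the source to the cached parse context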
      context.reset(parser, new Document(), source, listener);
      // on a newly created document mapper instance all of its mappers are new, so mark the
      // mappings as modified on the first parse
      if (initMappersAdded) {
        context.setMappingsModified();
        initMappersAdded = false;
      }

      int countDownTokens = 0;
      // the first token of the source must be START_OBJECT
      XContentParser.Token token = parser.nextToken();
      if (token != XContentParser.Token.START_OBJECT) {
        throw new MapperParsingException("Malformed content, must start with an object");
      }
      boolean emptyDoc = false;
      token = parser.nextToken();
      if (token == XContentParser.Token.END_OBJECT) {
        // empty doc, we can handle it...
        emptyDoc = true;
      } else if (token != XContentParser.Token.FIELD_NAME) {
        throw new MapperParsingException(
            "Malformed content, after first object, either the type field or the actual properties should exist");
      }
      if (type.equals(parser.currentName())) {
        // the first field has the same name as the type; either the type was provided and the
        // document object lives within it, or a regular field just happens to be named like the
        // type

        // note that in this case we only handle plain value types correctly: an object value will
        // be parsed as if it were the type wrapper itself, and other same-level fields will be
        // ignored
        token = parser.nextToken();
        countDownTokens++;
        // commented out: we allow a field named like the type even without START_OBJECT and do
        // our best to handle it, except for the corner case above
        //        if (token != XContentParser.Token.START_OBJECT) {
        //            throw new MapperException("Malformed content, a field with the same name as"
        //                + " the type must be an object with the properties/fields within it");
        //        }
      }

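      // give each root mapper a chance to run before the document fields are parsed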
      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.preParse(context);
      }

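      // parse the actual document fields, unless the document was empty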
      if (!emptyDoc) {
        rootObjectMapper.parse(context);
      }

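      // consume the remaining token(s) of the type wrapper we descended into above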
      for (int i = 0; i < countDownTokens; i++) {
        parser.nextToken();
      }

      // register any new mappers that were added while parsing, if there are any
      if (!context.newFieldMappers().mappers.isEmpty()) {
        addFieldMappers(context.newFieldMappers().mappers);
        context.newFieldMappers().mappers.clear();
      }
      if (!context.newObjectMappers().mappers.isEmpty()) {
        addObjectMappers(context.newObjectMappers().mappers);
        context.newObjectMappers().mappers.clear();
      }

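      // and again after the fields, so root mappers can react to what was parsed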
      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.postParse(context);
      }

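      // finally, let each root mapper validate the parsed context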
      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.validate(context);
      }
    } catch (Throwable e) {
      // we have to register any new mappers even on failure, because they have already been added
      // internally to each compound mapper; there is no way to "rollback" such a change, which is
      // very tricky in our copy-on-change system
      if (!context.newFieldMappers().mappers.isEmpty()) {
        addFieldMappers(context.newFieldMappers().mappers);
        context.newFieldMappers().mappers.clear();
      }
      if (!context.newObjectMappers().mappers.isEmpty()) {
        addObjectMappers(context.newObjectMappers().mappers);
        context.newObjectMappers().mappers.clear();
      }

      // if it's already a mapper parsing exception, there is no need to wrap it
      if (e instanceof MapperParsingException) {
        throw (MapperParsingException) e;
      }

      throw new MapperParsingException("failed to parse", e);
    } finally {
      // only close the parser when it was not provided externally
      if (source.parser() == null && parser != null) {
        parser.close();
      }
    }
    // reverse the order of docs for nested docs support; the parent doc should come last
    if (context.docs().size() > 1) {
      Collections.reverse(context.docs());
    }
    // apply doc boost
    if (context.docBoost() != 1.0f) {
      Set<String> encounteredFields = Sets.newHashSet();
      for (Document doc : context.docs()) {
        encounteredFields.clear();
        for (IndexableField field : doc) {
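          // boost each field name only once per document, and only fields indexed with norms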
          if (field.fieldType().indexed() && !field.fieldType().omitNorms()) {
            if (!encounteredFields.contains(field.name())) {
              ((Field) field).setBoost(context.docBoost() * field.boost());
              encounteredFields.add(field.name());
            }
          }
        }
      }
    }

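    // assemble the parsed document from everything gathered in the context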
    ParsedDocument doc =
        new ParsedDocument(
                context.uid(),
                context.id(),
                context.type(),
                source.routing(),
                source.timestamp(),
                source.ttl(),
                context.docs(),
                context.analyzer(),
                context.source(),
                context.mappingsModified())
            .parent(source.parent());
    // reset the context to free up memory
    context.reset(null, null, null, null);
    return doc;
  }