public void testCombineTemplates() throws Exception {
    // clean all templates set up by the framework.
    client().admin().indices().prepareDeleteTemplate("*").get();

    // check that getting all templates returns nothing when none exist.
    GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get();
    assertThat(response.getIndexTemplates(), empty());

    // Now, a complete mapping split across two separate templates is an error.
    // base template providing the analysis settings
    client()
        .admin()
        .indices()
        .preparePutTemplate("template_1")
        .setTemplate("*")
        .setSettings(
            "    {\n"
                + "        \"index\" : {\n"
                + "            \"analysis\" : {\n"
                + "                \"analyzer\" : {\n"
                + "                    \"custom_1\" : {\n"
                + "                        \"tokenizer\" : \"whitespace\"\n"
                + "                    }\n"
                + "                }\n"
                + "            }\n"
                + "         }\n"
                + "    }\n")
        .get();

    // put a second template whose mapping uses the custom_1 analyzer; validated in
    // isolation, the analyzer is unknown, so the request must fail
    MapperParsingException e =
        expectThrows(
            MapperParsingException.class,
            () ->
                client()
                    .admin()
                    .indices()
                    .preparePutTemplate("template_2")
                    .setTemplate("test*")
                    .setCreate(true)
                    .setOrder(1)
                    .addMapping(
                        "type1",
                        XContentFactory.jsonBuilder()
                            .startObject()
                            .startObject("type1")
                            .startObject("properties")
                            .startObject("field2")
                            .field("type", "string")
                            .field("analyzer", "custom_1")
                            .endObject()
                            .endObject()
                            .endObject()
                            .endObject())
                    .get());
    assertThat(e.getMessage(), containsString("analyzer [custom_1] not found for field [field2]"));

    response = client().admin().indices().prepareGetTemplates().get();
    assertThat(response.getIndexTemplates(), hasSize(1));
  }
  public void testPercolatorFieldMapper_noQuery() throws Exception {
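    // a document without the percolator field should parse and index no query builder fields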
    addQueryMapping();
    ParsedDocument doc =
        mapperService
            .documentMapper(typeName)
            .parse(
                "test",
                typeName,
                "1",
                XContentFactory.jsonBuilder().startObject().endObject().bytes());
    assertThat(doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName()).length, equalTo(0));

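    // an explicit null value, by contrast, must be rejected: the query has to be an object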
    try {
      mapperService
          .documentMapper(typeName)
          .parse(
              "test",
              typeName,
              "1",
              XContentFactory.jsonBuilder().startObject().nullField(fieldName).endObject().bytes());
      fail("MapperParsingException expected");
    } catch (MapperParsingException e) {
      assertThat(
          e.getDetailedMessage(), containsString("query malformed, must start with start_object"));
    }
  }
  @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/8802")
  public void testBrokenMapping() throws Exception {
    // clean all templates set up by the framework.
    client().admin().indices().prepareDeleteTemplate("*").get();

    // check that getting all templates returns nothing when none exist.
    GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get();
    assertThat(response.getIndexTemplates(), empty());

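    // putting a template whose mapping body is unparseable must fail and leave no template behind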
    MapperParsingException e =
        expectThrows(
            MapperParsingException.class,
            () ->
                client()
                    .admin()
                    .indices()
                    .preparePutTemplate("template_1")
                    .setTemplate("te*")
                    .addMapping("type1", "abcde")
                    .get());
    assertThat(e.getMessage(), containsString("Failed to parse mapping "));

    response = client().admin().indices().prepareGetTemplates().get();
    assertThat(response.getIndexTemplates(), hasSize(0));
  }
  public void testUnsupportedFeatures() throws IOException {
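    // configuring the _field_names meta field is only supported on indices created on or after 1.3.0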
    XContentBuilder mapping =
        XContentBuilder.builder(JsonXContent.jsonXContent)
            .startObject()
            .startObject("type")
            .startObject(FieldNamesFieldMapper.NAME)
            // by randomly setting index to "no" we also test the pre-1.3 behavior
            .field("index", randomFrom("no", "not_analyzed"))
            .field("store", randomFrom("no", "yes"))
            .endObject()
            .endObject()
            .endObject();

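    // the index is forced onto the old nodes; if the mapping is rejected, verify the _field_names error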
    try {
      assertAcked(
          prepareCreate("test")
              .setSettings(
                  Settings.builder()
                      .put(
                          "index.routing.allocation.exclude._name",
                          backwardsCluster().newNodePattern())
                      .put(indexSettings()))
              .addMapping("type", mapping));
    } catch (MapperParsingException ex) {
      assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class));
      assertThat(
          ExceptionsHelper.detailedMessage(ex)
              .contains(
                  "type=_field_names is not supported on indices created before version 1.3.0"),
          equalTo(true));
    }
  }
  public void testIncludeInObjectNotAllowed() throws Exception {
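    // metadata fields such as _all cannot be supplied as regular fields inside a document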
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .endObject()
            .endObject()
            .string();
    DocumentMapper docMapper =
        createIndex("test").mapperService().documentMapperParser().parse(mapping);

    try {
      docMapper.parse(
          "test",
          "type",
          "1",
          XContentFactory.jsonBuilder().startObject().field("_all", "foo").endObject().bytes());
      fail("Expected failure to parse metadata field");
    } catch (MapperParsingException e) {
      assertTrue(
          e.getMessage(),
          e.getMessage()
              .contains("Field [_all] is a metadata field and cannot be added inside a document"));
    }
  }
  public void testAllowNoAdditionalSettings() throws Exception {
    addQueryMapping();
    IndexService indexService = createIndex("test1", Settings.EMPTY);
    MapperService mapperService = indexService.mapperService();

    String percolatorMapper =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject(typeName)
            .startObject("properties")
            .startObject(fieldName)
            .field("type", "percolator")
            .field("index", "no")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
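    // the percolator field type accepts no additional parameters, so "index" must be rejected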
    try {
      mapperService.merge(
          typeName,
          new CompressedXContent(percolatorMapper),
          MapperService.MergeReason.MAPPING_UPDATE,
          true);
      fail("MapperParsingException expected");
    } catch (MapperParsingException e) {
      assertThat(
          e.getMessage(),
          equalTo(
              "Mapping definition for ["
                  + fieldName
                  + "] has unsupported parameters:  [index : no]"));
    }
  }
  public void testIgnoreMalformed() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

    assertEquals(mapping, mapper.mappingSource().toString());

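    // without ignore_malformed, a value that is not a valid IP literal fails the whole document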
    ThrowingRunnable runnable =
        () ->
            mapper.parse(
                "test",
                "type",
                "1",
                XContentFactory.jsonBuilder()
                    .startObject()
                    .field("field", ":1")
                    .endObject()
                    .bytes());
    MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
    assertThat(e.getCause().getMessage(), containsString("':1' is not an IP string literal"));

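    // with ignore_malformed enabled, the same document parses but the malformed value is dropped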
    mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .field("ignore_malformed", true)
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));

    ParsedDocument doc =
        mapper2.parse(
            "test",
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().field("field", ":1").endObject().bytes());

    IndexableField[] fields = doc.rootDoc().getFields("field");
    assertEquals(0, fields.length);
  }
 // related to https://github.com/elasticsearch/elasticsearch/issues/5864
 public void testMistypedTypeInRoot() throws IOException {
   String mapping =
       copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mistyped_type_in_root.json");
   try {
     createIndex("test").mapperService().documentMapperParser().parse("test", mapping);
     fail("Expected MapperParsingException");
   } catch (MapperParsingException e) {
     assertThat(
         e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
     assertThat(e.getMessage(), containsString("type=string"));
   }
 }
  @Test
  public void testPointsOnly() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type1")
            .startObject("properties")
            .startObject("location")
            .field("type", "geo_shape")
            .field("tree", randomBoolean() ? "quadtree" : "geohash")
            .field("tree_levels", "6")
            .field("distance_error_pct", "0.01")
            .field("points_only", true)
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    assertAcked(prepareCreate("geo_points_only").addMapping("type1", mapping));
    ensureGreen();

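    // index a random shape; anything other than a point must be rejected by the points_only mapping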
    ShapeBuilder shape = RandomShapeGenerator.createShape(random());
    try {
      index(
          "geo_points_only",
          "type1",
          "1",
          jsonBuilder().startObject().field("location", shape).endObject());
    } catch (MapperParsingException e) {
      // RandomShapeGenerator created something other than a POINT type, verify the correct
      // exception is thrown
      assertThat(e.getCause().getMessage(), containsString("is configured for points only"));
      return;
    }

    refresh();
    // test that point was inserted
    SearchResponse response =
        client()
            .prepareSearch()
            .setQuery(geoIntersectionQuery("location", shape))
            .execute()
            .actionGet();

    assertEquals(1, response.getHits().getTotalHits());
  }
  public void testDocValuesNotAllowed() throws IOException {
    String mapping =
        jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("_all")
            .field("doc_values", true)
            .endObject()
            .endObject()
            .endObject()
            .string();
    try {
      createIndex("test").mapperService().documentMapperParser().parse(mapping);
      fail("expected a MapperParsingException");
    } catch (MapperParsingException e) {
      assertThat(
          e.getDetailedMessage(),
          containsString("[_all] is always tokenized and cannot have doc values"));
    }

    mapping =
        jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("_all")
            .startObject("fielddata")
            .field("format", "doc_values")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
    Settings legacySettings =
        Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
    try {
      createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse(mapping);
      fail("expected a MapperParsingException");
    } catch (MapperParsingException e) {
      assertThat(
          e.getDetailedMessage(),
          containsString("[_all] is always tokenized and cannot have doc values"));
    }
  }
 public void testDynamicDateDetectionIn2xDoesNotSupportEpochs() throws Exception {
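   // epoch formats are not accepted in dynamic_date_formats on 2.x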
   try {
     XContentBuilder mapping =
         jsonBuilder()
             .startObject()
             .startArray("dynamic_date_formats")
             .value("dateOptionalTime")
             .value("epoch_seconds")
             .endArray()
             .endObject();
     createIndex(Version.CURRENT, mapping);
     fail("Expected a MapperParsingException, but did not happen");
   } catch (MapperParsingException e) {
     assertThat(e.getMessage(), containsString("Failed to parse mapping [" + type + "]"));
     assertThat(
         e.getMessage(),
         containsString("Epoch [epoch_seconds] is not supported as dynamic date format"));
   }
 }
  public void testNoDocumentSent() throws Exception {
    IndexService indexService = createIndex("test");
    Settings settings = indexService.getIndexSettings().getSettings();
    DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
    DocumentMapper docMapper =
        doc(
                settings,
                rootObject("person")
                    .add(object("name").add(stringField("first").store(true).index(false))),
                indexService.mapperService())
            .build(indexService.mapperService(), mapperParser);

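    // an entirely empty source must be rejected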
    BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
    try {
      docMapper.parse("test", "person", "1", json).rootDoc();
      fail("this point is never reached");
    } catch (MapperParsingException e) {
      assertThat(e.getMessage(), equalTo("failed to parse, document is empty"));
    }
  }
 public void testHazardousFieldNames() throws Exception {
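   // field names containing dots must be rejected at mapping parse time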
   IndexService indexService = createIndex("test");
   DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
   String mapping =
       XContentFactory.jsonBuilder()
           .startObject()
           .startObject("type")
           .startObject("properties")
           .startObject("foo.bar")
           .field("type", "string")
           .endObject()
           .endObject()
           .endObject()
           .string();
   try {
     mapperParser.parse(mapping);
     fail("Mapping parse should have failed");
   } catch (MapperParsingException e) {
     assertTrue(e.getMessage(), e.getMessage().contains("cannot contain '.'"));
   }
 }
 public void testAcceptDocValuesFormat() throws IOException {
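   // the doc_values_format parameter is tolerated by 1.x but must be rejected from 2.0 onwards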
   String mapping =
       XContentFactory.jsonBuilder()
           .startObject()
           .startObject("type")
           .startObject("properties")
           .startObject("field")
           .field("type", "string")
           .field("doc_values_format", Codec.getDefault().docValuesFormat().getName())
           .endObject()
           .endObject()
           .endObject()
           .endObject()
           .string();
   int i = 0;
   for (Version v : VersionUtils.allVersions()) {
     if (v.onOrAfter(Version.V_2_0_0) == false) {
       // no need to test, we don't support upgrading from these versions
       continue;
     }
     IndexService indexService =
         createIndex(
             "test-" + i++,
             Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build());
     DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
     try {
       parser.parse("type", new CompressedXContent(mapping));
       if (v.onOrAfter(Version.V_2_0_0_beta1)) {
         fail("Elasticsearch 2.0 should not support custom postings formats");
       }
     } catch (MapperParsingException e) {
       if (v.before(Version.V_2_0_0_beta1)) {
         // Elasticsearch 1.x should ignore custom doc values formats
         throw e;
       }
       assertThat(
           e.getMessage(), containsString("unsupported parameters:  [doc_values_format"));
     }
   }
 }
 public void testPercolatorFieldMapperUnMappedField() throws Exception {
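   // a percolator query that references an unmapped field cannot be indexed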
   addQueryMapping();
   MapperParsingException exception =
       expectThrows(
           MapperParsingException.class,
           () -> {
             mapperService
                 .documentMapper(typeName)
                 .parse(
                     "test",
                     typeName,
                     "1",
                     XContentFactory.jsonBuilder()
                         .startObject()
                         .field(fieldName, termQuery("unmapped_field", "value"))
                         .endObject()
                         .bytes());
           });
   assertThat(exception.getCause(), instanceOf(QueryShardException.class));
   assertThat(
       exception.getCause().getMessage(),
       equalTo("No field mapping can be found for the field with name [unmapped_field]"));
 }
  // percolator field can be nested under an object field, but only one query can be specified per
  // document
  public void testNestedPercolatorField() throws Exception {
    String typeName = "another_type";
    String percolatorMapper =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject(typeName)
            .startObject("_field_names")
            .field("enabled", false)
            .endObject() // disabling _field_names makes testing easier
            .startObject("properties")
            .startObject("object_field")
            .field("type", "object")
            .startObject("properties")
            .startObject("query_field")
            .field("type", "percolator")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
    mapperService.merge(
        typeName,
        new CompressedXContent(percolatorMapper),
        MapperService.MergeReason.MAPPING_UPDATE,
        true);

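    // a single percolator query nested under an object field is allowed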
    QueryBuilder queryBuilder = matchQuery("field", "value");
    ParsedDocument doc =
        mapperService
            .documentMapper(typeName)
            .parse(
                "test",
                typeName,
                "1",
                jsonBuilder()
                    .startObject()
                    .startObject("object_field")
                    .field("query_field", queryBuilder)
                    .endObject()
                    .endObject()
                    .bytes());
    assertThat(
        doc.rootDoc().getFields().size(),
        equalTo(8)); // also includes _uid (1), _type (2), _source (1)
    BytesRef queryBuilderAsBytes =
        doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
    assertQueryBuilder(queryBuilderAsBytes, queryBuilder);

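    // the same query supplied through a single-element array of objects also works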
    doc =
        mapperService
            .documentMapper(typeName)
            .parse(
                "test",
                typeName,
                "1",
                jsonBuilder()
                    .startObject()
                    .startArray("object_field")
                    .startObject()
                    .field("query_field", queryBuilder)
                    .endObject()
                    .endArray()
                    .endObject()
                    .bytes());
    assertThat(
        doc.rootDoc().getFields().size(),
        equalTo(8)); // also includes _uid (1), _type (2), _source (1)
    queryBuilderAsBytes =
        doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
    assertQueryBuilder(queryBuilderAsBytes, queryBuilder);

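    // but two percolator queries in one document must be rejected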
    MapperParsingException e =
        expectThrows(
            MapperParsingException.class,
            () -> {
              mapperService
                  .documentMapper(typeName)
                  .parse(
                      "test",
                      typeName,
                      "1",
                      jsonBuilder()
                          .startObject()
                          .startArray("object_field")
                          .startObject()
                          .field("query_field", queryBuilder)
                          .endObject()
                          .startObject()
                          .field("query_field", queryBuilder)
                          .endObject()
                          .endArray()
                          .endObject()
                          .bytes());
            });
    assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
    assertThat(
        e.getCause().getMessage(), equalTo("a document can only contain one percolator query"));
  }
  @Test
  public void testIgnoreMalformedOption() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field1")
            .field("type", "integer")
            .field("ignore_malformed", true)
            .endObject()
            .startObject("field2")
            .field("type", "integer")
            .field("ignore_malformed", false)
            .endObject()
            .startObject("field3")
            .field("type", "integer")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper defaultMapper = MapperTestUtils.newParser().parse(mapping);

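    // field1 has ignore_malformed=true, so its bad value is dropped; field2's valid value is indexed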
    ParsedDocument doc =
        defaultMapper.parse(
            "type",
            "1",
            XContentFactory.jsonBuilder()
                .startObject()
                .field("field1", "a")
                .field("field2", "1")
                .endObject()
                .bytes());
    assertThat(doc.rootDoc().getField("field1"), nullValue());
    assertThat(doc.rootDoc().getField("field2"), notNullValue());

    try {
      defaultMapper.parse(
          "type",
          "1",
          XContentFactory.jsonBuilder().startObject().field("field2", "a").endObject().bytes());
      fail("MapperParsingException expected: field2 has ignore_malformed=false");
    } catch (MapperParsingException e) {
      assertThat(e.getCause(), instanceOf(NumberFormatException.class));
    }

    // Verify that the default is false
    try {
      defaultMapper.parse(
          "type",
          "1",
          XContentFactory.jsonBuilder().startObject().field("field3", "a").endObject().bytes());
      fail("MapperParsingException expected: ignore_malformed defaults to false");
    } catch (MapperParsingException e) {
      assertThat(e.getCause(), instanceOf(NumberFormatException.class));
    }

    // Unless the global ignore_malformed option is set to true
    Settings indexSettings = settingsBuilder().put("index.mapping.ignore_malformed", true).build();
    defaultMapper = MapperTestUtils.newParser(indexSettings).parse(mapping);
    doc =
        defaultMapper.parse(
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().field("field3", "a").endObject().bytes());
    assertThat(doc.rootDoc().getField("field3"), nullValue());

    // This should still throw an exception, since field2 is specifically set to
    // ignore_malformed=false
    try {
      defaultMapper.parse(
          "type",
          "1",
          XContentFactory.jsonBuilder().startObject().field("field2", "a").endObject().bytes());
      fail("MapperParsingException expected: field2 explicitly sets ignore_malformed=false");
    } catch (MapperParsingException e) {
      assertThat(e.getCause(), instanceOf(NumberFormatException.class));
    }
  }
  @Override
  public Mapper parse(ParseContext context) throws IOException {
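    // the attachment field accepts either a bare base64 string or an object carrying
    // _content plus optional metadata overrides (_content_type, _name, _language, ...)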
    byte[] content = null;
    String contentType = null;
    int indexedChars = defaultIndexedChars;
    boolean langDetect = defaultLangDetect;
    String name = null;
    String language = null;

    XContentParser parser = context.parser();
    XContentParser.Token token = parser.currentToken();
    if (token == XContentParser.Token.VALUE_STRING) {
      content = parser.binaryValue();
    } else {
      String currentFieldName = null;
      while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
          currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.VALUE_STRING) {
          if ("_content".equals(currentFieldName)) {
            content = parser.binaryValue();
          } else if ("_content_type".equals(currentFieldName)) {
            contentType = parser.text();
          } else if ("_name".equals(currentFieldName)) {
            name = parser.text();
          } else if ("_language".equals(currentFieldName)) {
            language = parser.text();
          }
        } else if (token == XContentParser.Token.VALUE_NUMBER) {
          if ("_indexed_chars".equals(currentFieldName)
              || "_indexedChars".equals(currentFieldName)) {
            indexedChars = parser.intValue();
          }
        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
          if ("_detect_language".equals(currentFieldName)
              || "_detectLanguage".equals(currentFieldName)) {
            langDetect = parser.booleanValue();
          }
        }
      }
    }

    // Throw a clean exception when no content is provided (fix for #23)
    if (content == null) {
      throw new MapperParsingException("No content is provided.");
    }

    Metadata metadata = new Metadata();
    if (contentType != null) {
      metadata.add(Metadata.CONTENT_TYPE, contentType);
    }
    if (name != null) {
      metadata.add(Metadata.RESOURCE_NAME_KEY, name);
    }

    String parsedContent;
    try {
      parsedContent = TikaImpl.parse(content, metadata, indexedChars);
    } catch (Throwable e) {
      // #18: optionally ignore errors when Tika cannot parse the data
      if (!ignoreErrors) {
        logger.trace("exception caught", e);
        throw new MapperParsingException(
            "Failed to extract ["
                + indexedChars
                + "] characters of text for ["
                + name
                + "] : "
                + e.getMessage(),
            e);
      } else {
        logger.debug(
            "Failed to extract [{}] characters of text for [{}]: [{}]",
            indexedChars,
            name,
            e.getMessage());
        logger.trace("exception caught", e);
      }
      return null;
    }

    context = context.createExternalValueContext(parsedContent);
    contentMapper.parse(context);

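    // detect the content language with Tika's LanguageIdentifier unless one was supplied explicitly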
    if (langDetect) {
      try {
        if (language != null) {
          metadata.add(Metadata.CONTENT_LANGUAGE, language);
        } else {
          LanguageIdentifier identifier = new LanguageIdentifier(parsedContent);
          language = identifier.getLanguage();
        }
        context = context.createExternalValueContext(language);
        languageMapper.parse(context);
      } catch (Throwable t) {
        logger.debug("Cannot detect language: [{}]", t.getMessage());
      }
    }

    if (name != null) {
      try {
        context = context.createExternalValueContext(name);
        nameMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing name: [{}]", e.getMessage());
      }
    }

    if (metadata.get(Metadata.DATE) != null) {
      try {
        context = context.createExternalValueContext(metadata.get(Metadata.DATE));
        dateMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing date: [{}]: [{}]",
              e.getMessage(),
              context.externalValue());
      }
    }

    if (metadata.get(Metadata.TITLE) != null) {
      try {
        context = context.createExternalValueContext(metadata.get(Metadata.TITLE));
        titleMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing title: [{}]: [{}]",
              e.getMessage(),
              context.externalValue());
      }
    }

    if (metadata.get(Metadata.AUTHOR) != null) {
      try {
        context = context.createExternalValueContext(metadata.get(Metadata.AUTHOR));
        authorMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing author: [{}]: [{}]",
              e.getMessage(),
              context.externalValue());
      }
    }

    if (metadata.get(Metadata.KEYWORDS) != null) {
      try {
        context = context.createExternalValueContext(metadata.get(Metadata.KEYWORDS));
        keywordsMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing keywords: [{}]: [{}]",
              e.getMessage(),
              context.externalValue());
      }
    }

    if (contentType == null) {
      contentType = metadata.get(Metadata.CONTENT_TYPE);
    }
    if (contentType != null) {
      try {
        context = context.createExternalValueContext(contentType);
        contentTypeMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing content_type: [{}]: [{}]",
              e.getMessage(),
              context.externalValue());
      }
    }

    int length = content.length;
    // Prefer the CONTENT_LENGTH reported by Tika when available
    if (metadata.get(Metadata.CONTENT_LENGTH) != null) {
      length = Integer.parseInt(metadata.get(Metadata.CONTENT_LENGTH));
    }

    try {
      context = context.createExternalValueContext(length);
      contentLengthMapper.parse(context);
    } catch (MapperParsingException e) {
      if (!ignoreErrors) throw e;
      if (logger.isDebugEnabled())
        logger.debug(
            "Ignoring MapperParsingException catch while parsing content_length: [{}]: [{}]",
            e.getMessage(),
            context.externalValue());
    }

    //        multiFields.parse(this, context);

    return null;
  }