// related to https://github.com/elasticsearch/elasticsearch/issues/5864
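 // A hedged sketch (assumption, not the actual classpath resource): the assertions in this
 // test expect a root-level "type" parameter, so mistyped_type_in_root.json is presumably
 // equivalent to something like:
 //
 //   { "test": { "type": "string", "properties": { ... } } }
 //
 // i.e. "type" used as a mapping parameter at the root instead of inside a field definition.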
 public void testMistypedTypeInRoot() throws IOException {
   String mapping =
       copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mistyped_type_in_root.json");
   try {
     createIndex("test").mapperService().documentMapperParser().parse("test", mapping);
     fail("Expected MapperParsingException");
   } catch (MapperParsingException e) {
     assertThat(
         e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
     assertThat(e.getMessage(), containsString("type=string"));
   }
 }
  public void testIncludeInObjectNotAllowed() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .endObject()
            .endObject()
            .string();
    DocumentMapper docMapper =
        createIndex("test").mapperService().documentMapperParser().parse(mapping);

    try {
      docMapper.parse(
          "test",
          "type",
          "1",
          XContentFactory.jsonBuilder().startObject().field("_all", "foo").endObject().bytes());
      fail("Expected failure to parse metadata field");
    } catch (MapperParsingException e) {
      assertTrue(
          e.getMessage(),
          e.getMessage()
              .contains("Field [_all] is a metadata field and cannot be added inside a document"));
    }
  }
  public void testAllowNoAdditionalSettings() throws Exception {
    addQueryMapping();
    IndexService indexService = createIndex("test1", Settings.EMPTY);
    MapperService mapperService = indexService.mapperService();

    String percolatorMapper =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject(typeName)
            .startObject("properties")
            .startObject(fieldName)
            .field("type", "percolator")
            .field("index", "no")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
    try {
      mapperService.merge(
          typeName,
          new CompressedXContent(percolatorMapper),
          MapperService.MergeReason.MAPPING_UPDATE,
          true);
      fail("MapperParsingException expected");
    } catch (MapperParsingException e) {
      assertThat(
          e.getMessage(),
          equalTo(
              "Mapping definition for ["
                  + fieldName
                  + "] has unsupported parameters:  [index : no]"));
    }
  }
  @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/8802")
  public void testBrokenMapping() throws Exception {
     // clean up all templates set up by the framework.
    client().admin().indices().prepareDeleteTemplate("*").get();

     // check that getting all templates returns nothing when none are defined.
    GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get();
    assertThat(response.getIndexTemplates(), empty());

    MapperParsingException e =
        expectThrows(
            MapperParsingException.class,
            () ->
                client()
                    .admin()
                    .indices()
                    .preparePutTemplate("template_1")
                    .setTemplate("te*")
                    .addMapping("type1", "abcde")
                    .get());
    assertThat(e.getMessage(), containsString("Failed to parse mapping "));

    response = client().admin().indices().prepareGetTemplates().get();
    assertThat(response.getIndexTemplates(), hasSize(0));
  }
  public void testCombineTemplates() throws Exception {
     // clean up all templates set up by the framework.
    client().admin().indices().prepareDeleteTemplate("*").get();

     // check that getting all templates returns nothing when none are defined.
    GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get();
    assertThat(response.getIndexTemplates(), empty());

     // Now split a complete mapping across two separate templates; applying the second should fail.
    // base template
    client()
        .admin()
        .indices()
        .preparePutTemplate("template_1")
        .setTemplate("*")
        .setSettings(
            "    {\n"
                + "        \"index\" : {\n"
                + "            \"analysis\" : {\n"
                + "                \"analyzer\" : {\n"
                + "                    \"custom_1\" : {\n"
                + "                        \"tokenizer\" : \"whitespace\"\n"
                + "                    }\n"
                + "                }\n"
                + "            }\n"
                + "         }\n"
                + "    }\n")
        .get();

     // put a template whose mapping uses the custom_1 analyzer defined only in template_1
    MapperParsingException e =
        expectThrows(
            MapperParsingException.class,
            () ->
                client()
                    .admin()
                    .indices()
                    .preparePutTemplate("template_2")
                    .setTemplate("test*")
                    .setCreate(true)
                    .setOrder(1)
                    .addMapping(
                        "type1",
                        XContentFactory.jsonBuilder()
                            .startObject()
                            .startObject("type1")
                            .startObject("properties")
                            .startObject("field2")
                            .field("type", "string")
                            .field("analyzer", "custom_1")
                            .endObject()
                            .endObject()
                            .endObject()
                            .endObject())
                    .get());
    assertThat(e.getMessage(), containsString("analyzer [custom_1] not found for field [field2]"));

    response = client().admin().indices().prepareGetTemplates().get();
    assertThat(response.getIndexTemplates(), hasSize(1));
  }
 public void testDynamicDateDetectionIn2xDoesNotSupportEpochs() throws Exception {
   try {
     XContentBuilder mapping =
         jsonBuilder()
             .startObject()
             .startArray("dynamic_date_formats")
             .value("dateOptionalTime")
             .value("epoch_seconds")
             .endArray()
             .endObject();
     createIndex(Version.CURRENT, mapping);
     fail("Expected a MapperParsingException, but did not happen");
   } catch (MapperParsingException e) {
     assertThat(e.getMessage(), containsString("Failed to parse mapping [" + type + "]"));
     assertThat(
         e.getMessage(),
         containsString("Epoch [epoch_seconds] is not supported as dynamic date format"));
   }
 }
 public void testHazardousFieldNames() throws Exception {
   IndexService indexService = createIndex("test");
   DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
   String mapping =
       XContentFactory.jsonBuilder()
           .startObject()
           .startObject("type")
           .startObject("properties")
           .startObject("foo.bar")
           .field("type", "string")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
   try {
     mapperParser.parse(mapping);
     fail("Mapping parse should have failed");
   } catch (MapperParsingException e) {
     assertTrue(e.getMessage(), e.getMessage().contains("cannot contain '.'"));
   }
 }
  public void testNoDocumentSent() throws Exception {
    IndexService indexService = createIndex("test");
    Settings settings = indexService.getIndexSettings().getSettings();
    DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
    DocumentMapper docMapper =
        doc(
                settings,
                rootObject("person")
                    .add(object("name").add(stringField("first").store(true).index(false))),
                indexService.mapperService())
            .build(indexService.mapperService(), mapperParser);

    BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
    try {
      docMapper.parse("test", "person", "1", json).rootDoc();
      fail("this point is never reached");
    } catch (MapperParsingException e) {
      assertThat(e.getMessage(), equalTo("failed to parse, document is empty"));
    }
  }
// Example #9
 public void testAcceptDocValuesFormat() throws IOException {
   String mapping =
       XContentFactory.jsonBuilder()
           .startObject()
           .startObject("type")
           .startObject("properties")
           .startObject("field")
           .field("type", "string")
           .field("doc_values_format", Codec.getDefault().docValuesFormat().getName())
           .endObject()
           .endObject()
           .endObject()
           .endObject()
           .string();
   int i = 0;
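    // Parse the same mapping against an index created as of each supported version:
    // releases from 2.0.0-beta1 onwards are expected to reject the doc_values_format
    // parameter, so the parse call below must throw MapperParsingException for them.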
   for (Version v : VersionUtils.allVersions()) {
     if (v.onOrAfter(Version.V_2_0_0) == false) {
       // no need to test, we don't support upgrading from these versions
       continue;
     }
     IndexService indexService =
         createIndex(
             "test-" + i++,
             Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build());
     DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
     try {
       parser.parse("type", new CompressedXContent(mapping));
       if (v.onOrAfter(Version.V_2_0_0_beta1)) {
         fail("Elasticsearch 2.0 should not support custom postings formats");
       }
     } catch (MapperParsingException e) {
       if (v.before(Version.V_2_0_0_beta1)) {
          // Elasticsearch 1.x should ignore custom doc values formats
         throw e;
       }
       Assert.assertThat(
           e.getMessage(), containsString("unsupported parameters:  [doc_values_format"));
     }
   }
 }
// Example #10
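  /**
   * Parses an attachment field. The field value may be either a bare base64 string
   * (the raw document bytes) or an object carrying the optional sub-fields read in
   * the loop below: _content, _content_type, _name, _language, _indexed_chars
   * (or _indexedChars) and _detect_language (or _detectLanguage). The raw bytes are
   * run through Tika, and the extracted text plus selected metadata entries are fed
   * to the dedicated sub-field mappers via external value contexts. Always returns
   * null because every field is added through those sub-mappers.
   */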
  @Override
  public Mapper parse(ParseContext context) throws IOException {
    byte[] content = null;
    String contentType = null;
    int indexedChars = defaultIndexedChars;
    boolean langDetect = defaultLangDetect;
    String name = null;
    String language = null;

    XContentParser parser = context.parser();
    XContentParser.Token token = parser.currentToken();
    if (token == XContentParser.Token.VALUE_STRING) {
      content = parser.binaryValue();
    } else {
      String currentFieldName = null;
      while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
          currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.VALUE_STRING) {
          if ("_content".equals(currentFieldName)) {
            content = parser.binaryValue();
          } else if ("_content_type".equals(currentFieldName)) {
            contentType = parser.text();
          } else if ("_name".equals(currentFieldName)) {
            name = parser.text();
          } else if ("_language".equals(currentFieldName)) {
            language = parser.text();
          }
        } else if (token == XContentParser.Token.VALUE_NUMBER) {
          if ("_indexed_chars".equals(currentFieldName)
              || "_indexedChars".equals(currentFieldName)) {
            indexedChars = parser.intValue();
          }
        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
          if ("_detect_language".equals(currentFieldName)
              || "_detectLanguage".equals(currentFieldName)) {
            langDetect = parser.booleanValue();
          }
        }
      }
    }

    // Throw a clean exception when no content is provided (fix for #23)
    if (content == null) {
      throw new MapperParsingException("No content is provided.");
    }

    Metadata metadata = new Metadata();
    if (contentType != null) {
      metadata.add(Metadata.CONTENT_TYPE, contentType);
    }
    if (name != null) {
      metadata.add(Metadata.RESOURCE_NAME_KEY, name);
    }

    String parsedContent;
    try {
      parsedContent = TikaImpl.parse(content, metadata, indexedChars);
    } catch (Throwable e) {
      // #18: optionally ignore errors when Tika fails to parse the data
      if (!ignoreErrors) {
        logger.trace("exception caught", e);
        throw new MapperParsingException(
            "Failed to extract ["
                + indexedChars
                + "] characters of text for ["
                + name
                + "] : "
                + e.getMessage(),
            e);
      } else {
        logger.debug(
            "Failed to extract [{}] characters of text for [{}]: [{}]",
            indexedChars,
            name,
            e.getMessage());
        logger.trace("exception caught", e);
      }
      return null;
    }

    context = context.createExternalValueContext(parsedContent);
    contentMapper.parse(context);
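
    // Record the document language when _detect_language is enabled: prefer an explicitly
    // supplied _language value, otherwise let Tika's LanguageIdentifier guess it from the
    // extracted text.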

    if (langDetect) {
      try {
        if (language != null) {
          metadata.add(Metadata.CONTENT_LANGUAGE, language);
        } else {
          LanguageIdentifier identifier = new LanguageIdentifier(parsedContent);
          language = identifier.getLanguage();
        }
        context = context.createExternalValueContext(language);
        languageMapper.parse(context);
      } catch (Throwable t) {
        logger.debug("Cannot detect language: [{}]", t.getMessage());
      }
    }

    if (name != null) {
      try {
        context = context.createExternalValueContext(name);
        nameMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing name: [{}]", e.getMessage());
      }
    }
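
    // Map selected Tika metadata entries (date, title, author, keywords, content type,
    // content length) into their dedicated sub-field mappers; failures on individual
    // fields are only propagated when ignoreErrors is false.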

    if (metadata.get(Metadata.DATE) != null) {
      try {
        context = context.createExternalValueContext(metadata.get(Metadata.DATE));
        dateMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing date: [{}]: [{}]",
              e.getMessage(),
              context.externalValue());
      }
    }

    if (metadata.get(Metadata.TITLE) != null) {
      try {
        context = context.createExternalValueContext(metadata.get(Metadata.TITLE));
        titleMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing title: [{}]: [{}]",
              e.getMessage(),
              context.externalValue());
      }
    }

    if (metadata.get(Metadata.AUTHOR) != null) {
      try {
        context = context.createExternalValueContext(metadata.get(Metadata.AUTHOR));
        authorMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing author: [{}]: [{}]",
              e.getMessage(),
              context.externalValue());
      }
    }

    if (metadata.get(Metadata.KEYWORDS) != null) {
      try {
        context = context.createExternalValueContext(metadata.get(Metadata.KEYWORDS));
        keywordsMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing keywords: [{}]: [{}]",
              e.getMessage(),
              context.externalValue());
      }
    }

    if (contentType == null) {
      contentType = metadata.get(Metadata.CONTENT_TYPE);
    }
    if (contentType != null) {
      try {
        context = context.createExternalValueContext(contentType);
        contentTypeMapper.parse(context);
      } catch (MapperParsingException e) {
        if (!ignoreErrors) throw e;
        if (logger.isDebugEnabled())
          logger.debug(
              "Ignoring MapperParsingException catch while parsing content_type: [{}]: [{}]",
              e.getMessage(),
              context.externalValue());
      }
    }

    int length = content.length;
    // If we have CONTENT_LENGTH from Tika we use it
    if (metadata.get(Metadata.CONTENT_LENGTH) != null) {
      length = Integer.parseInt(metadata.get(Metadata.CONTENT_LENGTH));
    }

    try {
      context = context.createExternalValueContext(length);
      contentLengthMapper.parse(context);
    } catch (MapperParsingException e) {
      if (!ignoreErrors) throw e;
      if (logger.isDebugEnabled())
        logger.debug(
            "Ignoring MapperParsingException catch while parsing content_length: [{}]: [{}]",
            e.getMessage(),
            context.externalValue());
    }

    //        multiFields.parse(this, context);

    return null;
  }
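
  // A minimal usage sketch (assumption, not from the source): the kind of document body the
  // parse() method above accepts. The field name "file" and the variable base64Pdf are
  // hypothetical; the sub-field names mirror the ones read in the parsing loop above.
  //
  //   XContentBuilder doc = XContentFactory.jsonBuilder()
  //       .startObject()
  //           .startObject("file")
  //               .field("_content", base64Pdf)               // raw document bytes, base64 in JSON
  //               .field("_content_type", "application/pdf")
  //               .field("_name", "report.pdf")
  //               .field("_language", "en")
  //               .field("_indexed_chars", 100000)
  //               .field("_detect_language", false)
  //           .endObject()
  //       .endObject();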