public void testMergingMappings() throws Exception {
    String enabledMapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("_field_names")
            .field("enabled", true)
            .endObject()
            .endObject()
            .endObject()
            .string();
    String disabledMapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("_field_names")
            .field("enabled", false)
            .endObject()
            .endObject()
            .endObject()
            .string();
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();

    DocumentMapper mapperEnabled = parser.parse(enabledMapping);
    DocumentMapper mapperDisabled = parser.parse(disabledMapping);
    mapperEnabled.merge(mapperDisabled, DocumentMapper.MergeFlags.mergeFlags().simulate(false));
    assertFalse(mapperEnabled.rootMapper(FieldNamesFieldMapper.class).enabled());

    mapperEnabled = parser.parse(enabledMapping);
    mapperDisabled.merge(mapperEnabled, DocumentMapper.MergeFlags.mergeFlags().simulate(false));
    assertTrue(mapperEnabled.rootMapper(FieldNamesFieldMapper.class).enabled());
  }
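For reference, the two XContentBuilder chains above serialize to JSON equivalent to the literals below (a sketch; key order is the only assumption):

    // JSON equivalent of the mappings built above
    String enabledJson = "{\"type\":{\"_field_names\":{\"enabled\":true}}}";
    String disabledJson = "{\"type\":{\"_field_names\":{\"enabled\":false}}}";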
Example #2
  public void testEmptyName() throws IOException {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("")
            .field("type", "ip")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    IllegalArgumentException e =
        expectThrows(
            IllegalArgumentException.class,
            () -> parser.parse("type", new CompressedXContent(mapping)));
    assertThat(e.getMessage(), containsString("name cannot be empty string"));

    // before 5.x
    Version oldVersion =
        VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
    Settings oldIndexSettings =
        Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
    indexService = createIndex("test_old", oldIndexSettings);
    parser = indexService.mapperService().documentMapperParser();

    DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
    assertEquals(mapping, defaultMapper.mappingSource().string());
  }
  public void testThatDisablingWorksWhenMerging() throws Exception {
    String enabledMapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("_index")
            .field("enabled", true)
            .endObject()
            .endObject()
            .endObject()
            .string();
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    DocumentMapper enabledMapper = parser.parse(enabledMapping);

    String disabledMapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("_index")
            .field("enabled", false)
            .endObject()
            .endObject()
            .endObject()
            .string();
    DocumentMapper disabledMapper = parser.parse(disabledMapping);

    enabledMapper.merge(disabledMapper.mapping(), false);
    assertThat(enabledMapper.indexMapper().enabled(), is(false));
  }
  public void testThatMergingFieldMappingAllowsDisabling() throws Exception {
    String mappingWithIndexEnabled =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("_index")
            .field("enabled", true)
            .endObject()
            .endObject()
            .endObject()
            .string();
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    DocumentMapper mapperEnabled = parser.parse(mappingWithIndexEnabled);

    String mappingWithIndexDisabled =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("_index")
            .field("enabled", false)
            .endObject()
            .endObject()
            .endObject()
            .string();
    DocumentMapper mapperDisabled = parser.parse(mappingWithIndexDisabled);

    mapperEnabled.merge(mapperDisabled.mapping(), false);
    assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(false));
  }
Example #5
  public void testIgnoreMalformed() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

    assertEquals(mapping, mapper.mappingSource().toString());

    ThrowingRunnable runnable =
        () ->
            mapper.parse(
                "test",
                "type",
                "1",
                XContentFactory.jsonBuilder()
                    .startObject()
                    .field("field", ":1")
                    .endObject()
                    .bytes());
    MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
    assertThat(e.getCause().getMessage(), containsString("':1' is not an IP string literal"));

    mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .field("ignore_malformed", true)
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));

    ParsedDocument doc =
        mapper2.parse(
            "test",
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().field("field", ":1").endObject().bytes());

    IndexableField[] fields = doc.rootDoc().getFields("field");
    assertEquals(0, fields.length);
  }
Example #6
  public void testIncludeInAll() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .field("include_in_all", true)
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

    assertEquals(mapping, mapper.mappingSource().toString());

    ParsedDocument doc =
        mapper.parse(
            "test",
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().field("field", "::1").endObject().bytes());

    IndexableField[] fields = doc.rootDoc().getFields("_all");
    assertEquals(1, fields.length);
    assertEquals("::1", fields[0].stringValue());

    mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    mapper = parser.parse("type", new CompressedXContent(mapping));

    assertEquals(mapping, mapper.mappingSource().toString());

    doc =
        mapper.parse(
            "test",
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().field("field", "::1").endObject().bytes());

    fields = doc.rootDoc().getFields("_all");
    assertEquals(0, fields.length);
  }
Example #7
  public void testAttributes() throws Exception {
    String mapping =
        copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    DocumentMapper docMapper = parser.parse(mapping);

    assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));

    String builtMapping = docMapper.mappingSource().string();
    DocumentMapper builtDocMapper = parser.parse(builtMapping);
    assertThat((String) builtDocMapper.meta().get("param1"), equalTo("value1"));
  }
  @Before
  public void createMapper() throws IOException {
    DocumentMapperParser mapperParser =
        MapperTestUtils.newMapperService(
                createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper())
            .documentMapperParser();

    String mapping =
        copyToStringFromClasspath(
            "/org/elasticsearch/index/mapper/attachment/test/unit/various-doc/test-mapping.json");
    docMapper = mapperParser.parse("person", new CompressedXContent(mapping));
  }
Example #9
  public void testNoDocValues() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .field("doc_values", false)
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

    assertEquals(mapping, mapper.mappingSource().toString());

    ParsedDocument doc =
        mapper.parse(
            "test",
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().field("field", "::1").endObject().bytes());

    IndexableField[] fields = doc.rootDoc().getFields("field");
    assertEquals(1, fields.length);
    IndexableField pointField = fields[0];
    assertEquals(1, pointField.fieldType().pointDimensionCount());
    assertEquals(
        new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))),
        pointField.binaryValue());
  }
Example #10
  public void testNotIndexed() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .field("index", false)
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

    assertEquals(mapping, mapper.mappingSource().toString());

    ParsedDocument doc =
        mapper.parse(
            "test",
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().field("field", "::1").endObject().bytes());

    IndexableField[] fields = doc.rootDoc().getFields("field");
    assertEquals(1, fields.length);
    IndexableField dvField = fields[0];
    assertEquals(DocValuesType.SORTED_SET, dvField.fieldType().docValuesType());
  }
Example #11
  public void testSerializeDefaults() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
    IpFieldMapper mapper = (IpFieldMapper) docMapper.root().getMapper("field");
    XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
    mapper.doXContentBody(builder, true, ToXContent.EMPTY_PARAMS);
    String got = builder.endObject().string();

    // it would be nice to check the entire serialized default mapper, but right now it
    // picks up a whole lot of bogus settings from calling super.doXContentBody...
    assertTrue(got, got.contains("\"null_value\":null"));
    assertTrue(got, got.contains("\"ignore_malformed\":false"));
    assertTrue(got, got.contains("\"include_in_all\":false"));
  }
  @Test
  public void testMerge() throws IOException {
    String stage1Mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("person")
            .startObject("properties")
            .startObject("tc")
            .field("type", "token_count")
            .field("analyzer", "keyword")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    DocumentMapper stage1 = parser.parse(stage1Mapping);

    String stage2Mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("person")
            .startObject("properties")
            .startObject("tc")
            .field("type", "token_count")
            .field("analyzer", "standard")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
    DocumentMapper stage2 = parser.parse(stage2Mapping);

    MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false);
    assertThat(mergeResult.hasConflicts(), equalTo(false));
    // Just simulated so merge hasn't happened yet
    assertThat(
        ((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(),
        equalTo("keyword"));

    mergeResult = stage1.merge(stage2.mapping(), false, false);
    assertThat(mergeResult.hasConflicts(), equalTo(false));
    // Not simulated this time, so the merge has actually been applied
    assertThat(
        ((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(),
        equalTo("standard"));
  }
  public void testTypeLevel() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("_all")
            .field("enabled", false)
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    assertThat(mapper.type(), equalTo("type"));
    assertThat(mapper.allFieldMapper().enabled(), equalTo(false));
  }
Example #14
  public void testSimpleAllMappersWithReparse() throws Exception {
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
    DocumentMapper docMapper = parser.parse(mapping);
    String builtMapping = docMapper.mappingSource().string();
    // reparse it
    DocumentMapper builtDocMapper = parser.parse(builtMapping);
    byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
    Document doc = builtDocMapper.parse("test", "person", "1", new BytesArray(json)).rootDoc();

    AllField field = (AllField) doc.getField("_all");
    AllEntries allEntries = field.getAllEntries();
    assertThat(allEntries.fields().toString(), allEntries.fields().size(), equalTo(3));
    assertThat(allEntries.fields().contains("address.last.location"), equalTo(true));
    assertThat(allEntries.fields().contains("name.last"), equalTo(true));
    assertThat(allEntries.fields().contains("simple1"), equalTo(true));
    assertThat(field.fieldType().omitNorms(), equalTo(true));
  }
Example #15
 public void testParseToJsonAndParse() throws Exception {
   String mapping =
       copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
   DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
   DocumentMapper docMapper = parser.parse(mapping);
   String builtMapping = docMapper.mappingSource().string();
   // reparse it
   DocumentMapper builtDocMapper = parser.parse(builtMapping);
   BytesReference json =
       new BytesArray(
           copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
   Document doc = builtDocMapper.parse("test", "person", "1", json).rootDoc();
   assertThat(
       doc.get(docMapper.uidMapper().fieldType().names().indexName()),
       equalTo(Uid.createUid("person", "1")));
   assertThat(
       doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()),
       equalTo("shay"));
 }
Example #16
 // issue https://github.com/elasticsearch/elasticsearch/issues/5864
 // test that RootObjectMapping still works
 public void testRootObjectMapperPropertiesDoNotCauseException() throws IOException {
   DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
   String mapping =
       copyToStringFromClasspath(
           "/org/elasticsearch/index/mapper/all/type_dynamic_template_mapping.json");
   parser.parse("test", mapping);
   mapping =
       copyToStringFromClasspath(
           "/org/elasticsearch/index/mapper/all/type_dynamic_date_formats_mapping.json");
   parser.parse("test", mapping);
   mapping =
       copyToStringFromClasspath(
           "/org/elasticsearch/index/mapper/all/type_date_detection_mapping.json");
   parser.parse("test", mapping);
   mapping =
       copyToStringFromClasspath(
           "/org/elasticsearch/index/mapper/all/type_numeric_detection_mapping.json");
   parser.parse("test", mapping);
 }
Example #17
  @Test
  public void testSimpleMappings() throws Exception {
    String mapping =
        copyToStringFromClasspath(
            "/org/elasticsearch/index/mapper/attachment/test/unit/date/date-mapping.json");
    DocumentMapper docMapper = mapperParser.parse(mapping);

    // The file.date field defined in the mapping should be kept as a string
    assertThat(
        docMapper.mappers().fullName("file.date").mapper(), instanceOf(StringFieldMapper.class));
  }
Example #18
 public void testHazardousFieldNames() throws Exception {
   IndexService indexService = createIndex("test");
   DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
   String mapping =
       XContentFactory.jsonBuilder()
           .startObject()
           .startObject("type")
           .startObject("properties")
           .startObject("foo.bar")
           .field("type", "string")
           .endObject()
           .endObject()
           .endObject()
           .endObject()
           .string();
   try {
     mapperParser.parse(mapping);
     fail("Mapping parse should have failed");
   } catch (MapperParsingException e) {
     assertTrue(e.getMessage(), e.getMessage().contains("cannot contain '.'"));
   }
 }
Example #19
 public void testAcceptDocValuesFormat() throws IOException {
   String mapping =
       XContentFactory.jsonBuilder()
           .startObject()
           .startObject("type")
           .startObject("properties")
           .startObject("field")
           .field("type", "string")
           .field("doc_values_format", Codec.getDefault().docValuesFormat().getName())
           .endObject()
           .endObject()
           .endObject()
           .endObject()
           .string();
   int i = 0;
   for (Version v : VersionUtils.allVersions()) {
     if (v.onOrAfter(Version.V_2_0_0) == false) {
       // no need to test, we don't support upgrading from these versions
       continue;
     }
     IndexService indexService =
         createIndex(
             "test-" + i++,
             Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build());
     DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
     try {
       parser.parse("type", new CompressedXContent(mapping));
       if (v.onOrAfter(Version.V_2_0_0_beta1)) {
         fail("Elasticsearch 2.0 should not support custom postings formats");
       }
     } catch (MapperParsingException e) {
       if (v.before(Version.V_2_0_0_beta1)) {
          // Elasticsearch 1.x should ignore custom doc values formats
         throw e;
       }
       Assert.assertThat(
           e.getMessage(), containsString("unsupported parameters:  [doc_values_format"));
     }
   }
 }
Example #20
 @Override
 public Mapper.Builder parse(
     String fieldName, Map<String, Object> node, ParserContext parserContext)
     throws MapperParsingException {
   TextFieldMapper.Builder builder = new TextFieldMapper.Builder(fieldName);
   builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
   builder
       .fieldType()
       .setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
   builder
       .fieldType()
       .setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
   parseTextField(builder, fieldName, node, parserContext);
   for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator();
       iterator.hasNext(); ) {
     Map.Entry<String, Object> entry = iterator.next();
     String propName = entry.getKey();
     Object propNode = entry.getValue();
     if (propName.equals("position_increment_gap")) {
       int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
       builder.positionIncrementGap(newPositionIncrementGap);
       iterator.remove();
     } else if (propName.equals("fielddata")) {
       builder.fielddata(XContentMapValues.nodeBooleanValue(propNode));
       iterator.remove();
     } else if (propName.equals("eager_global_ordinals")) {
       builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode));
       iterator.remove();
     } else if (propName.equals("fielddata_frequency_filter")) {
       Map<?, ?> frequencyFilter = (Map<?, ?>) propNode;
       double minFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("min"), 0);
       double maxFrequency =
           XContentMapValues.nodeDoubleValue(frequencyFilter.remove("max"), Integer.MAX_VALUE);
       int minSegmentSize =
           XContentMapValues.nodeIntegerValue(frequencyFilter.remove("min_segment_size"), 0);
       builder.fielddataFrequencyFilter(minFrequency, maxFrequency, minSegmentSize);
       DocumentMapperParser.checkNoRemainingFields(
           propName, frequencyFilter, parserContext.indexVersionCreated());
       iterator.remove();
     }
   }
   return builder;
 }
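As a rough illustration of the input this parser consumes (the field values below are invented; the property keys are exactly the ones the loop above removes from the node map), a text field definition could look like:

    // Hypothetical "text" field definition exercising position_increment_gap, fielddata,
    // eager_global_ordinals and fielddata_frequency_filter (min / max / min_segment_size).
    String exampleTextFieldJson =
        "{"
            + "\"type\": \"text\","
            + "\"position_increment_gap\": 100,"
            + "\"fielddata\": true,"
            + "\"eager_global_ordinals\": true,"
            + "\"fielddata_frequency_filter\": {\"min\": 0.001, \"max\": 0.1, \"min_segment_size\": 500}"
            + "}";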
Example #21
  public void testStore() throws Exception {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .field("store", true)
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

    assertEquals(mapping, mapper.mappingSource().toString());

    ParsedDocument doc =
        mapper.parse(
            "test",
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().field("field", "::1").endObject().bytes());

    IndexableField[] fields = doc.rootDoc().getFields("field");
    assertEquals(3, fields.length);
    IndexableField pointField = fields[0];
    assertEquals(1, pointField.fieldType().pointDimensionCount());
    IndexableField dvField = fields[1];
    assertEquals(DocValuesType.SORTED_SET, dvField.fieldType().docValuesType());
    IndexableField storedField = fields[2];
    assertTrue(storedField.fieldType().stored());
    assertEquals(
        new BytesRef(InetAddressPoint.encode(InetAddress.getByName("::1"))),
        storedField.binaryValue());
  }
Example #22
  public static void parseField(
      FieldMapper.Builder builder,
      String name,
      Map<String, Object> fieldNode,
      Mapper.TypeParser.ParserContext parserContext) {
    NamedAnalyzer indexAnalyzer = builder.fieldType().indexAnalyzer();
    NamedAnalyzer searchAnalyzer = builder.fieldType().searchAnalyzer();
    Version indexVersionCreated = parserContext.indexVersionCreated();
    for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator();
        iterator.hasNext(); ) {
      Map.Entry<String, Object> entry = iterator.next();
      final String propName = Strings.toUnderscoreCase(entry.getKey());
      final Object propNode = entry.getValue();
      if (propName.equals("index_name") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
        builder.indexName(propNode.toString());
        iterator.remove();
      } else if (propName.equals("store")) {
        builder.store(parseStore(name, propNode.toString()));
        iterator.remove();
      } else if (propName.equals("index")) {
        parseIndex(name, propNode.toString(), builder);
        iterator.remove();
      } else if (propName.equals("tokenized")) {
        builder.tokenized(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals(DOC_VALUES)) {
        builder.docValues(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("term_vector")) {
        parseTermVector(name, propNode.toString(), builder);
        iterator.remove();
      } else if (propName.equals("boost")) {
        builder.boost(nodeFloatValue(propNode));
        iterator.remove();
      } else if (propName.equals("store_term_vectors")) {
        builder.storeTermVectors(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("store_term_vector_offsets")) {
        builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("store_term_vector_positions")) {
        builder.storeTermVectorPositions(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("store_term_vector_payloads")) {
        builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals(CQL_COLLECTION)) {
        switch (StringUtils.lowerCase(propNode.toString())) {
          case "list":
            builder.cqlCollection(CqlCollection.LIST);
            break;
          case "set":
            builder.cqlCollection(CqlCollection.SET);
            break;
          case "singleton":
            builder.cqlCollection(CqlCollection.SINGLETON);
            break;
        }
        iterator.remove();
      } else if (propName.equals(CQL_STRUCT)) {
        switch (StringUtils.lowerCase(propNode.toString())) {
          case "map":
            builder.cqlStruct(CqlStruct.MAP);
            break;
          case "udt":
            builder.cqlStruct(CqlStruct.UDT);
            break;
          case "tuple":
            builder.cqlStruct(CqlStruct.TUPLE);
            break;
        }
        iterator.remove();
      } else if (propName.equals(CQL_PARTIAL_UPDATE)) {
        builder.cqlPartialUpdate(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("omit_norms")) {
        builder.omitNorms(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("norms")) {
        final Map<String, Object> properties = nodeMapValue(propNode, "norms");
        for (Iterator<Entry<String, Object>> propsIterator = properties.entrySet().iterator();
            propsIterator.hasNext(); ) {
          Entry<String, Object> entry2 = propsIterator.next();
          final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
          final Object propNode2 = entry2.getValue();
          if (propName2.equals("enabled")) {
            builder.omitNorms(!nodeBooleanValue(propNode2));
            propsIterator.remove();
          } else if (propName2.equals(Loading.KEY)) {
            builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null));
            propsIterator.remove();
          }
        }
        DocumentMapperParser.checkNoRemainingFields(
            propName, properties, parserContext.indexVersionCreated());
        iterator.remove();
      } else if (propName.equals("omit_term_freq_and_positions")) {
        final IndexOptions op =
            nodeBooleanValue(propNode)
                ? IndexOptions.DOCS
                : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
        if (indexVersionCreated.onOrAfter(Version.V_1_0_0_RC2)) {
          throw new ElasticsearchParseException(
              "'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs']  instead");
        }
        // deprecated option for BW compat
        builder.indexOptions(op);
        iterator.remove();
      } else if (propName.equals("index_options")) {
        builder.indexOptions(nodeIndexOptionValue(propNode));
        iterator.remove();
      } else if (propName.equals("analyzer")
          || // for backcompat, reading old indexes, remove for v3.0
          propName.equals("index_analyzer") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {

        NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
        if (analyzer == null) {
          throw new MapperParsingException(
              "analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
        }
        indexAnalyzer = analyzer;
        iterator.remove();
      } else if (propName.equals("search_analyzer")) {
        NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
        if (analyzer == null) {
          throw new MapperParsingException(
              "analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
        }
        searchAnalyzer = analyzer;
        iterator.remove();
      } else if (propName.equals("include_in_all")) {
        builder.includeInAll(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("postings_format")
          && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
        // ignore for old indexes
        iterator.remove();
      } else if (propName.equals("doc_values_format")
          && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
        // ignore for old indexes
        iterator.remove();
      } else if (propName.equals("similarity")) {
        builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
        iterator.remove();
      } else if (propName.equals("fielddata")) {
        final Settings settings =
            Settings.builder()
                .put(SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(propNode, "fielddata")))
                .build();
        builder.fieldDataSettings(settings);
        iterator.remove();
      } else if (propName.equals("copy_to")) {
        if (parserContext.isWithinMultiField()) {
          if (indexVersionCreated.after(Version.V_2_1_0)
              || (indexVersionCreated.after(Version.V_2_0_1)
                  && indexVersionCreated.before(Version.V_2_1_0))) {
            throw new MapperParsingException(
                "copy_to in multi fields is not allowed. Found the copy_to in field ["
                    + name
                    + "] which is within a multi field.");
          } else {
            ESLoggerFactory.getLogger("mapping [" + parserContext.type() + "]")
                .warn(
                    "Found a copy_to in field ["
                        + name
                        + "] which is within a multi field. This feature has been removed and the copy_to will be ignored.");
            // we still parse this; otherwise the message would only appear once and the
            // copy_to would be removed, and afterwards it would appear again. Better to
            // have it always.
          }
        }
        parseCopyFields(propNode, builder);
        iterator.remove();
      }
    }

    if (indexAnalyzer == null) {
      if (searchAnalyzer != null) {
        // If the index was created before 2.0 then we are trying to upgrade the mappings,
        // so use the default indexAnalyzer instead of throwing an exception so the user
        // is able to upgrade.
        if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
          indexAnalyzer = parserContext.analysisService().defaultIndexAnalyzer();
        } else {
          throw new MapperParsingException(
              "analyzer on field [" + name + "] must be set when search_analyzer is set");
        }
      }
    } else if (searchAnalyzer == null) {
      searchAnalyzer = indexAnalyzer;
    }
    builder.indexAnalyzer(indexAnalyzer);
    builder.searchAnalyzer(searchAnalyzer);
  }
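A sketch of a field node that touches several of the branches above (the values are illustrative; the keys are taken directly from the loop):

    // Illustrative field definition for parseField(...): store, index, boost, norms,
    // index_options, analyzer and copy_to are each handled by a branch in the loop above.
    String exampleFieldNodeJson =
        "{"
            + "\"type\": \"string\","
            + "\"store\": true,"
            + "\"index\": \"analyzed\","
            + "\"boost\": 2.0,"
            + "\"norms\": {\"enabled\": false},"
            + "\"index_options\": \"docs\","
            + "\"analyzer\": \"standard\","
            + "\"copy_to\": \"all_text\""
            + "}";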
Example #23
  public void testRandom() throws Exception {
    boolean omitNorms = false;
    boolean stored = false;
    boolean enabled = true;
    boolean tv_stored = false;
    boolean tv_payloads = false;
    boolean tv_offsets = false;
    boolean tv_positions = false;
    String similarity = null;
    XContentBuilder mappingBuilder = jsonBuilder();
    mappingBuilder.startObject().startObject("test");
    List<Tuple<String, Boolean>> booleanOptionList = new ArrayList<>();
    boolean allDefault = true;
    if (frequently()) {
      allDefault = false;
      mappingBuilder.startObject("_all");
      if (randomBoolean()) {
        booleanOptionList.add(new Tuple<>("omit_norms", omitNorms = randomBoolean()));
      }
      if (randomBoolean()) {
        booleanOptionList.add(new Tuple<>("store", stored = randomBoolean()));
      }
      if (randomBoolean()) {
        booleanOptionList.add(new Tuple<>("store_term_vectors", tv_stored = randomBoolean()));
      }
      if (randomBoolean()) {
        booleanOptionList.add(new Tuple<>("enabled", enabled = randomBoolean()));
      }
      if (randomBoolean()) {
        booleanOptionList.add(
            new Tuple<>("store_term_vector_offsets", tv_offsets = randomBoolean()));
      }
      if (randomBoolean()) {
        booleanOptionList.add(
            new Tuple<>("store_term_vector_positions", tv_positions = randomBoolean()));
      }
      if (randomBoolean()) {
        booleanOptionList.add(
            new Tuple<>("store_term_vector_payloads", tv_payloads = randomBoolean()));
      }
      Collections.shuffle(booleanOptionList, getRandom());
      for (Tuple<String, Boolean> option : booleanOptionList) {
        mappingBuilder.field(option.v1(), option.v2().booleanValue());
      }
      tv_stored |= tv_positions || tv_payloads || tv_offsets;
      if (randomBoolean()) {
        mappingBuilder.field("similarity", similarity = randomBoolean() ? "BM25" : "TF/IDF");
      }
      mappingBuilder.endObject();
    }

    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    String mapping = mappingBuilder.endObject().endObject().bytes().toUtf8();
    logger.info(mapping);
    DocumentMapper docMapper = parser.parse(mapping);
    String builtMapping = docMapper.mappingSource().string();
    // reparse it
    DocumentMapper builtDocMapper = parser.parse(builtMapping);

    byte[] json =
        jsonBuilder()
            .startObject()
            .field("foo", "bar")
            .field("foobar", "foobar")
            .endObject()
            .bytes()
            .toBytes();
    Document doc = builtDocMapper.parse("test", "test", "1", new BytesArray(json)).rootDoc();
    AllField field = (AllField) doc.getField("_all");
    if (enabled) {
      assertThat(field.fieldType().omitNorms(), equalTo(omitNorms));
      assertThat(field.fieldType().stored(), equalTo(stored));
      assertThat(field.fieldType().storeTermVectorOffsets(), equalTo(tv_offsets));
      assertThat(field.fieldType().storeTermVectorPayloads(), equalTo(tv_payloads));
      assertThat(field.fieldType().storeTermVectorPositions(), equalTo(tv_positions));
      assertThat(field.fieldType().storeTermVectors(), equalTo(tv_stored));
      AllEntries allEntries = field.getAllEntries();
      assertThat(allEntries.fields().size(), equalTo(2));
      assertThat(allEntries.fields().contains("foobar"), equalTo(true));
      assertThat(allEntries.fields().contains("foo"), equalTo(true));
      if (!stored) {
        assertThat(field.stringValue(), nullValue());
      }
      String text = stored ? field.stringValue() : "bar foobar";
      assertThat(text.trim(), equalTo(allEntries.buildText().trim()));
    } else {
      assertThat(field, nullValue());
    }
    if (similarity == null || similarity.equals("TF/IDF")) {
      assertThat(builtDocMapper.allFieldMapper().fieldType().similarity(), nullValue());
    } else {
      assertThat(
          similarity, equalTo(builtDocMapper.allFieldMapper().fieldType().similarity().name()));
    }
    if (allDefault) {
      BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(0);
      XContentBuilder b = new XContentBuilder(XContentType.JSON.xContent(), bytesStreamOutput);
      XContentBuilder xContentBuilder =
          builtDocMapper.allFieldMapper().toXContent(b, ToXContent.EMPTY_PARAMS);
      xContentBuilder.flush();
      assertThat(bytesStreamOutput.size(), equalTo(0));
    }
  }
Example #24
        @Override
        public Mapper.Builder<?, ?> parse(
            String name, Map<String, Object> node, ParserContext parserContext)
            throws MapperParsingException {
          ContentPath.Type pathType = null;
          FieldMapper.Builder mainFieldBuilder = null;
          List<FieldMapper.Builder> fields = null;
          String firstType = null;

          for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator();
              iterator.hasNext(); ) {
            Map.Entry<String, Object> entry = iterator.next();
            String fieldName = Strings.toUnderscoreCase(entry.getKey());
            Object fieldNode = entry.getValue();
            if (fieldName.equals("path")
                && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
              pathType = parsePathType(name, fieldNode.toString());
              iterator.remove();
            } else if (fieldName.equals("fields")) {
              Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
              for (Iterator<Map.Entry<String, Object>> fieldsIterator =
                      fieldsNode.entrySet().iterator();
                  fieldsIterator.hasNext(); ) {
                Map.Entry<String, Object> entry1 = fieldsIterator.next();
                String propName = entry1.getKey();
                Map<String, Object> propNode = (Map<String, Object>) entry1.getValue();

                String type;
                Object typeNode = propNode.get("type");
                if (typeNode != null) {
                  type = typeNode.toString();
                  if (firstType == null) {
                    firstType = type;
                  }
                } else {
                  throw new MapperParsingException(
                      "no type specified for property [" + propName + "]");
                }

                Mapper.TypeParser typeParser = parserContext.typeParser(type);
                if (typeParser == null) {
                  throw new MapperParsingException(
                      "no handler for type [" + type + "] declared on field [" + fieldName + "]");
                }
                if (propName.equals(name)) {
                  mainFieldBuilder =
                      (FieldMapper.Builder) typeParser.parse(propName, propNode, parserContext);
                  fieldsIterator.remove();
                } else {
                  if (fields == null) {
                    fields = new ArrayList<>(2);
                  }
                  fields.add(
                      (FieldMapper.Builder) typeParser.parse(propName, propNode, parserContext));
                  fieldsIterator.remove();
                }
              }
              fieldsNode.remove("type");
              DocumentMapperParser.checkNoRemainingFields(
                  fieldName, fieldsNode, parserContext.indexVersionCreated());
              iterator.remove();
            }
          }

          if (mainFieldBuilder == null) {
            if (fields == null) {
              // No fields at all were specified in multi_field, so let's return a
              // non-indexed string field.
              return new StringFieldMapper.Builder(name).index(false);
            }
            Mapper.TypeParser typeParser = parserContext.typeParser(firstType);
            if (typeParser == null) {
              // The first multi field's type is unknown
              mainFieldBuilder = new StringFieldMapper.Builder(name).index(false);
            } else {
              Mapper.Builder substitute =
                  typeParser.parse(name, Collections.<String, Object>emptyMap(), parserContext);
              if (substitute instanceof FieldMapper.Builder) {
                mainFieldBuilder = ((FieldMapper.Builder) substitute).index(false);
              } else {
                // The first multi isn't a core field type
                mainFieldBuilder = new StringFieldMapper.Builder(name).index(false);
              }
            }
          }

          if (fields != null && pathType != null) {
            for (Mapper.Builder field : fields) {
              mainFieldBuilder.addMultiField(field);
            }
            mainFieldBuilder.multiFieldPathType(pathType);
          } else if (fields != null) {
            for (Mapper.Builder field : fields) {
              mainFieldBuilder.addMultiField(field);
            }
          } else if (pathType != null) {
            mainFieldBuilder.multiFieldPathType(pathType);
          }
          return mainFieldBuilder;
        }
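For orientation, a legacy multi_field definition handled by this parser might look like the sketch below (the field names are invented; "path" and "fields" are the keys the loop consumes, and the sub-field named like the field itself becomes the main field):

    // Hypothetical multi_field node: "title" matches the field name and becomes the main
    // field builder; "raw" is added as a multi field.
    String exampleMultiFieldJson =
        "{"
            + "\"type\": \"multi_field\","
            + "\"path\": \"full\","
            + "\"fields\": {"
            + "\"title\": {\"type\": \"string\"},"
            + "\"raw\": {\"type\": \"string\", \"index\": \"not_analyzed\"}"
            + "}"
            + "}";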
  @Test
  public void testMultipleDocsEncryptedLast() throws IOException {
    DocumentMapperParser mapperParser =
        new DocumentMapperParser(
            new Index("test"),
            ImmutableSettings.EMPTY,
            new AnalysisService(new Index("test")),
            null,
            null,
            null);
    mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());

    String mapping =
        copyToStringFromClasspath("/org/elasticsearch/index/mapper/multipledocs/test-mapping.json");
    DocumentMapper docMapper = mapperParser.parse(mapping);
    byte[] html =
        copyToBytesFromClasspath(
            "/org/elasticsearch/index/mapper/xcontent/htmlWithValidDateMeta.html");
    byte[] pdf = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/xcontent/encrypted.pdf");

    BytesReference json =
        jsonBuilder()
            .startObject()
            .field("_id", 1)
            .field("file1", html)
            .field("file2", pdf)
            .endObject()
            .bytes();

    ParseContext.Document doc = docMapper.parse(json).rootDoc();
    assertThat(
        doc.get(docMapper.mappers().smartName("file1").mapper().names().indexName()),
        containsString("World"));
    assertThat(
        doc.get(docMapper.mappers().smartName("file1.title").mapper().names().indexName()),
        equalTo("Hello"));
    assertThat(
        doc.get(docMapper.mappers().smartName("file1.author").mapper().names().indexName()),
        equalTo("kimchy"));
    assertThat(
        doc.get(docMapper.mappers().smartName("file1.keywords").mapper().names().indexName()),
        equalTo("elasticsearch,cool,bonsai"));
    assertThat(
        doc.get(docMapper.mappers().smartName("file1.content_type").mapper().names().indexName()),
        equalTo("text/html; charset=ISO-8859-1"));
    assertThat(
        doc.getField(
                docMapper.mappers().smartName("file1.content_length").mapper().names().indexName())
            .numericValue()
            .longValue(),
        is(344L));

    assertThat(
        doc.get(docMapper.mappers().smartName("file2").mapper().names().indexName()), nullValue());
    assertThat(
        doc.get(docMapper.mappers().smartName("file2.title").mapper().names().indexName()),
        nullValue());
    assertThat(
        doc.get(docMapper.mappers().smartName("file2.author").mapper().names().indexName()),
        nullValue());
    assertThat(
        doc.get(docMapper.mappers().smartName("file2.keywords").mapper().names().indexName()),
        nullValue());
    assertThat(
        doc.get(docMapper.mappers().smartName("file2.content_type").mapper().names().indexName()),
        nullValue());
    assertThat(
        doc.getField(
            docMapper.mappers().smartName("file2.content_length").mapper().names().indexName()),
        nullValue());
  }
Example #26
 @Before
 public void setupMapperParser() {
   mapperParser = MapperTestUtils.newMapperParser();
   mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
 }
  public void testExternalValues() throws Exception {
    Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
    Settings settings =
        Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
    IndexService indexService = createIndex("test", settings);
    MapperRegistry mapperRegistry =
        new MapperRegistry(
            Collections.singletonMap(
                ExternalMapperPlugin.EXTERNAL,
                new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo")),
            Collections.singletonMap(
                ExternalMetadataMapper.CONTENT_TYPE, new ExternalMetadataMapper.TypeParser()));

    DocumentMapperParser parser =
        new DocumentMapperParser(
            indexService.getIndexSettings(),
            indexService.mapperService(),
            indexService.analysisService(),
            indexService.similarityService(),
            mapperRegistry,
            indexService::newQueryShardContext);
    DocumentMapper documentMapper =
        parser.parse(
            "type",
            new CompressedXContent(
                XContentFactory.jsonBuilder()
                    .startObject()
                    .startObject("type")
                    .startObject(ExternalMetadataMapper.CONTENT_TYPE)
                    .endObject()
                    .startObject("properties")
                    .startObject("field")
                    .field("type", "external")
                    .endObject()
                    .endObject()
                    .endObject()
                    .endObject()
                    .string()));

    ParsedDocument doc =
        documentMapper.parse(
            "test",
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject().bytes());

    assertThat(doc.rootDoc().getField("field.bool"), notNullValue());
    assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T"));

    assertThat(doc.rootDoc().getField("field.point"), notNullValue());
    if (version.before(Version.V_2_2_0)) {
      assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
    } else {
      assertThat(
          Long.parseLong(doc.rootDoc().getField("field.point").stringValue()),
          is(GeoEncodingUtils.mortonHash(51.0, 42.0)));
    }

    assertThat(doc.rootDoc().getField("field.shape"), notNullValue());

    assertThat(doc.rootDoc().getField("field.field"), notNullValue());
    assertThat(doc.rootDoc().getField("field.field").stringValue(), is("foo"));

    assertThat(
        doc.rootDoc().getField(ExternalMetadataMapper.FIELD_NAME).stringValue(),
        is(ExternalMetadataMapper.FIELD_VALUE));
  }
Example #28
    protected static void parseProperties(
        ObjectMapper.Builder objBuilder,
        Map<String, Object> propsNode,
        ParserContext parserContext) {
      Iterator<Map.Entry<String, Object>> iterator = propsNode.entrySet().iterator();
      while (iterator.hasNext()) {
        Map.Entry<String, Object> entry = iterator.next();
        String propName = entry.getKey();
        // Should accept empty arrays, as a workaround for clients (PHP, for example)
        // that cannot provide an empty Map.
        boolean isEmptyList =
            entry.getValue() instanceof List && ((List<?>) entry.getValue()).isEmpty();

        if (entry.getValue() instanceof Map) {
          @SuppressWarnings("unchecked")
          Map<String, Object> propNode = (Map<String, Object>) entry.getValue();
          String type;
          Object typeNode = propNode.get("type");
          if (typeNode != null) {
            type = typeNode.toString();
          } else {
            // lets see if we can derive this...
            if (propNode.get("properties") != null) {
              type = ObjectMapper.CONTENT_TYPE;
            } else if (propNode.size() == 1 && propNode.get("enabled") != null) {
              // if there is a single property with only the enabled flag on it,
              // treat it as an object (usually this is enabled=false, meaning
              // nothing under it gets indexed, not even core values)
              type = ObjectMapper.CONTENT_TYPE;
            } else {
              throw new MapperParsingException("No type specified for property [" + propName + "]");
            }
          }

          Mapper.TypeParser typeParser = parserContext.typeParser(type);
          if (typeParser == null) {
            throw new MapperParsingException(
                "No handler for type [" + type + "] declared on field [" + propName + "]");
          }
          objBuilder.add(typeParser.parse(propName, propNode, parserContext));
          propNode.remove("type");
          DocumentMapperParser.checkNoRemainingFields(
              propName, propNode, parserContext.indexVersionCreated());
          iterator.remove();
        } else if (isEmptyList) {
          iterator.remove();
        } else {
          throw new MapperParsingException(
              "Expected a map for property ["
                  + propName
                  + "] but got a "
                  + entry.getValue().getClass().getSimpleName());
        }
      }

      DocumentMapperParser.checkNoRemainingFields(
          propsNode,
          parserContext.indexVersionCreated(),
          "DocType mapping definition has unsupported parameters: ");
    }
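The type-derivation rules above (explicit "type", nested "properties", or a lone "enabled" flag) can be illustrated with a properties node like the sketch below (the field names are invented):

    // Sketch of a properties node for parseProperties(...): "name" declares an explicit type,
    // "address" is derived from its nested "properties", "disabled_part" from its lone
    // "enabled" flag, and "legacy_empty" shows the empty-array workaround that is removed.
    String examplePropertiesJson =
        "{"
            + "\"name\": {\"type\": \"string\"},"
            + "\"address\": {\"properties\": {\"city\": {\"type\": \"string\"}}},"
            + "\"disabled_part\": {\"enabled\": false},"
            + "\"legacy_empty\": []"
            + "}";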
Example #29
  public static boolean parseMultiField(
      FieldMapper.Builder builder,
      String name,
      Mapper.TypeParser.ParserContext parserContext,
      String propName,
      Object propNode) {
    parserContext = parserContext.createMultiFieldContext(parserContext);
    if (propName.equals("path")
        && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
      builder.multiFieldPathType(parsePathType(name, propNode.toString()));
      return true;
    } else if (propName.equals("fields")) {

      final Map<String, Object> multiFieldsPropNodes;

      if (propNode instanceof List && ((List<?>) propNode).isEmpty()) {
        multiFieldsPropNodes = Collections.emptyMap();
      } else if (propNode instanceof Map) {
        multiFieldsPropNodes = (Map<String, Object>) propNode;
      } else {
        throw new MapperParsingException(
            "expected map for property [fields] on field ["
                + propNode
                + "] or "
                + "["
                + propName
                + "] but got a "
                + propNode.getClass());
      }

      for (Map.Entry<String, Object> multiFieldEntry : multiFieldsPropNodes.entrySet()) {
        String multiFieldName = multiFieldEntry.getKey();
        if (multiFieldName.contains(".")) {
          throw new MapperParsingException(
              "Field name ["
                  + multiFieldName
                  + "] which is a multi field of ["
                  + name
                  + "] cannot contain '.'");
        }
        if (!(multiFieldEntry.getValue() instanceof Map)) {
          throw new MapperParsingException(
              "illegal field [" + multiFieldName + "], only fields can be specified inside fields");
        }
        @SuppressWarnings("unchecked")
        Map<String, Object> multiFieldNodes = (Map<String, Object>) multiFieldEntry.getValue();

        String type;
        Object typeNode = multiFieldNodes.get("type");
        if (typeNode != null) {
          type = typeNode.toString();
        } else {
          throw new MapperParsingException(
              "no type specified for property [" + multiFieldName + "]");
        }
        if (type.equals(ObjectMapper.CONTENT_TYPE)
            || type.equals(ObjectMapper.NESTED_CONTENT_TYPE)) {
          throw new MapperParsingException("Type [" + type + "] cannot be used in multi field");
        }

        Mapper.TypeParser typeParser = parserContext.typeParser(type);
        if (typeParser == null) {
          throw new MapperParsingException(
              "no handler for type [" + type + "] declared on field [" + multiFieldName + "]");
        }
        builder.addMultiField(typeParser.parse(multiFieldName, multiFieldNodes, parserContext));
        multiFieldNodes.remove("type");
        DocumentMapperParser.checkNoRemainingFields(
            propName, multiFieldNodes, parserContext.indexVersionCreated());
      }
      return true;
    }
    return false;
  }
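A sketch of the "fields" value this method expects (the sub-field names are invented; each entry must carry its own "type", and object or nested types as well as dots in names are rejected by the checks above):

    // Hypothetical multi-field definitions passed as the "fields" property.
    String exampleFieldsJson =
        "{"
            + "\"raw\": {\"type\": \"string\", \"index\": \"not_analyzed\"},"
            + "\"length\": {\"type\": \"token_count\", \"analyzer\": \"standard\"}"
            + "}";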
Example #30
  public void testNullValue() throws IOException {
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    assertEquals(mapping, mapper.mappingSource().toString());

    ParsedDocument doc =
        mapper.parse(
            "test",
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().nullField("field").endObject().bytes());
    assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));

    mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("field")
            .field("type", "ip")
            .field("null_value", "::1")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    mapper = parser.parse("type", new CompressedXContent(mapping));
    assertEquals(mapping, mapper.mappingSource().toString());

    doc =
        mapper.parse(
            "test",
            "type",
            "1",
            XContentFactory.jsonBuilder().startObject().nullField("field").endObject().bytes());
    IndexableField[] fields = doc.rootDoc().getFields("field");
    assertEquals(2, fields.length);
    IndexableField pointField = fields[0];
    assertEquals(1, pointField.fieldType().pointDimensionCount());
    assertEquals(16, pointField.fieldType().pointNumBytes());
    assertFalse(pointField.fieldType().stored());
    assertEquals(
        new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))),
        pointField.binaryValue());
    IndexableField dvField = fields[1];
    assertEquals(DocValuesType.SORTED_SET, dvField.fieldType().docValuesType());
    assertEquals(
        new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))),
        dvField.binaryValue());
    assertFalse(dvField.fieldType().stored());
  }