public void testField() throws Exception {
    IndexService indexService = createIndex("test");
    DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse(mapping);
    assertEquals(mapping, serialize(mapper));

    Mapper update =
        parse(
            mapper,
            parser,
            XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
    assertNotNull(update);
    // original mapping not modified
    assertEquals(mapping, serialize(mapper));
    // but we have an update
    assertEquals("{\"type\":{\"properties\":{\"foo\":{\"type\":\"string\"}}}}", serialize(update));
  }
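The tests in this listing rely on two helpers that the snippets do not show: serialize, which renders a mapper back to its JSON mapping string, and parse, which runs a document through the mapper and returns the dynamic mapping update (null when the document introduces no new fields). A minimal sketch of serialize, assuming Mapper and DocumentMapper expose toXContent as in Elasticsearch of this era (not the verbatim test helper):

  // Sketch only: render a mapper back to its compact JSON mapping string.
  private String serialize(ToXContent mapper) throws Exception {
    XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
    mapper.toXContent(builder, ToXContent.EMPTY_PARAMS);
    return builder.endObject().string();
  }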
 public void testRejectNorms() throws IOException {
   // not supported as of 5.0
   for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) {
     DocumentMapperParser parser =
         createIndex("index-" + type).mapperService().documentMapperParser();
     String mapping =
         XContentFactory.jsonBuilder()
             .startObject()
             .startObject("type")
             .startObject("properties")
             .startObject("foo")
             .field("type", type)
             .field("norms", random().nextBoolean())
             .endObject()
             .endObject()
             .endObject()
             .endObject()
             .string();
     MapperParsingException e =
         expectThrows(
             MapperParsingException.class,
             () -> parser.parse("type", new CompressedXContent(mapping)));
     assertThat(
         e.getMessage(),
         containsString("Mapping definition for [foo] has unsupported parameters:  [norms"));
   }
 }
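For reference, the builder in this loop produces compact mappings of the following shape (shown here for type long with norms true); the parser rejects them because numeric fields no longer accept the norms parameter as of 5.0:

  {"type":{"properties":{"foo":{"type":"long","norms":true}}}}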
  public void testObject() throws Exception {
    IndexService indexService = createIndex("test");
    DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    assertEquals(mapping, serialize(mapper));

    Mapper update =
        parse(
            mapper,
            parser,
            XContentFactory.jsonBuilder()
                .startObject()
                .startObject("foo")
                .startObject("bar")
                .field("baz", "foo")
                .endObject()
                .endObject()
                .endObject());
    assertNotNull(update);
    // original mapping not modified
    assertEquals(mapping, serialize(mapper));
    // but we have an update
    assertEquals(
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("foo")
            .startObject("properties")
            .startObject("bar")
            .startObject("properties")
            .startObject("baz")
            .field("type", "text")
            .startObject("fields")
            .startObject("keyword")
            .field("type", "keyword")
            .field("ignore_above", 256)
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string(),
        serialize(update));
  }
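Rendered as JSON, the parsed document is {"foo":{"bar":{"baz":"foo"}}}, and the expected dynamic update built above spells out the whole object hierarchy, with baz dynamically mapped as text plus a keyword sub-field:

  {"type":{"properties":{"foo":{"properties":{"bar":{"properties":{"baz":{"type":"text","fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}}}}}}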
  public void testIncremental() throws Exception {
    IndexService indexService = createIndex("test");
    DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
    // Make sure that mapping updates are incremental; this is important for performance,
    // since otherwise every new field introduction would run in linear time with the
    // total number of fields
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("foo")
            .field("type", "text")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    assertEquals(mapping, serialize(mapper));

    Mapper update =
        parse(
            mapper,
            parser,
            XContentFactory.jsonBuilder()
                .startObject()
                .field("foo", "bar")
                .field("bar", "baz")
                .endObject());
    assertNotNull(update);
    // original mapping not modified
    assertEquals(mapping, serialize(mapper));
    // but we have an update
    assertEquals(
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            // foo is NOT in the update
            .startObject("bar")
            .field("type", "text")
            .startObject("fields")
            .startObject("keyword")
            .field("type", "keyword")
            .field("ignore_above", 256)
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string(),
        serialize(update));
  }
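In JSON terms, the document {"foo":"bar","bar":"baz"} touches one already-mapped field (foo) and one new field (bar), and the update contains only the new field:

  {"type":{"properties":{"bar":{"type":"text","fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}}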
  public void testComplexArray() throws Exception {
    IndexService indexService = createIndex("test");
    DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse(mapping);
    assertEquals(mapping, serialize(mapper));

    Mapper update =
        parse(
            mapper,
            parser,
            XContentFactory.jsonBuilder()
                .startObject()
                .startArray("foo")
                .startObject()
                .field("bar", "baz")
                .endObject()
                .startObject()
                .field("baz", 3)
                .endObject()
                .endArray()
                .endObject());
    assertEquals(mapping, serialize(mapper));
    assertEquals(
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("foo")
            .startObject("properties")
            .startObject("bar")
            .field("type", "string")
            .endObject()
            .startObject("baz")
            .field("type", "long")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string(),
        serialize(update));
  }
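Here the document is {"foo":[{"bar":"baz"},{"baz":3}]}: the two objects inside the array are merged into a single mapping for foo, so the expected update is

  {"type":{"properties":{"foo":{"properties":{"bar":{"type":"string"},"baz":{"type":"long"}}}}}}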
Example #6
 public DocumentMapper merge(
     String type, CompressedXContent mappingSource, MergeReason reason, boolean updateAllTypes) {
   if (DEFAULT_MAPPING.equals(type)) {
     // verify we can parse it
     // NOTE: never apply the default here
     DocumentMapper mapper = documentParser.parse(type, mappingSource);
      // still add it as a document mapper so we have it registered and, for example,
      // persisted back into the cluster meta data if needed, or checked for existence
     synchronized (this) {
       mappers = newMapBuilder(mappers).put(type, mapper).map();
     }
     try {
       defaultMappingSource = mappingSource.string();
     } catch (IOException e) {
       throw new ElasticsearchGenerationException("failed to un-compress", e);
     }
     return mapper;
   } else {
     synchronized (this) {
       final boolean applyDefault =
           // the default was already applied if we are recovering
           reason != MergeReason.MAPPING_RECOVERY
               // only apply the default mapping if we don't have the type yet
               && mappers.containsKey(type) == false;
       DocumentMapper mergeWith = parse(type, mappingSource, applyDefault);
       return merge(mergeWith, reason, updateAllTypes);
     }
   }
 }
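A hypothetical call site for this method; the type name and mapping body are illustrative, not taken from the source:

  // Illustrative only: apply a mapping update for a regular (non-default) type.
  // new CompressedXContent(String) can throw IOException, handled by the enclosing method.
  DocumentMapper merged =
      mapperService.merge(
          "my_type",
          new CompressedXContent("{\"my_type\":{\"properties\":{\"field\":{\"type\":\"keyword\"}}}}"),
          MergeReason.MAPPING_UPDATE,
          false);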
Example #7
 public DocumentMapper parse(String mappingType, String mappingSource, boolean applyDefault)
     throws MapperParsingException {
   String defaultMappingSource;
   if (PercolatorService.TYPE_NAME.equals(mappingType)) {
     defaultMappingSource = this.defaultPercolatorMappingSource;
   } else {
     defaultMappingSource = this.defaultMappingSource;
   }
   return documentParser.parse(
       mappingType, mappingSource, applyDefault ? defaultMappingSource : null);
 }
Example #8
 public void add(String type, String mappingSource) {
   if (DEFAULT_MAPPING.equals(type)) {
     // verify we can parse it
     DocumentMapper mapper = documentParser.parse(type, mappingSource);
      // still add it as a document mapper so we have it registered and, for example,
      // persisted back into the cluster meta data if needed, or checked for existence
     synchronized (mutex) {
       mappers = newMapBuilder(mappers).put(type, mapper).map();
     }
     defaultMappingSource = mappingSource;
   } else {
     add(parse(type, mappingSource));
   }
 }
  public void testDynamicMappingsNotNeeded() throws Exception {
    IndexService indexService = createIndex("test");
    DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("foo")
            .field("type", "text")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    Mapper update =
        parse(
            mapper,
            parser,
            XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
    // foo is already defined in the mappings
    assertNull(update);
  }
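Rendered as JSON, the two inputs are:

  mapping:  {"type":{"properties":{"foo":{"type":"text"}}}}
  document: {"foo":"bar"}

The document only uses a field that the mapping already defines, so no dynamic update is needed and parse returns null.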
Example #10
 public DocumentMapper merge(
     String type, CompressedXContent mappingSource, boolean applyDefault, boolean updateAllTypes) {
   if (DEFAULT_MAPPING.equals(type)) {
     // verify we can parse it
     DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource);
      // still add it as a document mapper so we have it registered and, for example,
      // persisted back into the cluster meta data if needed, or checked for existence
     try (ReleasableLock lock = mappingWriteLock.acquire()) {
       mappers = newMapBuilder(mappers).put(type, mapper).map();
     }
     try {
       defaultMappingSource = mappingSource.string();
     } catch (IOException e) {
       throw new ElasticsearchGenerationException("failed to un-compress", e);
     }
     return mapper;
   } else {
     return merge(parse(type, mappingSource, applyDefault), updateAllTypes);
   }
 }
Example #11
 public DocumentMapper parse(
     String mappingType, CompressedXContent mappingSource, boolean applyDefault)
     throws MapperParsingException {
   return documentParser.parse(
       mappingType, mappingSource, applyDefault ? defaultMappingSource : null);
 }
Example #12
 /** Just parses and returns the mapper without adding it. */
 public DocumentMapper parse(String mappingType, String mappingSource)
     throws MapperParsingException {
   return documentParser.parse(mappingType, mappingSource, defaultMappingSource);
 }
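As the Javadoc says, this variant only parses; a hypothetical caller might use it to validate a mapping without registering it:

  // Illustrative only: parse to validate; nothing is added to the mappers map.
  DocumentMapper candidate =
      mapperService.parse("my_type", "{\"my_type\":{\"properties\":{}}}");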
Example #13
    @SuppressWarnings({"unchecked"})
    @Override
    public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext)
        throws MapperParsingException {
      AttachmentMapper.Builder builder = new AttachmentMapper.Builder(name);

      for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator();
          iterator.hasNext(); ) {
        Map.Entry<String, Object> entry = iterator.next();
        String fieldName = entry.getKey();
        Object fieldNode = entry.getValue();
        if (fieldName.equals("path")
            && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
          builder.pathType(parsePathType(name, fieldNode.toString()));
          iterator.remove();
        } else if (fieldName.equals("fields")) {
          Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
          for (Iterator<Map.Entry<String, Object>> fieldsIterator =
                  fieldsNode.entrySet().iterator();
              fieldsIterator.hasNext(); ) {
            Map.Entry<String, Object> entry1 = fieldsIterator.next();
            String propName = entry1.getKey();
            Map<String, Object> propNode = (Map<String, Object>) entry1.getValue();

            Mapper.Builder<?, ?> mapperBuilder =
                findMapperBuilder(propNode, propName, parserContext);
            if (parseMultiField(
                (FieldMapper.Builder) mapperBuilder,
                fieldName,
                parserContext,
                propName,
                propNode)) {
              fieldsIterator.remove();
            } else if (propName.equals(name)
                && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
              builder.content(mapperBuilder);
              fieldsIterator.remove();
            } else {
              switch (propName) {
                case FieldNames.CONTENT:
                  builder.content(mapperBuilder);
                  fieldsIterator.remove();
                  break;
                case FieldNames.DATE:
                  builder.date(mapperBuilder);
                  fieldsIterator.remove();
                  break;
                case FieldNames.AUTHOR:
                  builder.author(mapperBuilder);
                  fieldsIterator.remove();
                  break;
                case FieldNames.CONTENT_LENGTH:
                  builder.contentLength(mapperBuilder);
                  fieldsIterator.remove();
                  break;
                case FieldNames.CONTENT_TYPE:
                  builder.contentType(mapperBuilder);
                  fieldsIterator.remove();
                  break;
                case FieldNames.KEYWORDS:
                  builder.keywords(mapperBuilder);
                  fieldsIterator.remove();
                  break;
                case FieldNames.LANGUAGE:
                  builder.language(mapperBuilder);
                  fieldsIterator.remove();
                  break;
                case FieldNames.TITLE:
                  builder.title(mapperBuilder);
                  fieldsIterator.remove();
                  break;
                case FieldNames.NAME:
                  builder.name(mapperBuilder);
                  fieldsIterator.remove();
                  break;
              }
            }
          }
          DocumentMapperParser.checkNoRemainingFields(
              fieldName, fieldsNode, parserContext.indexVersionCreated());
          iterator.remove();
        }
      }

      return builder;
    }
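For context, this parser consumes attachment mapping definitions of roughly the following shape. The sub-field names correspond to the FieldNames constants used above; the outer field name file and the sub-field types are illustrative:

  {
    "file": {
      "type": "attachment",
      "fields": {
        "content": { "type": "string" },
        "title":   { "type": "string" },
        "author":  { "type": "string" },
        "date":    { "type": "date" }
      }
    }
  }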