/**
 * Merges the {@code _ttl} settings of another mapper into this one.
 *
 * <p>Enabling {@code _ttl} is a one-way switch: once enabled it cannot be merged back to
 * disabled, and attempting to do so is reported as a conflict. The default TTL value is only
 * carried over on a real (non-simulated) merge and only while {@code _ttl} is enabled.
 *
 * @param mergeWith the mapper being merged in; must be a {@code TTLFieldMapper}
 * @param mergeResult collects conflicts and tells us whether this is a simulation
 */
@Override
 public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
   TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith;
   // Only act when the merged-in mapper explicitly set an enabled state; the default value
   // means the incoming mapping said nothing about _ttl.
   if (ttlMergeWith.enabledState != Defaults.ENABLED_STATE) {
     if (this.enabledState == EnabledAttributeMapper.ENABLED
         && ttlMergeWith.enabledState == EnabledAttributeMapper.DISABLED) {
       mergeResult.addConflict("_ttl cannot be disabled once it was enabled.");
     } else if (!mergeResult.simulate()) {
       this.enabledState = ttlMergeWith.enabledState;
     }
   }
   if (ttlMergeWith.defaultTTL != -1) {
     // we never build the default when the field is disabled so we should also not set it
     // (it does not make a difference though as everything that is not build in toXContent will
     // also not be set in the cluster)
     if (!mergeResult.simulate() && (enabledState == EnabledAttributeMapper.ENABLED)) {
       this.defaultTTL = ttlMergeWith.defaultTTL;
     }
   }
 }
 /**
  * Merges another completion field mapping into this one.
  *
  * <p>The suggester configuration (payloads, position-increment preservation, separator
  * preservation and context mapping) must match between the two mappers; any mismatch is
  * recorded as a conflict. The only setting actually carried over on a real (non-simulated)
  * merge is {@code maxInputLength}.
  */
 @Override
 public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
   super.merge(mergeWith, mergeResult);
   CompletionFieldMapper other = (CompletionFieldMapper) mergeWith;
   String fullName = fieldType.names().fullName();
   if (fieldType().analyzingSuggestLookupProvider.hasPayloads()
       != other.fieldType().analyzingSuggestLookupProvider.hasPayloads()) {
     mergeResult.addConflict("mapper [" + fullName + "] has different payload values");
   }
   if (fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements()
       != other.fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements()) {
     mergeResult.addConflict(
         "mapper [" + fullName + "] has different 'preserve_position_increments' values");
   }
   if (fieldType().analyzingSuggestLookupProvider.getPreserveSep()
       != other.fieldType().analyzingSuggestLookupProvider.getPreserveSep()) {
     mergeResult.addConflict(
         "mapper [" + fullName + "] has different 'preserve_separators' values");
   }
   if (!ContextMapping.mappingsAreEqual(
       fieldType().getContextMapping(), other.fieldType().getContextMapping())) {
     mergeResult.addConflict(
         "mapper [" + fullName + "] has different 'context_mapping' values");
   }
   if (mergeResult.simulate() == false) {
     this.maxInputLength = other.maxInputLength;
   }
 }
  /**
   * Merges another geo_shape mapping into this one.
   *
   * <p>Only applies when merging with another {@code GeoShapeFieldMapper}. On a real,
   * conflict-free merge, an explicitly-set {@code coerce} value from the incoming mapper
   * replaces the current one.
   */
  @Override
  public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
    super.merge(mergeWith, mergeResult);
    // Class identity check; equals() on Class objects is identity-based anyway.
    if (this.getClass() != mergeWith.getClass()) {
      return;
    }

    GeoShapeFieldMapper other = (GeoShapeFieldMapper) mergeWith;
    if (!mergeResult.simulate() && !mergeResult.hasConflicts() && other.coerce.explicit()) {
      this.coerce = other.coerce;
    }
  }
  // Merging a token_count field whose analyzer changes ("keyword" -> "standard") must be
  // conflict-free: first verified as a simulation (no state change), then for real.
  @Test
  public void testMerge() throws IOException {
    String stage1Mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("person")
            .startObject("properties")
            .startObject("tc")
            .field("type", "token_count")
            .field("analyzer", "keyword")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    DocumentMapper stage1 = parser.parse(stage1Mapping);

    String stage2Mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("person")
            .startObject("properties")
            .startObject("tc")
            .field("type", "token_count")
            .field("analyzer", "standard")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
    DocumentMapper stage2 = parser.parse(stage2Mapping);

    MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false);
    assertThat(mergeResult.hasConflicts(), equalTo(false));
    // Just simulated so merge hasn't happened yet
    assertThat(
        ((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(),
        equalTo("keyword"));

    mergeResult = stage1.merge(stage2.mapping(), false, false);
    assertThat(mergeResult.hasConflicts(), equalTo(false));
    // Not a simulation this time, so the merge has been applied and the analyzer updated
    assertThat(
        ((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(),
        equalTo("standard"));
  }
 /**
  * Merges another completion field mapping into this one; the only setting carried over on a
  * real (non-simulated) merge is {@code maxInputLength}.
  */
 @Override
 public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
   super.merge(mergeWith, mergeResult);
   CompletionFieldMapper other = (CompletionFieldMapper) mergeWith;
   if (mergeResult.simulate() == false) {
     this.maxInputLength = other.maxInputLength;
   }
 }
 /**
  * Merges another short field mapping into this one.
  *
  * <p>Only applies between mappers of the same concrete class. On a real (non-simulated)
  * merge, the incoming mapper's {@code null_value} is adopted.
  */
 @Override
 public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
   super.merge(mergeWith, mergeResult);
   // Class identity check; equals() on Class objects is identity-based anyway.
   if (this.getClass() != mergeWith.getClass()) {
     return;
   }
   if (mergeResult.simulate()) {
     return;
   }
   ShortFieldMapper other = (ShortFieldMapper) mergeWith;
   // Clone-mutate-freeze: the live fieldType is frozen, so work on a copy and swap it in.
   this.fieldType = this.fieldType.clone();
   this.fieldType.setNullValue(other.fieldType().nullValue());
   this.fieldType.freeze();
 }
  /**
   * Merges another object mapping into this one.
   *
   * <p>Conflicts are reported (and the merge aborted) when the incoming mapper is not an
   * object mapping, or when the nested setting would change in either direction. Otherwise the
   * incoming {@code dynamic} setting is adopted (real merges only), subclass-specific merging
   * runs via {@link #doMerge}, and child mappers are merged recursively: unknown children are
   * added, known non-metadata children are merged in place.
   *
   * @param mergeWith the mapper being merged in
   * @param mergeResult collects conflicts and new mappers; also tells us whether this is a
   *     simulation
   */
  @Override
  public void merge(final Mapper mergeWith, final MergeResult mergeResult)
      throws MergeMappingException {
    if (!(mergeWith instanceof ObjectMapper)) {
      mergeResult.addConflict(
          "Can't merge a non object mapping ["
              + mergeWith.name()
              + "] with an object mapping ["
              + name()
              + "]");
      return;
    }
    ObjectMapper mergeWithObject = (ObjectMapper) mergeWith;

    // The nested flag may not change in either direction once a mapping exists.
    if (nested().isNested()) {
      if (!mergeWithObject.nested().isNested()) {
        mergeResult.addConflict(
            "object mapping [" + name() + "] can't be changed from nested to non-nested");
        return;
      }
    } else {
      if (mergeWithObject.nested().isNested()) {
        mergeResult.addConflict(
            "object mapping [" + name() + "] can't be changed from non-nested to nested");
        return;
      }
    }

    // Adopt the incoming dynamic setting, but only on a real (non-simulated) merge.
    if (!mergeResult.simulate()) {
      if (mergeWithObject.dynamic != null) {
        this.dynamic = mergeWithObject.dynamic;
      }
    }

    // Subclass hook for merging additional object-level settings.
    doMerge(mergeWithObject, mergeResult);

    List<Mapper> mappersToPut = new ArrayList<>();
    List<ObjectMapper> newObjectMappers = new ArrayList<>();
    List<FieldMapper> newFieldMappers = new ArrayList<>();
    for (Mapper mapper : mergeWithObject) {
      Mapper mergeWithMapper = mapper;
      Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName());
      if (mergeIntoMapper == null) {
        // no mapping, simply add it if not simulating
        if (!mergeResult.simulate()) {
          mappersToPut.add(mergeWithMapper);
          MapperUtils.collect(mergeWithMapper, newObjectMappers, newFieldMappers);
        }
      } else if (mergeIntoMapper instanceof MetadataFieldMapper == false) {
        // root mappers can only exist here for backcompat, and are merged in Mapping
        mergeIntoMapper.merge(mergeWithMapper, mergeResult);
      }
    }
    // Register newly-discovered mappers with the merge result before exposing them below.
    if (!newFieldMappers.isEmpty()) {
      mergeResult.addFieldMappers(newFieldMappers);
    }
    if (!newObjectMappers.isEmpty()) {
      mergeResult.addObjectMappers(newObjectMappers);
    }
    // add the mappers only after the administration have been done, so it will not be visible to
    // parser (which first try to read with no lock)
    for (Mapper mapper : mappersToPut) {
      putMapper(mapper);
    }
  }