@Override public void close() { for (FieldMapper<String> mapper : mappers.values()) { mapper.close(); } // disposeHelper(); }
/**
 * Indexes the given string values under {@code mapper}. If this is the first time the
 * mapper's index name is seen, the mapper is registered with the document mapper and
 * published into the shared {@code mappers} map; otherwise the values are simply parsed.
 *
 * @param context current parse context; flagged as "within new mapper" while registering
 * @param mapper  the field mapper to index the values with
 * @param data    values to index; ignored when {@code null} or empty
 * @throws IOException if indexing a value fails
 */
private void addFieldData(
    ParseContext context, FieldMapper<String> mapper, Collection<String> data)
    throws IOException {
  if (data != null && !data.isEmpty()) {
    if (mappers.get(mapper.names().indexName()) == null) {
      // New mapper
      context.setWithinNewMapper();
      try {
        parseData(context, mapper, data);
        // Collect the field/object mappers introduced by this new mapper so the
        // document mapper can be told about them.
        FieldMapperListener.Aggregator newFields = new FieldMapperListener.Aggregator();
        ObjectMapperListener.Aggregator newObjects = new ObjectMapperListener.Aggregator();
        mapper.traverse(newFields);
        mapper.traverse(newObjects);
        // callback on adding those fields!
        context.docMapper().addFieldMappers(newFields.mappers);
        context.docMapper().addObjectMappers(newObjects.mappers);
        context.setMappingsModified();
        // Publish the mapper into the shared map; the mutator is created, used and
        // closed under the mutex so concurrent registrations don't interleave.
        synchronized (mutex) {
          UpdateInPlaceMap<String, FieldMapper<String>>.Mutator mappingMutator =
              this.mappers.mutator();
          mappingMutator.put(mapper.names().indexName(), mapper);
          mappingMutator.close();
        }
      } finally {
        // Always clear the flag, even if parsing/registration threw.
        context.clearWithinNewMapper();
      }
    } else {
      // Mapper already added
      parseData(context, mapper, data);
    }
  }
}
private Fields generateTermVectorsFromDoc(TermVectorRequest request, boolean doAllFields) throws IOException { // parse the document, at the moment we do update the mapping, just like percolate ParsedDocument parsedDocument = parseDocument(indexShard.shardId().getIndex(), request.type(), request.doc()); // select the right fields and generate term vectors ParseContext.Document doc = parsedDocument.rootDoc(); Collection<String> seenFields = new HashSet<>(); Collection<GetField> getFields = new HashSet<>(); for (IndexableField field : doc.getFields()) { FieldMapper fieldMapper = indexShard.mapperService().smartNameFieldMapper(field.name()); if (seenFields.contains(field.name())) { continue; } else { seenFields.add(field.name()); } if (!isValidField(fieldMapper)) { continue; } if (request.selectedFields() == null && !doAllFields && !fieldMapper.fieldType().storeTermVectors()) { continue; } if (request.selectedFields() != null && !request.selectedFields().contains(field.name())) { continue; } String[] values = doc.getValues(field.name()); getFields.add(new GetField(field.name(), Arrays.asList((Object[]) values))); } return generateTermVectors(getFields, request.offsets(), request.perFieldAnalyzer()); }
private void removeObjectFieldMappers(DocumentMapper docMapper) { // we need to remove those mappers for (FieldMapper mapper : docMapper.mappers()) { FieldMappers mappers = nameFieldMappers.get(mapper.names().name()); if (mappers != null) { mappers = mappers.remove(mapper); if (mappers.isEmpty()) { nameFieldMappers = newMapBuilder(nameFieldMappers).remove(mapper.names().name()).immutableMap(); } else { nameFieldMappers = newMapBuilder(nameFieldMappers).put(mapper.names().name(), mappers).immutableMap(); } } mappers = indexNameFieldMappers.get(mapper.names().indexName()); if (mappers != null) { mappers = mappers.remove(mapper); if (mappers.isEmpty()) { indexNameFieldMappers = newMapBuilder(indexNameFieldMappers) .remove(mapper.names().indexName()) .immutableMap(); } else { indexNameFieldMappers = newMapBuilder(indexNameFieldMappers) .put(mapper.names().indexName(), mappers) .immutableMap(); } } mappers = fullNameFieldMappers.get(mapper.names().fullName()); if (mappers != null) { mappers = mappers.remove(mapper); if (mappers.isEmpty()) { fullNameFieldMappers = newMapBuilder(fullNameFieldMappers).remove(mapper.names().fullName()).immutableMap(); } else { fullNameFieldMappers = newMapBuilder(fullNameFieldMappers) .put(mapper.names().fullName(), mappers) .immutableMap(); } } } for (ObjectMapper mapper : docMapper.objectMappers().values()) { ObjectMappers mappers = objectMappers.get(mapper.fullPath()); if (mappers != null) { mappers = mappers.remove(mapper); if (mappers.isEmpty()) { objectMappers = newMapBuilder(objectMappers).remove(mapper.fullPath()).immutableMap(); } else { objectMappers = newMapBuilder(objectMappers).put(mapper.fullPath(), mappers).immutableMap(); } } } }
/**
 * Removes the field and object mappers of {@code docMapper} from the name, index-name,
 * full-name and full-path lookup maps.
 *
 * <p>Copy-on-write: each map is copied into a {@code MapBuilder}, mutated, and the resulting
 * immutable map is swapped back into the field at the end.
 */
private void removeObjectAndFieldMappers(DocumentMapper docMapper) {
  // we need to remove those mappers
  MapBuilder<String, FieldMappers> nameFieldMappers = newMapBuilder(this.nameFieldMappers);
  MapBuilder<String, FieldMappers> indexNameFieldMappers = newMapBuilder(this.indexNameFieldMappers);
  MapBuilder<String, FieldMappers> fullNameFieldMappers = newMapBuilder(this.fullNameFieldMappers);
  for (FieldMapper mapper : docMapper.mappers()) {
    // remove from the name() lookup
    FieldMappers mappers = nameFieldMappers.get(mapper.names().name());
    if (mappers != null) {
      mappers = mappers.remove(mapper);
      if (mappers.isEmpty()) {
        // last mapper under this key: drop the entry entirely
        nameFieldMappers.remove(mapper.names().name());
      } else {
        nameFieldMappers.put(mapper.names().name(), mappers);
      }
    }
    // remove from the indexName() lookup
    mappers = indexNameFieldMappers.get(mapper.names().indexName());
    if (mappers != null) {
      mappers = mappers.remove(mapper);
      if (mappers.isEmpty()) {
        indexNameFieldMappers.remove(mapper.names().indexName());
      } else {
        indexNameFieldMappers.put(mapper.names().indexName(), mappers);
      }
    }
    // remove from the fullName() lookup
    mappers = fullNameFieldMappers.get(mapper.names().fullName());
    if (mappers != null) {
      mappers = mappers.remove(mapper);
      if (mappers.isEmpty()) {
        fullNameFieldMappers.remove(mapper.names().fullName());
      } else {
        fullNameFieldMappers.put(mapper.names().fullName(), mappers);
      }
    }
  }
  // publish the rebuilt field-mapper maps
  this.nameFieldMappers = nameFieldMappers.map();
  this.indexNameFieldMappers = indexNameFieldMappers.map();
  this.fullNameFieldMappers = fullNameFieldMappers.map();

  // same copy-on-write dance for the object mappers, keyed by full path
  MapBuilder<String, ObjectMappers> fullPathObjectMappers = newMapBuilder(this.fullPathObjectMappers);
  for (ObjectMapper mapper : docMapper.objectMappers().values()) {
    ObjectMappers mappers = fullPathObjectMappers.get(mapper.fullPath());
    if (mappers != null) {
      mappers = mappers.remove(mapper);
      if (mappers.isEmpty()) {
        fullPathObjectMappers.remove(mapper.fullPath());
      } else {
        fullPathObjectMappers.put(mapper.fullPath(), mappers);
      }
    }
  }
  this.fullPathObjectMappers = fullPathObjectMappers.map();
}
/**
 * Serializes this attachment mapper: type, legacy "path" setting (pre-2.x indices only),
 * every sub-field mapper under a "fields" object, and the multi fields.
 */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    builder.startObject(simpleName());
    builder.field("type", CONTENT_TYPE);
    if (indexCreatedBefore2x) {
        // "path" only applies to indices created before 2.0
        builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
    }

    builder.startObject("fields");
    contentMapper.toXContent(builder, params);
    authorMapper.toXContent(builder, params);
    titleMapper.toXContent(builder, params);
    nameMapper.toXContent(builder, params);
    dateMapper.toXContent(builder, params);
    keywordsMapper.toXContent(builder, params);
    contentTypeMapper.toXContent(builder, params);
    contentLengthMapper.toXContent(builder, params);
    languageMapper.toXContent(builder, params);
    builder.endObject();

    // Multi fields are serialized exactly once, at the mapper level. The previous code also
    // called multiFields.toXContent() inside the "fields" object above, emitting the multi
    // field definitions twice in the generated mapping.
    multiFields.toXContent(builder, params);

    builder.endObject();
    return builder;
}
/**
 * Assertion helper: verifies every field mapper (metadata and document fields alike) points
 * at the canonical {@code MappedFieldType} instance registered in {@code fieldTypes}.
 * Always returns {@code true} so it can be used inside an {@code assert} statement.
 */
private boolean assertMappersShareSameFieldType() {
    for (DocumentMapper docMapper : docMappers(false)) {
        List<FieldMapper> allMappers = new ArrayList<>();
        Collections.addAll(allMappers, docMapper.mapping().metadataMappers);
        MapperUtils.collect(docMapper.root(), new ArrayList<ObjectMapper>(), allMappers);
        for (FieldMapper current : allMappers) {
            assert current.fieldType() == fieldTypes.get(current.name()) : current.name();
        }
    }
    return true;
}
/**
 * Dispatches parsing to the right mapper kind: object mappers recurse via
 * {@code parseObject}; field mappers parse the current value and then feed any
 * {@code copy_to} targets. Returns the dynamic mapping update, if any.
 */
private static Mapper parseObjectOrField(ParseContext context, Mapper mapper) throws IOException {
    if (mapper instanceof ObjectMapper) {
        return parseObject(context, (ObjectMapper) mapper, false);
    }
    FieldMapper fieldMapper = (FieldMapper) mapper;
    Mapper mappingUpdate = fieldMapper.parse(context);
    if (fieldMapper.copyTo() != null) {
        parseCopyFields(context, fieldMapper, fieldMapper.copyTo().copyToFields());
    }
    return mappingUpdate;
}
/**
 * Adds one Lucene field per value to the current document, indexed under the mapper's
 * index name. URI-named fields get the URI field type, everything else the label type.
 */
private void parseData(
    ParseContext context, FieldMapper<String> mapper, Collection<String> values)
    throws IOException {
  // Hoist the per-mapper decisions out of the loop: same for every value.
  final String indexName = mapper.names().indexName();
  final boolean uri = isUriField(mapper.name());
  for (String value : values) {
    if (uri) {
      context.doc().add(new Field(indexName, value, Defaults.URI_FIELD_TYPE));
    } else {
      context.doc().add(new Field(indexName, value, Defaults.LABEL_FIELD_TYPE));
    }
  }
}
/**
 * Rewrites every stored field value in place into its search representation, using the
 * field's mapper ({@code valueForSearch}). Fields without a mapper are left untouched.
 */
public void postProcess(DocumentMapper documentMapper) {
  for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {
    FieldMapper<?> mapper = documentMapper.mappers().indexName(entry.getKey()).mapper();
    if (mapper == null) {
      continue; // unmapped field: keep raw values
    }
    List<Object> values = entry.getValue();
    final int count = values.size();
    for (int idx = 0; idx < count; idx++) {
      values.set(idx, mapper.valueForSearch(values.get(idx)));
    }
  }
}
/**
 * Warms global ordinals for every field type that has eager global ordinals enabled.
 * One warm-up task is submitted per field; the returned {@link TerminationHandle}
 * blocks until all tasks have completed.
 */
@Override
public TerminationHandle warmReader(
    final IndexShard indexShard, final Engine.Searcher searcher) {
  final MapperService mapperService = indexShard.mapperService();
  // Collect the field types to warm, de-duplicated by field name across all doc mappers.
  final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
  for (DocumentMapper docMapper : mapperService.docMappers(false)) {
    for (FieldMapper fieldMapper : docMapper.mappers()) {
      final MappedFieldType fieldType = fieldMapper.fieldType();
      final String indexName = fieldType.name();
      if (fieldType.eagerGlobalOrdinals() == false) {
        continue;
      }
      warmUpGlobalOrdinals.put(indexName, fieldType);
    }
  }
  final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
  final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size());
  for (final MappedFieldType fieldType : warmUpGlobalOrdinals.values()) {
    executor.execute(
        () -> {
          try {
            final long start = System.nanoTime();
            IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldType);
            DirectoryReader reader = searcher.getDirectoryReader();
            // Loading global field data builds the global ordinal map as a side effect.
            IndexFieldData<?> global = ifd.loadGlobal(reader);
            if (reader.leaves().isEmpty() == false) {
              global.load(reader.leaves().get(0));
            }
            if (indexShard.warmerService().logger().isTraceEnabled()) {
              indexShard
                  .warmerService()
                  .logger()
                  .trace(
                      "warmed global ordinals for [{}], took [{}]",
                      fieldType.name(),
                      TimeValue.timeValueNanos(System.nanoTime() - start));
            }
          } catch (Exception e) {
            // Warming is best-effort: log and move on rather than failing the reader.
            indexShard
                .warmerService()
                .logger()
                .warn("failed to warm-up global ordinals for [{}]", e, fieldType.name());
          } finally {
            // Count down on success AND failure so the handle can't block forever.
            latch.countDown();
          }
        });
  }
  return () -> latch.await();
}
private void checkFieldUniqueness( String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) { assert Thread.holdsLock(this); // first check within mapping final Set<String> objectFullNames = new HashSet<>(); for (ObjectMapper objectMapper : objectMappers) { final String fullPath = objectMapper.fullPath(); if (objectFullNames.add(fullPath) == false) { throw new IllegalArgumentException( "Object mapper [" + fullPath + "] is defined twice in mapping for type [" + type + "]"); } } final Set<String> fieldNames = new HashSet<>(); for (FieldMapper fieldMapper : fieldMappers) { final String name = fieldMapper.name(); if (objectFullNames.contains(name)) { throw new IllegalArgumentException( "Field [" + name + "] is defined both as an object and a field in [" + type + "]"); } else if (fieldNames.add(name) == false) { throw new IllegalArgumentException( "Field [" + name + "] is defined twice in [" + type + "]"); } } // then check other types for (String fieldName : fieldNames) { if (fullPathObjectMappers.containsKey(fieldName)) { throw new IllegalArgumentException( "[" + fieldName + "] is defined as a field in mapping [" + type + "] but this name is already used for an object in other types"); } } for (String objectPath : objectFullNames) { if (fieldTypes.get(objectPath) != null) { throw new IllegalArgumentException( "[" + objectPath + "] is defined as an object in mapping [" + type + "] but this name is already used for a field in other types"); } } }
/**
 * Generates term vectors on the fly for selected fields that don't already have stored
 * term vectors (or whose analyzer was overridden per-field), and merges them with the
 * term vectors already read from the index.
 *
 * @return the merged fields, or the input {@code termVectorsByField} unchanged when no
 *     field needed on-the-fly generation
 */
private Fields addGeneratedTermVectors(
    Engine.GetResult get,
    Fields termVectorsByField,
    TermVectorRequest request,
    Set<String> selectedFields)
    throws IOException {
  /* only keep valid fields */
  Set<String> validFields = new HashSet<>();
  for (String field : selectedFields) {
    FieldMapper fieldMapper = indexShard.mapperService().smartNameFieldMapper(field);
    if (!isValidField(fieldMapper)) {
      continue;
    }
    // already retrieved, only if the analyzer hasn't been overridden at the field
    if (fieldMapper.fieldType().storeTermVectors()
        && (request.perFieldAnalyzer() == null
            || !request.perFieldAnalyzer().containsKey(field))) {
      continue;
    }
    validFields.add(field);
  }
  if (validFields.isEmpty()) {
    // nothing to generate: return the index-backed term vectors untouched
    return termVectorsByField;
  }
  /* generate term vectors from fetched document fields */
  GetResult getResult =
      indexShard
          .getService()
          .get(
              get,
              request.id(),
              request.type(),
              validFields.toArray(Strings.EMPTY_ARRAY),
              null,
              false);
  Fields generatedTermVectors =
      generateTermVectors(
          getResult.getFields().values(), request.offsets(), request.perFieldAnalyzer());
  /* merge with existing Fields */
  if (termVectorsByField == null) {
    return generatedTermVectors;
  } else {
    return mergeFields(termVectorsByField, generatedTermVectors);
  }
}
/**
 * Propagates field-type updates to this mapper, its multi fields, and every sub-mapper.
 * Copy-on-write: returns {@code this} when nothing changed, otherwise a clone carrying the
 * updated sub-mappers.
 */
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
  ExternalMapper update = (ExternalMapper) super.updateFieldType(fullNameToFieldType);
  MultiFields multiFieldsUpdate = multiFields.updateFieldType(fullNameToFieldType);
  BinaryFieldMapper binMapperUpdate =
      (BinaryFieldMapper) binMapper.updateFieldType(fullNameToFieldType);
  BooleanFieldMapper boolMapperUpdate =
      (BooleanFieldMapper) boolMapper.updateFieldType(fullNameToFieldType);
  BaseGeoPointFieldMapper pointMapperUpdate =
      (BaseGeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType);
  GeoShapeFieldMapper shapeMapperUpdate =
      (GeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType);
  TextFieldMapper stringMapperUpdate =
      (TextFieldMapper) stringMapper.updateFieldType(fullNameToFieldType);
  // Identity comparison on every component: when all are unchanged, keep this instance.
  if (update == this
      && multiFieldsUpdate == multiFields
      && binMapperUpdate == binMapper
      && boolMapperUpdate == boolMapper
      && pointMapperUpdate == pointMapper
      && shapeMapperUpdate == shapeMapper
      && stringMapperUpdate == stringMapper) {
    return this;
  }
  if (update == this) {
    // super kept this instance but a sub-mapper changed: clone before mutating
    update = (ExternalMapper) clone();
  }
  update.multiFields = multiFieldsUpdate;
  update.binMapper = binMapperUpdate;
  update.boolMapper = boolMapperUpdate;
  update.pointMapper = pointMapperUpdate;
  update.shapeMapper = shapeMapperUpdate;
  update.stringMapper = stringMapperUpdate;
  return update;
}
@Override public Mapper parse(ParseContext context) throws IOException { byte[] bytes = "Hello world".getBytes(Charset.defaultCharset()); binMapper.parse(context.createExternalValueContext(bytes)); boolMapper.parse(context.createExternalValueContext(true)); // Let's add a Dummy Point Double lat = 42.0; Double lng = 51.0; GeoPoint point = new GeoPoint(lat, lng); pointMapper.parse(context.createExternalValueContext(point)); // Let's add a Dummy Shape Point shape = ShapeBuilders.newPoint(-100, 45).build(); shapeMapper.parse(context.createExternalValueContext(shape)); context = context.createExternalValueContext(generatedValue); // Let's add a Original String stringMapper.parse(context); multiFields.parse(this, context); return null; }
/**
 * Serializes this geo-point mapper's settings. Each setting is emitted either when it
 * differs from its default or when {@code includeDefaults} is set; precision settings are
 * additionally gated on their feature (geohash / lat_lon) being enabled.
 */
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params)
    throws IOException {
  super.doXContentBody(builder, includeDefaults, params);
  if (includeDefaults || pathType != Defaults.PATH_TYPE) {
    builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
  }
  if (includeDefaults || fieldType().isLatLonEnabled() != Defaults.ENABLE_LATLON) {
    builder.field("lat_lon", fieldType().isLatLonEnabled());
  }
  if (includeDefaults || fieldType().isGeohashEnabled() != Defaults.ENABLE_GEOHASH) {
    builder.field("geohash", fieldType().isGeohashEnabled());
  }
  if (includeDefaults
      || fieldType().isGeohashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) {
    builder.field("geohash_prefix", fieldType().isGeohashPrefixEnabled());
  }
  // geohash_precision is only meaningful while geohash indexing is on
  if (fieldType().isGeohashEnabled()
      && (includeDefaults
          || fieldType().geohashPrecision() != Defaults.GEO_HASH_PRECISION)) {
    builder.field("geohash_precision", fieldType().geohashPrecision());
  }
  // precision_step is only meaningful while lat_lon indexing is on
  if (fieldType().isLatLonEnabled()
      && (includeDefaults
          || fieldType().latFieldType().numericPrecisionStep()
              != NumericUtils.PRECISION_STEP_DEFAULT)) {
    builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
  }
  if (includeDefaults || fieldType().coerce != Defaults.COERCE) {
    builder.field(Names.COERCE, fieldType().coerce);
  }
  if (includeDefaults || fieldType().ignoreMalformed != Defaults.IGNORE_MALFORMED) {
    builder.field(Names.IGNORE_MALFORMED, fieldType().ignoreMalformed);
  }
}
/**
 * Builds packed-array field data for the mapper's field, using this builder's numeric type.
 * {@code mapperService} is accepted for interface compatibility but not needed here.
 */
@Override
public IndexFieldData<AtomicNumericFieldData> build(
    Index index,
    @IndexSettings Settings indexSettings,
    FieldMapper<?> mapper,
    IndexFieldDataCache cache,
    CircuitBreakerService breakerService,
    MapperService mapperService) {
  return new PackedArrayIndexFieldData(
      index,
      indexSettings,
      mapper.names(),
      mapper.fieldDataType(),
      cache,
      numericType,
      breakerService);
}
/**
 * Merges another completion mapper into this one. Outside of simulation, adopts the
 * incoming {@code maxInputLength}; conflict detection is handled by the super call.
 */
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
  super.merge(mergeWith, mergeResult);
  CompletionFieldMapper other = (CompletionFieldMapper) mergeWith;
  if (mergeResult.simulate() == false) {
    this.maxInputLength = other.maxInputLength;
  }
}
/**
 * Merges another IP mapper into this one: always adopts {@code include_in_all}, and adopts
 * {@code ignore_malformed} only when the incoming mapper set it explicitly.
 */
@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
  super.doMerge(mergeWith, updateAllTypes);
  IpFieldMapper incoming = (IpFieldMapper) mergeWith;
  includeInAll = incoming.includeInAll;
  if (incoming.ignoreMalformed.explicit()) {
    ignoreMalformed = incoming.ignoreMalformed;
  }
}
private Map<String, FieldMappingMetaData> findFieldMappingsByType( DocumentMapper documentMapper, GetFieldMappingsIndexRequest request) { MapBuilder<String, FieldMappingMetaData> fieldMappings = new MapBuilder<>(); final DocumentFieldMappers allFieldMappers = documentMapper.mappers(); for (String field : request.fields()) { if (Regex.isMatchAllPattern(field)) { for (FieldMapper fieldMapper : allFieldMappers) { addFieldMapper( fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); } } else if (Regex.isSimpleMatchPattern(field)) { // go through the field mappers 3 times, to make sure we give preference to the resolve // order: full name, index name, name. // also make sure we only store each mapper once. Collection<FieldMapper> remainingFieldMappers = newLinkedList(allFieldMappers); for (Iterator<FieldMapper> it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) { addFieldMapper( fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } for (Iterator<FieldMapper> it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) { addFieldMapper( fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } } else { // not a pattern FieldMapper fieldMapper = allFieldMappers.smartNameFieldMapper(field); if (fieldMapper != null) { addFieldMapper(field, fieldMapper, fieldMappings, request.includeDefaults()); } else if (request.probablySingleFieldRequest()) { fieldMappings.put(field, FieldMappingMetaData.NULL); } } } return fieldMappings.immutableMap(); }
/** Parses a sample document and checks which fields were folded into {@code _all}. */
@Test
public void testSimpleAllMappers() throws Exception {
  String mappingSource =
      copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
  DocumentMapper docMapper = MapperTestUtils.newParser().parse(mappingSource);
  byte[] docSource = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
  Document rootDoc = docMapper.parse(new BytesArray(docSource)).rootDoc();

  AllField allField = (AllField) rootDoc.getField("_all");
  AllEntries entries =
      ((AllTokenStream) allField.tokenStream(docMapper.mappers().indexAnalyzer())).allEntries();
  // exactly these three fields contributed to _all for this document
  assertThat(entries.fields().size(), equalTo(3));
  assertThat(entries.fields().contains("address.last.location"), equalTo(true));
  assertThat(entries.fields().contains("name.last"), equalTo(true));
  assertThat(entries.fields().contains("simple1"), equalTo(true));

  FieldMapper allMapper = docMapper.mappers().smartNameFieldMapper("_all");
  assertThat(allField.fieldType().omitNorms(), equalTo(true));
  assertThat(
      allMapper.queryStringTermQuery(new Term("_all", "foobar")),
      Matchers.instanceOf(AllTermQuery.class));
}
/**
 * Copies the currently parsed value into the field named {@code field} (copy_to support).
 * When the destination field is already mapped it is parsed directly; otherwise the value
 * is parsed dynamically and the resulting mapping update is propagated up the object
 * hierarchy to the root before being recorded on the context.
 *
 * @throws MapperParsingException when the destination's parent object does not exist
 */
private static void parseCopy(String field, ParseContext context) throws IOException {
  FieldMapper fieldMapper = context.docMapper().mappers().getMapper(field);
  if (fieldMapper != null) {
    fieldMapper.parse(context);
  } else {
    // The path of the dest field might be completely different from the current one so we need to reset it
    context = context.overridePath(new ContentPath(0));
    ObjectMapper mapper = context.root();
    String objectPath = "";
    String fieldPath = field;
    int posDot = field.lastIndexOf('.');
    if (posDot > 0) {
      // split "a.b.c" into parent object path "a.b" and leaf field name "c"
      objectPath = field.substring(0, posDot);
      context.path().add(objectPath);
      mapper = context.docMapper().objectMappers().get(objectPath);
      fieldPath = field.substring(posDot + 1);
    }
    if (mapper == null) {
      //TODO: Create an object dynamically?
      throw new MapperParsingException(
          "attempt to copy value to non-existing object [" + field + "]");
    }
    ObjectMapper update =
        parseDynamicValue(context, mapper, fieldPath, context.parser().currentToken());
    assert update != null; // we are parsing a dynamic value so we necessarily created a new mapping

    // propagate the update to the root: wrap the update in each ancestor object,
    // walking the dotted path one segment up per iteration
    while (objectPath.length() > 0) {
      String parentPath = "";
      ObjectMapper parent = context.root();
      posDot = objectPath.lastIndexOf('.');
      if (posDot > 0) {
        parentPath = objectPath.substring(0, posDot);
        parent = context.docMapper().objectMappers().get(parentPath);
      }
      if (parent == null) {
        throw new IllegalStateException(
            "[" + objectPath + "] has no parent for path [" + parentPath + "]");
      }
      update = parent.mappingUpdate(update);
      objectPath = parentPath;
    }
    context.addDynamicMappingsUpdate(update);
  }
}
/**
 * Serializes {@code fieldMapper} to JSON and records it under {@code field}. A field that
 * is already present is left alone (first mapper wins).
 *
 * @throws ElasticsearchException if XContent serialization fails
 */
private void addFieldMapper(
    String field,
    FieldMapper<?> fieldMapper,
    MapBuilder<String, FieldMappingMetaData> fieldMappings,
    boolean includeDefaults) {
  if (fieldMappings.containsKey(field)) {
    return; // already collected for this field name
  }
  try {
    XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
    builder.startObject();
    ToXContent.Params serializationParams =
        includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS;
    fieldMapper.toXContent(builder, serializationParams);
    builder.endObject();
    FieldMappingMetaData metaData =
        new FieldMappingMetaData(fieldMapper.names().fullName(), builder.bytes());
    fieldMappings.put(field, metaData);
  } catch (IOException e) {
    throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e);
  }
}
/**
 * Builds field data for the {@code _index} field. Only the index and the mapper's names are
 * needed; the remaining parameters are accepted for interface compatibility.
 */
@Override
public IndexFieldData<?> build(
    Index index,
    Settings indexSettings,
    FieldMapper<?> mapper,
    IndexFieldDataCache cache,
    CircuitBreakerService breakerService,
    MapperService mapperService,
    GlobalOrdinalsBuilder globalOrdinalBuilder) {
  return new IndexIndexFieldData(index, mapper.names());
}
private boolean isValidField(FieldMapper field) { // must be a string if (!(field instanceof StringFieldMapper)) { return false; } // and must be indexed if (field.fieldType().indexOptions() == IndexOptions.NONE) { return false; } return true; }
/**
 * With {@code numeric_detection} enabled, string values that look numeric should be
 * dynamically mapped to numeric field types: "100" becomes a long and "100.0" a float,
 * after the dynamic mapping update is applied back to the index.
 */
public void testNumericDetectionEnabled() throws Exception {
  String mapping =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject("type")
          .field("numeric_detection", true)
          .endObject()
          .endObject()
          .string();
  IndexService index = createIndex("test");
  client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
  DocumentMapper defaultMapper = index.mapperService().documentMapper("type");

  // parse a doc whose numeric-looking strings should trigger dynamic numeric mappings
  ParsedDocument doc =
      defaultMapper.parse(
          "test",
          "type",
          "1",
          XContentFactory.jsonBuilder()
              .startObject()
              .field("s_long", "100")
              .field("s_double", "100.0")
              .endObject()
              .bytes());
  assertNotNull(doc.dynamicMappingsUpdate());

  // apply the dynamic update so the new field mappings become visible
  client()
      .admin()
      .indices()
      .preparePutMapping("test")
      .setType("type")
      .setSource(doc.dynamicMappingsUpdate().toString())
      .get();

  defaultMapper = index.mapperService().documentMapper("type");
  FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long");
  assertThat(mapper.fieldType().typeName(), equalTo("long"));

  mapper = defaultMapper.mappers().smartNameFieldMapper("s_double");
  assertThat(mapper.fieldType().typeName(), equalTo("float"));
}
/**
 * Merges another geo-shape mapper into this one. Mappers of a different class are ignored
 * (the super call is still responsible for recording conflicts). Outside of simulation and
 * only when there are no conflicts, an explicitly-set {@code coerce} is adopted.
 */
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
  super.merge(mergeWith, mergeResult);
  if (!this.getClass().equals(mergeWith.getClass())) {
    return;
  }
  GeoShapeFieldMapper other = (GeoShapeFieldMapper) mergeWith;
  boolean applyChanges = !mergeResult.simulate() && !mergeResult.hasConflicts();
  if (applyChanges && other.coerce.explicit()) {
    this.coerce = other.coerce;
  }
}
/**
 * Dynamic-template test: documents parsed with the generic-store template should produce
 * stored fields, both on the indexed document and on the resulting field mappers, after
 * the dynamic mapping update is applied.
 */
public void testSimple() throws Exception {
  String mapping =
      copyToStringFromClasspath(
          "/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json");
  IndexService index = createIndex("test");
  client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping).get();
  DocumentMapper docMapper = index.mapperService().documentMapper("person");
  byte[] json =
      copyToBytesFromClasspath(
          "/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-data.json");
  ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json));

  // apply the dynamic mappings produced while parsing, then re-fetch the mapper
  client()
      .admin()
      .indices()
      .preparePutMapping("test")
      .setType("person")
      .setSource(parsedDoc.dynamicMappingsUpdate().toString())
      .get();
  docMapper = index.mapperService().documentMapper("person");

  Document doc = parsedDoc.rootDoc();
  // "name": stored on the indexed field and on its mapper
  IndexableField f = doc.getField("name");
  assertThat(f.name(), equalTo("name"));
  assertThat(f.stringValue(), equalTo("some name"));
  assertThat(f.fieldType().stored(), equalTo(true));

  FieldMapper fieldMapper = docMapper.mappers().getMapper("name");
  assertThat(fieldMapper.fieldType().stored(), equalTo(true));

  // "age": also stored via the dynamic template
  f = doc.getField("age");
  assertThat(f.name(), equalTo("age"));
  assertThat(f.fieldType().stored(), equalTo(true));

  fieldMapper = docMapper.mappers().getMapper("age");
  assertThat(fieldMapper.fieldType().stored(), equalTo(true));
}
/**
 * Serializes this text mapper's settings: analyzers, include_in_all, position increment
 * gap, fielddata, and — only when fielddata is enabled — the frequency-filter sub-object.
 */
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params)
    throws IOException {
  super.doXContentBody(builder, includeDefaults, params);
  doXContentAnalyzers(builder, includeDefaults);
  if (includeInAll != null) {
    builder.field("include_in_all", includeInAll);
  } else if (includeDefaults) {
    // unset means the default, which is to include the field in _all
    builder.field("include_in_all", true);
  }
  if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
    builder.field("position_increment_gap", positionIncrementGap);
  }
  if (includeDefaults
      || fieldType().fielddata() != ((TextFieldType) defaultFieldType).fielddata()) {
    builder.field("fielddata", fieldType().fielddata());
  }
  if (fieldType().fielddata()) {
    // frequency filter settings only apply while fielddata is enabled; emit the sub-object
    // when any of the three values differs from its default (or defaults are requested)
    if (includeDefaults
        || fieldType().fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY
        || fieldType().fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY
        || fieldType().fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE) {
      builder.startObject("fielddata_frequency_filter");
      if (includeDefaults
          || fieldType().fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY) {
        builder.field("min", fieldType().fielddataMinFrequency());
      }
      if (includeDefaults
          || fieldType().fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY) {
        builder.field("max", fieldType().fielddataMaxFrequency());
      }
      if (includeDefaults
          || fieldType().fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE) {
        builder.field("min_segment_size", fieldType().fielddataMinSegmentSize());
      }
      builder.endObject();
    }
  }
}
/**
 * Serializes this IP mapper's settings: null_value (rendered as an address string),
 * ignore_malformed, and include_in_all.
 */
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params)
    throws IOException {
  super.doXContentBody(builder, includeDefaults, params);

  Object nullValue = fieldType().nullValue();
  if (includeDefaults || nullValue != null) {
    // a non-null value is an InetAddress; render it in its textual form
    builder.field(
        "null_value",
        nullValue == null ? null : InetAddresses.toAddrString((InetAddress) nullValue));
  }

  if (includeDefaults || ignoreMalformed.explicit()) {
    builder.field("ignore_malformed", ignoreMalformed.value());
  }

  if (includeInAll == null) {
    if (includeDefaults) {
      // unset means the default, which is to exclude this field from _all
      builder.field("include_in_all", false);
    }
  } else {
    builder.field("include_in_all", includeInAll);
  }
}